├── .editorconfig
├── .gitignore
├── .pylintrc
├── README.md
├── attention.png
├── data
│   ├── chinese
│   │   ├── neg_t.txt
│   │   └── pos_t.txt
│   ├── rt-polaritydata
│   │   ├── rt-polarity.neg
│   │   └── rt-polarity.pos
│   ├── train.tfrecords
│   ├── valid.tfrecords
│   └── vocab.pkl
├── data_helpers.py
├── model.py
├── ran_cell.py
├── train.py
└── visualizer.ipynb

--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig helps developers define and maintain consistent
2 | # coding styles between different editors and IDEs
3 | # editorconfig.org
4 | 
5 | root = true
6 | 
7 | 
8 | [*]
9 | 
10 | # Change these settings to your own preference
11 | indent_style = space
12 | indent_size = 2
13 | 
14 | # We recommend you keep these unchanged
15 | end_of_line = lf
16 | charset = utf-8
17 | trim_trailing_whitespace = true
18 | insert_final_newline = true
19 | 
20 | [*.md]
21 | trim_trailing_whitespace = false
22 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | .DS_Store
3 | checkpoint
4 | log
5 | __pycache__
6 | .ipynb_checkpoints
7 | data/*.tfrecords
8 | data/*.npy
--------------------------------------------------------------------------------
/.pylintrc:
--------------------------------------------------------------------------------
1 | # My pylintrc for use with atom.io's linter-pylint
2 | [MESSAGES CONTROL]
3 | disable=W0311,W1201,W0702,W0611,W0621,W0212,W0512,E1101,C0111,C0103,R0902,R0903,R0913
4 | 
5 | # checks for:
6 | #  * unauthorized constructions
7 | #  * strict indentation
8 | #  * line length
9 | #  * use of <> instead of !=
10 | #
11 | [FORMAT]
12 | # Maximum number of characters on a single line.
13 | max-line-length=128
14 | # Maximum number of lines in a module
15 | max-module-lines=1000
16 | # String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
17 | # tab). In this repo it is 2 spaces.
18 | indent-string='  '
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Hierarchical Attention Networks for Chinese Sentiment Classification
2 | ====================================================
3 | 
4 | This is a [HAN](http://www.aclweb.org/anthology/N16-1174) implementation for sentiment classification, with a pre-trained [character-level embedding](https://github.com/indiejoseph/chinese-char-rnn), using [RAN](https://github.com/indiejoseph/tf-ran-cell) cells instead of GRUs.
5 | 
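For reference, the RAN recurrence from the paper: the content layer is a simple linear map of the input, and the cell state is a purely additive, gated running sum, so every state is an interpretable weighted sum of past inputs:

    c̃_t = W_cx · x_t
    i_t = σ(W_ih · h_{t-1} + W_ix · x_t + b_i)
    f_t = σ(W_fh · h_{t-1} + W_fx · x_t + b_f)
    c_t = i_t ⊙ c̃_t + f_t ⊙ c_{t-1}
    h_t = g(c_t)

`ran_cell.py` implements this recurrence, with optional layer normalization and dropout on the output.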
6 | ### Dataset
7 | Downloaded from the internet, but I forget where ;p. The original dataset is in Simplified Chinese; I used OpenCC to translate it into Traditional Chinese.
8 | After 100 epochs, the validation accuracy reached 96.31%.
9 | 
10 | ### Requirements
11 | TensorFlow r1.1+
12 | 
13 | ### Attention Heatmap
14 | ![attention heatmap](/attention.png)
15 | 
--------------------------------------------------------------------------------
/attention.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/indiejoseph/doc-han-att/b6e26db7d405eaa84c11a316605333c98874ba83/attention.png
--------------------------------------------------------------------------------
/data/rt-polaritydata/rt-polarity.neg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/indiejoseph/doc-han-att/b6e26db7d405eaa84c11a316605333c98874ba83/data/rt-polaritydata/rt-polarity.neg
--------------------------------------------------------------------------------
/data/rt-polaritydata/rt-polarity.pos:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/indiejoseph/doc-han-att/b6e26db7d405eaa84c11a316605333c98874ba83/data/rt-polaritydata/rt-polarity.pos
--------------------------------------------------------------------------------
/data/train.tfrecords:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/indiejoseph/doc-han-att/b6e26db7d405eaa84c11a316605333c98874ba83/data/train.tfrecords
--------------------------------------------------------------------------------
/data/valid.tfrecords:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/indiejoseph/doc-han-att/b6e26db7d405eaa84c11a316605333c98874ba83/data/valid.tfrecords
--------------------------------------------------------------------------------
/data/vocab.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/indiejoseph/doc-han-att/b6e26db7d405eaa84c11a316605333c98874ba83/data/vocab.pkl
--------------------------------------------------------------------------------
/data_helpers.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import numpy as np
3 | import os
4 | import tensorflow as tf
5 | from tqdm import tqdm
6 | import _pickle as cPickle
7 | import re
8 | import itertools
9 | from collections import Counter
10 | 
11 | 
12 | PAD = "_PAD"
13 | UNK = "_UNK"
14 | 
15 | 
16 | def Q2B(uchar):
17 |   """Convert a full-width character to its half-width equivalent."""
18 |   inside_code = ord(uchar)
19 |   if inside_code == 0x3000:
20 |     inside_code = 0x0020
21 |   else:
22 |     inside_code -= 0xfee0
23 |   # If the converted code point is not a half-width character, return the original character
24 |   if inside_code < 0x0020 or inside_code > 0x7e:
25 |     return uchar
26 |   return chr(inside_code)
27 | 
28 | 
29 | def replace_all(repls, text):
30 |   # return re.sub('|'.join(repls.keys()), lambda k: repls[k.group(0)], text)
31 |   return re.sub('|'.join(re.escape(key) for key in repls.keys()), lambda k: repls[k.group(0)], text)
32 | 
33 | 
34 | def split_sentence(txt):
35 |   sents = re.split(r'\n|\s|;|;|。|,|\.|,|\?|\!|||[=]{2,}|[.]{3,}|[─]{2,}|[\-]{2,}|~|、|╱|∥', txt)
36 |   sents = [c for s in sents for c in re.split(r'([^%]+[\d,.]+%)', s)]
37 |   sents = list(filter(None, sents))
38 |   return sents
39 | 
40 | 
41 | def normalize_punctuation(text):
42 |   cpun = [[' '],
43 |           ['﹗', '!'],
44 |           ['“', '゛', '〃', '′', '"'],
45 |           ['”'],
46 |           ['´', '‘', '’'],
47 |           [';', '﹔'],
48 |           ['《', '〈', '<'],
49 |           ['》', '〉', '>'],
50 |           ['﹑'],
51 |           ['【', '『', '〔', '﹝', '「', '﹁'],
52 |           ['】', '』', '〕', '﹞', '」', '﹂'],
53 |           ['(', '「'],
54 |           [')', '」'],
55 |           ['﹖', '?'],
56 |           ['︰', '﹕', ':'],
57 |           ['・', '.', '·', '‧', '°'],
58 |           ['●', '○', '▲', '◎', '◇', '■', '□', '※', '◆'],
59 |           ['〜', '~', '∼'],
60 |           ['︱', '│', '┼'],
61 |           ['╱'],
62 |           ['╲'],
63 |           ['—', 'ー', '―', '‐', '−', '─', '﹣', '–', 'ㄧ', '-']]
64 |   epun = [' ', '!', '"', '"', '\'', ';', '<', '>', '、', '[', ']', '(', ')', '?', ':', '・', '•', '~', '|', '/', '\\', '-']
65 |   repls = {}
66 | 
67 |   for i in range(len(cpun)):
68 |     for j in range(len(cpun[i])):
69 |       repls[cpun[i][j]] = epun[i]
70 | 
71 |   return replace_all(repls, text)
72 | 
73 | 
74 | def clean_str(txt):
75 |   # txt = txt.replace('説', '說')
76 |   # txt = txt.replace('閲', '閱')
77 |   # txt = txt.replace('脱', '脫')
78 |   # txt = txt.replace('蜕', '蛻')
79 |   # txt = txt.replace('户', '戶')
80 |   # normalize the variant form 臺 to 台
81 |   txt = txt.replace('臺', '台')
82 |   txt = txt.replace(' ', '')  # \u3000, the full-width (ideographic) space
83 |   txt = normalize_punctuation(txt)
84 |   txt = ''.join([Q2B(c) for c in list(txt)])
85 |   return txt
86 | 
87 | 
88 | def build_vocab(sentences):
89 |   """
90 |   Builds a vocabulary mapping from word to index based on the sentences.
91 |   Returns vocabulary mapping and inverse vocabulary mapping.
92 |   """
93 |   # Build vocabulary
94 |   word_counts = Counter(itertools.chain(*sentences))
95 |   # Mapping from index to word
96 |   vocabulary_inv = [x[0] for x in word_counts.most_common()]
97 |   # Mapping from word to index
98 |   vocabulary = {x: i for i, x in enumerate(vocabulary_inv)}
99 |   return [vocabulary, vocabulary_inv]
100 | 
101 | 
102 | def get_vocab(path='./data/vocab.pkl'):
103 |   """Loads the vocab file, if present"""
104 |   if not os.path.exists(path) or os.path.isdir(path):
105 |     raise ValueError('No file at {}'.format(path))
106 | 
107 |   char_list = cPickle.load(open(path, 'rb'))
108 |   vocab = dict(zip(char_list, range(len(char_list))))
109 | 
110 |   return vocab, char_list
111 | 
112 | 
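# Usage sketch (illustration only; _example_preprocess is not part of the
# original module). One raw review flows through the helpers above like this,
# assuming ./data/vocab.pkl exists:
def _example_preprocess(doc='書太小了 拿到手裡很是失望'):
  sents = split_sentence(clean_str(doc))  # normalize width/punctuation, then split
  vocab, _ = get_vocab()                  # character -> index mapping
  # index 0 is the fallback for characters outside the vocab, mirroring
  # build_dataset() below
  return [[vocab.get(ch, 0) for ch in s] for s in sents]

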
113 | def build_dataset(pos_path='chinese/pos_t.txt', neg_path='chinese/neg_t.txt',
114 |                   data_dir='./data', max_doc_len=30, max_sent_len=50):
115 |   pos_docs = list(open(os.path.join(data_dir, pos_path)).readlines())
116 |   neg_docs = list(open(os.path.join(data_dir, neg_path)).readlines())
117 |   vocab, _ = get_vocab('./data/vocab.pkl')
118 |   pos_size = len(pos_docs)
119 |   neg_size = len(neg_docs)
120 |   pos_train_size = int(pos_size * 0.9)
121 |   pos_valid_size = pos_size - pos_train_size
122 |   neg_train_size = int(neg_size * 0.9)
123 |   neg_valid_size = neg_size - neg_train_size
124 |   train_path = os.path.join(data_dir, 'train.tfrecords')
125 |   valid_path = os.path.join(data_dir, 'valid.tfrecords')
126 | 
127 |   def upsampling(x, size):
128 |     if len(x) > size:
129 |       return x
130 |     diff_size = size - len(x)
131 |     return x + list(np.random.choice(x, diff_size, replace=diff_size > len(x)))  # sample with replacement only when x cannot cover the gap
132 | 
133 | 
134 |   def write_data(doc, label, out_f):
135 |     doc = split_sentence(clean_str(doc))
136 |     document_length = len(doc)
137 |     sentence_lengths = np.zeros((max_doc_len,), dtype=np.int64)
138 |     data = np.ones((max_doc_len * max_sent_len,), dtype=np.int64)  # 1 = PAD
139 |     doc_len = min(document_length, max_doc_len)
140 | 
141 |     for j in range(doc_len):
142 |       sent = doc[j]
143 |       actual_len = len(sent)
144 |       pos = j * max_sent_len
145 |       sent_len = min(actual_len, max_sent_len)
146 |       # sentence_lengths
147 |       sentence_lengths[j] = sent_len
148 |       # dataset
149 |       data[pos:pos+sent_len] = [vocab.get(sent[k], 0) for k in range(sent_len)]
150 | 
151 |     features = {'sentence_lengths': tf.train.Feature(int64_list=tf.train.Int64List(value=sentence_lengths)),
152 |                 'document_lengths': tf.train.Feature(int64_list=tf.train.Int64List(value=[doc_len])),
153 |                 'label': tf.train.Feature(int64_list=tf.train.Int64List(value=[label])),
154 |                 'text': tf.train.Feature(int64_list=tf.train.Int64List(value=data))}
155 |     example = tf.train.Example(features=tf.train.Features(feature=features))
156 |     out_f.write(example.SerializeToString())
157 | 
158 |   # oversampling
159 |   with tf.python_io.TFRecordWriter(train_path) as out_f:
160 |     train_size = max(pos_train_size, neg_train_size)
161 |     pos_train_docs = np.random.choice(upsampling(pos_docs[:pos_train_size], train_size), train_size, replace=False)
162 |     neg_train_docs = np.random.choice(upsampling(neg_docs[:neg_train_size], train_size), train_size, replace=False)
163 | 
164 |     print(len(pos_train_docs), len(neg_train_docs))
165 |     for i in tqdm(range(train_size)):
166 |       pos_row = pos_train_docs[i]
167 |       neg_row = neg_train_docs[i]
168 |       write_data(pos_row, 1, out_f)
169 |       write_data(neg_row, 0, out_f)
170 | 
171 |   with tf.python_io.TFRecordWriter(valid_path) as out_f:
172 |     valid_size = max(pos_valid_size, neg_valid_size)
173 |     pos_valid_docs = np.random.choice(upsampling(pos_docs[pos_train_size:], valid_size), valid_size, replace=False)
174 |     neg_valid_docs = np.random.choice(upsampling(neg_docs[neg_train_size:], valid_size), valid_size, replace=False)
175 |     for i in tqdm(range(valid_size)):
176 |       pos_row = pos_valid_docs[i]
177 |       neg_row = neg_valid_docs[i]
178 |       write_data(pos_row, 1, out_f)
179 |       write_data(neg_row, 0, out_f)
180 | 
181 |   print('Done {} records, train {}, valid {}'.format(pos_size + neg_size,
182 |                                                      pos_train_size + neg_train_size,
183 |                                                      pos_valid_size + neg_valid_size))
184 | 
185 | 
186 | if __name__ == '__main__':
187 |   build_dataset()
188 | 
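# Layout of each serialized example written above (for reference; the parsing
# side lives in train.py's read_records()):
#   sentence_lengths: int64[max_doc_len]                per-sentence character counts
#   document_lengths: int64 scalar                      number of sentences kept
#   label:            int64 scalar                      1 = positive, 0 = negative
#   text:             int64[max_doc_len * max_sent_len] padded character indices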
--------------------------------------------------------------------------------
/model.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import tensorflow as tf
3 | from tensorflow.contrib.rnn import DropoutWrapper, GRUCell
4 | import tensorflow.contrib.layers as layers
5 | from tensorflow.python.ops.nn import bidirectional_dynamic_rnn
6 | from ran_cell import RANCell
7 | 
8 | 
9 | L2_REG = 1e-4
10 | 
11 | 
12 | class Model(object):
13 |   def __init__(self, conf):
14 |     self.batch_size = conf.batch_size
15 |     self.vocab_size = conf.vocab_size
16 |     self.rnn_size = conf.rnn_size
17 |     self.document_size = conf.document_size
18 |     self.sentence_size = conf.sentence_size
19 |     self.word_attention_size = conf.word_attention_size
20 |     self.sent_attention_size = conf.sent_attention_size
21 |     self.char_embedding_size = conf.char_embedding_size
22 |     self.keep_prob = conf.keep_prob
23 | 
24 |     self.is_training = tf.placeholder(dtype=tf.bool, name='is_training')
25 |     self.inputs = tf.placeholder(shape=(self.batch_size, self.document_size, self.sentence_size), dtype=tf.int64, name='inputs')
26 |     self.labels = tf.placeholder(shape=(self.batch_size,), dtype=tf.int64, name='labels')
27 |     self.sentence_lengths = tf.placeholder(shape=(self.batch_size, self.document_size), dtype=tf.int64, name='sentence_lengths')
28 |     self.document_lengths = tf.placeholder(shape=(self.batch_size,), dtype=tf.int64, name='document_lengths')
29 | 
30 |     with tf.device('/cpu:0'):
31 |       self.embedding = tf.get_variable('embedding',
32 |                                        [self.vocab_size, self.char_embedding_size],
33 |                                        trainable=False)
34 |       inputs = tf.nn.embedding_lookup(self.embedding, self.inputs)
35 | 
36 |     char_length = tf.reshape(self.sentence_lengths, [-1])  # [batch_size * document_size]
37 |     char_inputs = tf.reshape(inputs, [self.batch_size * self.document_size, self.sentence_size, self.char_embedding_size])
38 | 
39 |     with tf.variable_scope('character_encoder') as scope:
40 |       char_outputs, _ = self.bi_gru_encode(char_inputs, char_length, scope)
41 | 
42 |       with tf.variable_scope('attention') as scope:
43 |         char_attn_outputs, self.char_attentions = self.attention(char_outputs, self.word_attention_size, scope)  # weights kept for the visualizer notebook
44 |         char_attn_outputs = tf.reshape(char_attn_outputs, [self.batch_size, self.document_size, -1])
45 | 
46 |       with tf.variable_scope('dropout'):
47 |         char_attn_outputs = layers.dropout(char_attn_outputs,
48 |                                            keep_prob=self.keep_prob,
49 |                                            is_training=self.is_training)
50 | 
51 |     with tf.variable_scope('sentence_encoder') as scope:
52 |       sent_outputs, _ = self.bi_gru_encode(char_attn_outputs, self.document_lengths, scope)
53 | 
54 |       with tf.variable_scope('attention') as scope:
55 |         sent_attn_outputs, self.sent_attentions = self.attention(sent_outputs, self.sent_attention_size, scope)
56 | 
57 |       with tf.variable_scope('dropout'):
58 |         sent_attn_outputs = layers.dropout(sent_attn_outputs,
59 |                                            keep_prob=self.keep_prob,
60 |                                            is_training=self.is_training)
61 | 
62 |     with tf.variable_scope('losses'):
63 |       logits = layers.fully_connected(inputs=sent_attn_outputs,
64 |                                       num_outputs=2,
65 |                                       activation_fn=None,
66 |                                       weights_regularizer=layers.l2_regularizer(scale=L2_REG))
67 |       self.pred = pred = tf.argmax(logits, 1)  # exposed for the visualizer notebook
68 |       loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits,
69 |                                                                            labels=self.labels))
70 |       correct_pred = tf.equal(self.labels, pred)
71 |       correct_pred = tf.cast(correct_pred, tf.float32)
72 |       self.accuracy = tf.reduce_mean(correct_pred)
73 | 
74 |       reg_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
75 |       self.cost = tf.add_n([loss] + reg_losses)
76 | 
77 | 
78 |   def attention(self, inputs, size, scope):
79 |     with tf.variable_scope(scope or 'attention') as scope:
80 |       attention_context_vector = tf.get_variable(name='attention_context_vector',
81 |                                                  shape=[size],
82 |                                                  regularizer=layers.l2_regularizer(scale=L2_REG),
83 |                                                  dtype=tf.float32)
84 |       input_projection = layers.fully_connected(inputs, size,
85 |                                                 activation_fn=tf.tanh,
86 |                                                 weights_regularizer=layers.l2_regularizer(scale=L2_REG))
87 |       vector_attn = tf.reduce_sum(tf.multiply(input_projection, attention_context_vector), axis=2, keep_dims=True)
88 |       attention_weights = tf.nn.softmax(vector_attn, dim=1)
89 |       weighted_projection = tf.multiply(inputs, attention_weights)
90 |       outputs = tf.reduce_sum(weighted_projection, axis=1)
91 | 
92 |     return outputs, attention_weights  # weights returned so callers can visualize them
93 | 
94 | 
95 |   def bi_gru_encode(self, inputs, sentence_size, scope=None):  # despite the name, the encoder uses RAN cells (see ran_cell.py)
96 |     batch_size = inputs.get_shape()[0]
97 | 
98 |     with tf.variable_scope(scope or 'bi_gru_encode'):
99 |       fw_cell = RANCell(self.rnn_size, keep_prob=self.keep_prob, normalize=True, is_training=self.is_training)
100 |       bw_cell = RANCell(self.rnn_size, keep_prob=self.keep_prob, normalize=True, is_training=self.is_training)
101 |       fw_cell_state = fw_cell.zero_state(batch_size, tf.float32)
102 |       bw_cell_state = bw_cell.zero_state(batch_size, tf.float32)
103 | 
104 |       enc_out, (enc_state_fw, enc_state_bw) = bidirectional_dynamic_rnn(cell_fw=fw_cell,
105 |                                                                         cell_bw=bw_cell,
106 |                                                                         inputs=inputs,
107 |                                                                         sequence_length=sentence_size,
108 |                                                                         initial_state_fw=fw_cell_state,
109 |                                                                         initial_state_bw=bw_cell_state)
110 | 
111 |       enc_state = tf.concat([enc_state_fw, enc_state_bw], 1)
112 |       enc_outputs = tf.concat(enc_out, 2)
113 | 
114 |     return enc_outputs, enc_state
115 | 
116 | 
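# Shape sketch of attention() above (a numpy stand-in for the TF ops;
# _attention_pool_sketch is illustrative and not used by the model):
def _attention_pool_sketch(inputs, proj_w, u):
  # inputs: (batch, time, dim) encoder outputs; proj_w: (dim, attn); u: (attn,)
  import numpy as np
  z = np.tanh(inputs @ proj_w)                 # (batch, time, attn) projection
  scores = (z * u).sum(axis=2, keepdims=True)  # (batch, time, 1) similarity to the context vector
  scores -= scores.max(axis=1, keepdims=True)  # stabilize the softmax
  weights = np.exp(scores) / np.exp(scores).sum(axis=1, keepdims=True)  # softmax over time
  return (inputs * weights).sum(axis=1)        # (batch, dim) attention-weighted average

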
117 | if __name__ == '__main__':
118 |   tf.flags.DEFINE_integer('batch_size', 32, 'Batch Size')
119 |   tf.flags.DEFINE_integer('vocab_size', 1000, 'Vocabulary size')
120 |   tf.flags.DEFINE_integer('word_attention_size', 300, 'Word level attention unit size')
121 |   tf.flags.DEFINE_integer('sent_attention_size', 300, 'Sentence level attention unit size')
122 |   tf.flags.DEFINE_integer('document_size', 16, 'Document size')
123 |   tf.flags.DEFINE_integer('sentence_size', 25, 'Sentence size')
124 |   tf.flags.DEFINE_integer('attention_size', 300, 'Attention unit size')
125 |   tf.flags.DEFINE_integer('rnn_size', 300, 'RNN unit size')
126 |   tf.flags.DEFINE_integer('char_embedding_size', 300, 'Embedding dimension')
127 |   tf.flags.DEFINE_float('keep_prob', 0.5, 'Dropout keep prob')
128 |   tf.flags.DEFINE_bool('is_training', True, 'training model')
129 | 
130 |   FLAGS = tf.flags.FLAGS
131 |   FLAGS._parse_flags()
132 | 
133 |   model = Model(FLAGS)
134 | 
--------------------------------------------------------------------------------
/ran_cell.py:
--------------------------------------------------------------------------------
1 | import collections
2 | import tensorflow as tf
3 | import numpy as np
4 | from tensorflow.python.util import nest
5 | from tensorflow.python.ops import variable_scope as vs
6 | from tensorflow.python.ops import array_ops
7 | from tensorflow.python.ops import init_ops
8 | from tensorflow.python.ops import math_ops
9 | from tensorflow.python.ops import nn_ops
10 | from tensorflow.contrib.rnn.python.ops import core_rnn_cell_impl
11 | from tensorflow.python.ops.rnn_cell_impl import _RNNCell as RNNCell
12 | import logging  # used by RANCell for the input_size deprecation warning
13 | from tensorflow.python.ops.math_ops import tanh
14 | from tensorflow.python.ops import variable_scope as vs
15 | 
16 | 
17 | _checked_scope = core_rnn_cell_impl._checked_scope
18 | _BIAS_VARIABLE_NAME = "bias"
19 | _WEIGHTS_VARIABLE_NAME = "kernel"
20 | 
21 | 
22 | def orthogonal(shape):
23 |   """Generate a random orthogonal matrix of the given shape."""
24 |   flat_shape = (shape[0], np.prod(shape[1:]))
25 |   a = np.random.normal(0.0, 1.0, flat_shape)
26 |   u, _, v = np.linalg.svd(a, full_matrices=False)
27 |   q = u if u.shape == flat_shape else v
28 |   return q.reshape(shape)
29 | 
30 | 
31 | def orthogonal_initializer(scale=1.0):
32 |   """Orthogonal initializer."""
33 |   def _initializer(shape, dtype=tf.float32, partition_info=None):  # pylint: disable=unused-argument
34 |     return tf.constant(orthogonal(shape) * scale, dtype)
35 | 
36 |   return _initializer
37 | 
38 | 
39 | def linear(args,
40 |            output_size,
41 |            bias,
42 |            bias_initializer=None,
43 |            kernel_initializer=None,
44 |            kernel_regularizer=None,
45 |            bias_regularizer=None,
46 |            normalize=False):
47 |   """Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.
48 |   Args:
49 |     args: a 2D Tensor or a list of 2D, batch x n, Tensors.
50 |     output_size: int, second dimension of W[i].
51 |     bias: boolean, whether to add a bias term or not.
52 |     bias_initializer: starting value to initialize the bias
53 |       (default is all zeros).
54 |     kernel_initializer: starting value to initialize the weight.
55 |     kernel_regularizer: kernel regularizer
56 |     bias_regularizer: bias regularizer
57 |   Returns:
58 |     A 2D Tensor with shape [batch x output_size] equal to
59 |     sum_i(args[i] * W[i]), where W[i]s are newly created matrices.
60 |   Raises:
61 |     ValueError: if some of the arguments have unspecified or wrong shapes.
62 | """ 63 | if args is None or (nest.is_sequence(args) and not args): 64 | raise ValueError("`args` must be specified") 65 | if not nest.is_sequence(args): 66 | args = [args] 67 | 68 | # Calculate the total size of arguments on dimension 1. 69 | total_arg_size = 0 70 | shapes = [a.get_shape() for a in args] 71 | for shape in shapes: 72 | if shape.ndims != 2: 73 | raise ValueError("linear is expecting 2D arguments: %s" % shapes) 74 | if shape[1].value is None: 75 | raise ValueError("linear expects shape[1] to be provided for shape %s, " 76 | "but saw %s" % (shape, shape[1])) 77 | else: 78 | total_arg_size += shape[1].value 79 | 80 | dtype = [a.dtype for a in args][0] 81 | 82 | # Now the computation. 83 | scope = vs.get_variable_scope() 84 | with vs.variable_scope(scope) as outer_scope: 85 | weights = vs.get_variable( 86 | _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size], 87 | dtype=dtype, 88 | initializer=kernel_initializer, 89 | regularizer=kernel_regularizer) 90 | 91 | if len(args) == 1: 92 | res = math_ops.matmul(args[0], weights) 93 | else: 94 | res = math_ops.matmul(array_ops.concat(args, 1), weights) 95 | 96 | if normalize: 97 | res = tf.contrib.layers.layer_norm(res) 98 | 99 | # remove the layer’s bias if there is one (because it would be redundant) 100 | if not bias or normalize: 101 | return res 102 | 103 | with vs.variable_scope(outer_scope) as inner_scope: 104 | inner_scope.set_partitioner(None) 105 | if bias_initializer is None: 106 | bias_initializer = init_ops.constant_initializer(0.0, dtype=dtype) 107 | biases = vs.get_variable( 108 | _BIAS_VARIABLE_NAME, [output_size], 109 | dtype=dtype, 110 | initializer=bias_initializer, 111 | regularizer=bias_regularizer) 112 | 113 | return nn_ops.bias_add(res, biases) 114 | 115 | 116 | class RANCell(RNNCell): 117 | """Recurrent Additive Networks (cf. 
116 | class RANCell(RNNCell):
117 |   """Recurrent Additive Networks (cf. https://arxiv.org/abs/1705.07393)."""
118 | 
119 |   def __init__(self, num_units, input_size=None, activation=tanh, keep_prob=0.5,
120 |                normalize=False, reuse=None, is_training=tf.constant(False)):
121 |     if input_size is not None:
122 |       logging.warn("%s: The input_size parameter is deprecated.", self)
123 |     self._num_units = num_units
124 |     self._activation = activation
125 |     self._normalize = normalize
126 |     self._keep_prob = keep_prob
127 |     self._reuse = reuse
128 |     self._is_training = is_training
129 | 
130 |   @property
131 |   def state_size(self):
132 |     return tf.contrib.rnn.LSTMStateTuple(self._num_units, self.output_size)
133 | 
134 |   @property
135 |   def output_size(self):
136 |     return self._num_units
137 | 
138 |   def __call__(self, inputs, state, scope=None):
139 |     with _checked_scope(self, scope or "ran_cell", reuse=self._reuse):
140 |       with vs.variable_scope("gates"):
141 |         c, h = state
142 |         gates = tf.nn.sigmoid(linear([inputs, h], 2 * self._num_units, True,
143 |                                      normalize=self._normalize,
144 |                                      kernel_initializer=tf.orthogonal_initializer()))
145 |         i, f = array_ops.split(value=gates, num_or_size_splits=2, axis=1)
146 | 
147 |       with vs.variable_scope("candidate"):
148 |         content = linear([inputs], self._num_units, True, normalize=self._normalize)
149 | 
150 |       new_c = i * content + f * c
151 |       new_h = self._activation(new_c)  # h_t = g(c_t), following the RAN paper
152 | 
153 |       new_h = tf.cond(self._is_training,
154 |                       lambda: nn_ops.dropout(new_h, self._keep_prob),
155 |                       lambda: new_h)
156 | 
157 |       new_state = tf.contrib.rnn.LSTMStateTuple(new_c, new_h)
158 |       output = new_h
159 |       return output, new_state
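
# Minimal usage sketch (illustrative; not part of the original module).
# RANCell is a standard RNNCell, so it drops into tf.nn.dynamic_rnn just
# like a GRU or LSTM cell; the shapes below are arbitrary demo values.
def _demo():
  inputs = tf.placeholder(tf.float32, [4, 7, 16])  # (batch, time, input_dim)
  cell = RANCell(32, normalize=True, is_training=tf.constant(True))
  outputs, state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)
  # outputs: (4, 7, 32); state: LSTMStateTuple(c=(4, 32), h=(4, 32))
  return outputs, state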
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import os
3 | import pprint
4 | import time
5 | import tensorflow as tf
6 | import numpy as np
7 | from model import Model
8 | from data_helpers import get_vocab
9 | 
10 | pp = pprint.PrettyPrinter()
11 | 
12 | flags = tf.app.flags
13 | 
14 | flags.DEFINE_integer('batch_size', 64, 'Batch size')
15 | flags.DEFINE_integer('epochs', 100, 'epochs')
16 | flags.DEFINE_integer('rnn_size', 300, 'RNN unit size')
17 | flags.DEFINE_integer('word_attention_size', 300, 'Word level attention unit size')
18 | flags.DEFINE_integer('sent_attention_size', 300, 'Sentence level attention unit size')
19 | flags.DEFINE_integer('char_embedding_size', 300, 'Embedding dimension')
20 | flags.DEFINE_string('checkpoint_dir', 'checkpoint', 'Directory name to save the checkpoints [checkpoint]')
21 | flags.DEFINE_integer('vocab_size', 6790, 'vocabulary size')
22 | flags.DEFINE_float('keep_prob', 0.5, 'Dropout keep prob')
23 | flags.DEFINE_integer('document_size', 30, 'document size')
24 | flags.DEFINE_integer('sentence_size', 50, 'sentence size')
25 | flags.DEFINE_float('learning_rate', 1e-4, 'learning rate')
26 | flags.DEFINE_float('grad_clip', 5.0, 'grad clip')
27 | 
28 | FLAGS = flags.FLAGS
29 | 
30 | def read_records(index=0):
31 |   train_queue = tf.train.string_input_producer(['./data/train.tfrecords'], num_epochs=FLAGS.epochs)
32 |   valid_queue = tf.train.string_input_producer(['./data/valid.tfrecords'], num_epochs=FLAGS.epochs)
33 |   queue = tf.QueueBase.from_list(index, [train_queue, valid_queue])
34 |   reader = tf.TFRecordReader()
35 |   _, serialized_example = reader.read(queue)
36 |   features = tf.parse_single_example(
37 |       serialized_example,
38 |       features={
39 |           'sentence_lengths': tf.FixedLenFeature([FLAGS.document_size], tf.int64),
40 |           'document_lengths': tf.FixedLenFeature([], tf.int64),
41 |           'label': tf.FixedLenFeature([], tf.int64),
42 |           'text': tf.FixedLenFeature([FLAGS.document_size * FLAGS.sentence_size], tf.int64),
43 |       })
44 | 
45 |   sentence_lengths = features['sentence_lengths']
46 |   document_lengths = features['document_lengths']
47 |   label = features['label']
48 |   text = features['text']
49 | 
50 |   sentence_lengths_batch, document_lengths_batch, label_batch, text_batch = tf.train.shuffle_batch(
51 |       [sentence_lengths, document_lengths, label, text],
52 |       batch_size=FLAGS.batch_size,
53 |       capacity=5000,
54 |       min_after_dequeue=1000)
55 | 
56 |   return sentence_lengths_batch, document_lengths_batch, label_batch, text_batch
57 | 
58 | 
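# Shapes coming out of read_records() under the default flags (sketch):
#   sentence_lengths_batch: (64, 30)   document_lengths_batch: (64,)
#   label_batch:            (64,)      text_batch:             (64, 1500)
# main() below reshapes text_batch to (batch, document_size, sentence_size)
# before feeding it to the model.

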
59 | def main(_):
60 |   pp.pprint(FLAGS.__flags)
61 | 
62 |   if not os.path.exists(FLAGS.checkpoint_dir):
63 |     print(' [*] Creating checkpoint directory...')
64 |     os.makedirs(FLAGS.checkpoint_dir)
65 | 
66 |   checkpoint_path = os.path.join(FLAGS.checkpoint_dir, 'model.ckpt')
67 | 
68 |   # load pre-trained char embedding
69 |   char_emb = np.load('./data/emb.npy')
70 | 
71 |   sentence_lengths_batch, document_lengths_batch, label_batch, text_batch = read_records()
72 |   valid_sentence_lengths_batch, valid_document_lengths_batch, valid_label_batch, valid_text_batch = read_records(1)
73 | 
74 |   text_batch = tf.reshape(text_batch, (-1, FLAGS.document_size, FLAGS.sentence_size))
75 |   valid_text_batch = tf.reshape(valid_text_batch, (-1, FLAGS.document_size, FLAGS.sentence_size))
76 | 
77 |   with tf.variable_scope('model'):
78 |     train_model = Model(FLAGS)
79 |   with tf.variable_scope('model', reuse=True):
80 |     valid_model = Model(FLAGS)
81 | 
82 |   # training operator
83 |   global_step = tf.Variable(0, name='global_step', trainable=False)
84 |   lr = tf.train.exponential_decay(FLAGS.learning_rate, global_step, 10000, 0.9)
85 |   tvars = tf.trainable_variables()
86 |   grads, _ = tf.clip_by_global_norm(tf.gradients(train_model.cost, tvars), FLAGS.grad_clip)
87 |   optimizer = tf.train.AdamOptimizer(lr)
88 |   train_op = optimizer.apply_gradients(zip(grads, tvars), global_step=global_step)
89 | 
90 |   tf.summary.scalar('train_loss', train_model.cost)
91 |   tf.summary.scalar('valid_loss', valid_model.cost)
92 | 
93 |   saver = tf.train.Saver()
94 | 
95 |   with tf.Session() as sess:
96 |     tf.local_variables_initializer().run()
97 |     tf.global_variables_initializer().run()
98 | 
99 |     # assign the pre-trained char embedding; feeding a variable through feed_dict
100 |     # only overrides it for a single step, so use a real assign (the train and
101 |     sess.run(train_model.embedding.assign(char_emb))  # valid models share this variable, so one assign covers both
102 | 
103 |     # saver.restore(sess, checkpoint_path)
104 | 
105 |     # stock_emb = train_model.label_embedding.eval()
106 |     #
107 |     # np.save('./data/stock_emb.npy', stock_emb)
108 |     # print('done')
109 | 
110 |     # summary_op = tf.summary.merge_all()
111 |     # train_writer = tf.summary.FileWriter('./log/train', sess.graph)
112 |     # valid_writer = tf.summary.FileWriter('./log/test')
113 | 
114 |     current_step = 0
115 |     coord = tf.train.Coordinator()
116 |     threads = tf.train.start_queue_runners(coord=coord)
117 |     valid_cost = 0
118 |     valid_accuracy = 0
119 |     train_cost = 0
120 |     VALID_SIZE = 54
121 | 
122 |     _, chars = get_vocab()
123 | 
124 |     try:
125 |       while not coord.should_stop():
126 |         start = time.time()
127 | 
128 |         if current_step % 500 == 0:
129 |           valid_cost = valid_accuracy = 0  # reset both accumulators for this evaluation round
130 |           for _ in range(VALID_SIZE):
131 |             valid_text, valid_label, valid_sentence_lengths, valid_document_lengths =\
132 |               sess.run([valid_text_batch, valid_label_batch, valid_sentence_lengths_batch, valid_document_lengths_batch])
133 | 
134 |             valid_outputs = sess.run([valid_model.cost, valid_model.accuracy], feed_dict={
135 |               valid_model.inputs: valid_text,
136 |               valid_model.labels: valid_label,
137 |               valid_model.sentence_lengths: valid_sentence_lengths,
138 |               valid_model.document_lengths: valid_document_lengths,
139 |               valid_model.is_training: False
140 |             })
141 |             valid_cost += valid_outputs[0]
142 |             valid_accuracy += valid_outputs[1]
143 |           valid_cost /= VALID_SIZE
144 |           valid_accuracy /= VALID_SIZE
145 | 
146 |         inputs, labels, sentence_lengths, document_lengths =\
147 |           sess.run([text_batch, label_batch, sentence_lengths_batch, document_lengths_batch])
148 | 
149 |         # valid_writer.add_summary(summary, current_step)
150 |         train_cost, train_accuracy, _ = sess.run([train_model.cost, train_model.accuracy, train_op], feed_dict={
151 |           train_model.inputs: inputs,
152 |           train_model.labels: labels,
153 |           train_model.sentence_lengths: sentence_lengths,
154 |           train_model.document_lengths: document_lengths,
155 |           train_model.is_training: True
156 |         })
157 |         # train_writer.add_summary(summary, current_step)
158 |         end = time.time()
159 | 
160 |         print('Cost at step %s: %s(%s), test cost: %s(%s), time: %s' %
161 |               (current_step, train_cost, train_accuracy, valid_cost, valid_accuracy, end - start))
162 | 
163 |         current_step = tf.train.global_step(sess, global_step)
164 | 
165 |         if current_step != 0 and current_step % 1000 == 0:
166 |           save_path = saver.save(sess, checkpoint_path)
167 |           print('Model saved in file:', save_path)
168 | 
169 |     except tf.errors.OutOfRangeError:
170 |       print('Done training!')
171 |     finally:
172 |       coord.request_stop()
173 | 
174 |       save_path = saver.save(sess, checkpoint_path)
175 |       print('Model saved in file:', save_path)
176 | 
177 |     coord.join(threads)
178 | 
179 | if __name__ == '__main__':
180 |   tf.app.run()
181 | 
--------------------------------------------------------------------------------
/visualizer.ipynb:
--------------------------------------------------------------------------------
1 | {
2 |  "cells": [
3 |   {
4 |    "cell_type": "code",
5 |    "execution_count": 23,
6 |    "metadata": {
7 |     "collapsed": false
8 |    },
9 |    "outputs": [],
10 |    "source": [
11 |     "import os\n",
12 |     "import re\n",
13 |     "import json\n",
14 |     "import tensorflow as tf\n",
15 |     "from tensorflow.python.platform import flags\n",
16 |     "import numpy as np\n",
17 |     "from model import Model\n",
18 |     "import matplotlib.pyplot as plt\n",
19 |     "import argparse\n",
20 |     "from data_helpers import split_sentence, clean_str, get_vocab\n",
21 |     "\n",
22 |     "plt.rcParams['figure.figsize'] = (16, 12)\n",
23 |     "plt.rcParams['font.sans-serif'] = ['SimHei']\n",
24 |     "plt.rcParams['axes.unicode_minus'] = False\n",
25 |     "\n",
26 |     "%matplotlib inline\n",
27 |     "%config InlineBackend.figure_format = 'retina'\n"
28 |    ]
29 |   },
30 |   {
31 |    "cell_type": "code",
32 |    "execution_count": 2,
33 |    "metadata": {
34 |     "collapsed": false
35 |    },
36 |    "outputs": [],
37 |    "source": [
38 |     "# reset tf.flag\n",
39 |     "tf.app.flags.FLAGS = flags._FlagValues()\n",
40 |     "tf.app.flags._global_parser = argparse.ArgumentParser()\n",
41 |     "\n",
42 |     "flags = tf.app.flags\n",
43 |     "flags.DEFINE_integer('batch_size', 1, 'Batch size')\n",
44 |     "flags.DEFINE_integer('epochs', 100, 'epochs')\n",
45 |     "flags.DEFINE_integer('rnn_size', 300, 'RNN unit size')\n",
46 |     "flags.DEFINE_integer('word_attention_size', 300, 'Word level attention unit size')\n",
47 |     "flags.DEFINE_integer('sent_attention_size', 300, 'Sentence level attention unit size')\n",
48 |     "flags.DEFINE_integer('char_embedding_size', 300, 'Embedding dimension')\n",
dimension')\n", 49 | "flags.DEFINE_string('checkpoint_dir', 'checkpoint', 'Directory name to save the checkpoints [checkpoint]')\n", 50 | "flags.DEFINE_integer('vocab_size', 6790, 'vocabulary size')\n", 51 | "flags.DEFINE_float('keep_prob', 0.5, 'Dropout keep prob')\n", 52 | "flags.DEFINE_integer('document_size', 30, 'document size')\n", 53 | "flags.DEFINE_integer('sentence_size', 50, 'sentence size')\n", 54 | "flags.DEFINE_float('grad_clip', 5.0, 'grad clip')\n", 55 | "FLAGS = flags.FLAGS\n", 56 | "\n", 57 | "checkpoint_path = os.path.join(FLAGS.checkpoint_dir, 'model.ckpt')\n", 58 | "\n", 59 | "# load pre-trained char embedding\n", 60 | "char_emb = np.load('./data/emb.npy')\n", 61 | "\n", 62 | "with tf.variable_scope('model'):\n", 63 | " test_model = Model(FLAGS)\n" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 119, 69 | "metadata": { 70 | "collapsed": false 71 | }, 72 | "outputs": [], 73 | "source": [ 74 | "pos_doc = \"\"\"作者的觀點獨特,語言犀利,深刻的總結了男人與女人之間的是非恩怨,把男人與女人在生活中積累的宿怨一語道破---原來非本質問題,而是雙方本能。此書不足之處乃後面章節雷同,越看越有寡味之感,但是還是要感謝作者帶我們遊覽一次男人、女人的本能世界!呵呵,建議讀者可以看一看,有則改之,無則加勉!!\"\"\"\n", 75 | "neg_doc = \"\"\"書太小了 拿到手裡很是失望本以為會多麼精緻~~~真的是適合小孩看啊可很小的孩子的又看不懂還不是大人給小孩子講所以應該精緻一點才好價錢還是不太值啊\"\"\"" 76 | ] 77 | }, 78 | { 79 | "cell_type": "markdown", 80 | "metadata": {}, 81 | "source": [ 82 | "Documents\n", 83 | "=====\n", 84 | "\n", 85 | "### Positive:\n", 86 | "作者的觀點獨特,語言犀利,深刻的總結了男人與女人之間的是非恩怨,把男人與女人在生活中積累的宿怨一語道破---原來非本質問題,而是雙方本能。此書不足之處乃後面章節雷同,越看越有寡味之感,但是還是要感謝作者帶我們遊覽一次男人、女人的本能世界!呵呵,建議讀者可以看一看,有則改之,無則加勉!!\n", 87 | "\n", 88 | "### Negative:\n", 89 | "書太小了 拿到手裡很是失望本以為會多麼精緻~~~真的是適合小孩看啊可很小的孩子的又看不懂還不是大人給小孩子講所以應該精緻一點才好價錢還是不太值啊\n" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 120, 95 | "metadata": { 96 | "collapsed": false 97 | }, 98 | "outputs": [ 99 | { 100 | "name": "stdout", 101 | "output_type": "stream", 102 | "text": [ 103 | "作者的觀點獨特\n", 104 | "語言犀利\n", 105 | "深刻的總結了男人與女人之間的是非恩怨\n", 106 | "把男人與女人在生活中積累的宿怨一語道破\n", 107 | "原來非本質問題\n", 108 | "而是雙方本能\n", 109 | "此書不足之處乃後面章節雷同\n", 110 | "越看越有寡味之感\n", 111 | "但是還是要感謝作者帶我們遊覽一次男人\n", 112 | "女人的本能世界\n", 113 | "呵呵\n", 114 | "建議讀者可以看一看\n", 115 | "有則改之\n", 116 | "無則加勉 \n", 117 | "\n", 118 | "書太小了\n", 119 | "拿到手裡很是失望本以為會多麼精緻\n", 120 | "真的是適合小孩看啊可很小的孩子的又看不懂還不是大人給小孩子講所以應該精緻一點才好價錢還是不太值啊 \n", 121 | "\n" 122 | ] 123 | } 124 | ], 125 | "source": [ 126 | "vocab, _ = get_vocab('./data/vocab.pkl')\n", 127 | "max_doc_len = FLAGS.document_size\n", 128 | "max_sent_len = FLAGS.sentence_size\n", 129 | "\n", 130 | "def vectorize(doc):\n", 131 | " doc_sents = split_sentence(clean_str(doc))\n", 132 | " document_length = len(doc_sents)\n", 133 | " sentence_lengths = np.zeros((max_doc_len,), dtype=np.int64)\n", 134 | " data = np.ones((max_doc_len * max_sent_len,), dtype=np.int64) # 1 = PAD\n", 135 | " doc_len = min(document_length, max_doc_len)\n", 136 | "\n", 137 | " for j in range(doc_len):\n", 138 | " sent = doc_sents[j]\n", 139 | " actual_len = len(sent)\n", 140 | " pos = j * max_sent_len\n", 141 | " sent_len = min(actual_len, max_sent_len)\n", 142 | " # sentence_lengths\n", 143 | " sentence_lengths[j] = sent_len\n", 144 | " # dataset\n", 145 | " data[pos:pos+sent_len] = [vocab.get(sent[k], 0) for k in range(sent_len)]\n", 146 | " \n", 147 | " return data, document_length, sentence_lengths, doc_sents\n", 148 | "\n", 149 | "pos_data = vectorize(pos_doc)\n", 150 | "neg_data = vectorize(neg_doc)\n", 151 | "\n", 152 | "print('\\n'.join(pos_data[-1]), '\\n')\n", 153 | "print('\\n'.join(neg_data[-1]), '\\n')" 154 | ] 155 | }, 156 | { 157 | 
"cell_type": "code", 158 | "execution_count": 121, 159 | "metadata": { 160 | "collapsed": false 161 | }, 162 | "outputs": [ 163 | { 164 | "name": "stdout", 165 | "output_type": "stream", 166 | "text": [ 167 | "INFO:tensorflow:Restoring parameters from checkpoint/model.ckpt\n" 168 | ] 169 | }, 170 | { 171 | "name": "stderr", 172 | "output_type": "stream", 173 | "text": [ 174 | "INFO:tensorflow:Restoring parameters from checkpoint/model.ckpt\n" 175 | ] 176 | }, 177 | { 178 | "name": "stdout", 179 | "output_type": "stream", 180 | "text": [ 181 | "pos_doc: 1 neg_doc: 0\n" 182 | ] 183 | } 184 | ], 185 | "source": [ 186 | "saver = tf.train.Saver()\n", 187 | "with tf.Session() as sess:\n", 188 | " saver.restore(sess, checkpoint_path)\n", 189 | " \n", 190 | " sess.run([], feed_dict={test_model.embedding: char_emb})\n", 191 | " \n", 192 | " def forward(data, document_length, sentence_lengths):\n", 193 | " pred, attention = sess.run([test_model.pred, test_model.char_attentions], feed_dict={\n", 194 | " test_model.inputs: data.reshape((1, FLAGS.document_size, FLAGS.sentence_size)),\n", 195 | " test_model.sentence_lengths: [sentence_lengths],\n", 196 | " test_model.document_lengths: [document_length],\n", 197 | " test_model.is_training: False\n", 198 | " })\n", 199 | " return pred[0], attention\n", 200 | " \n", 201 | " pos_result, pos_attention = forward(pos_data[0], pos_data[1], pos_data[2])\n", 202 | " neg_result, neg_attention = forward(neg_data[0], neg_data[1], neg_data[2])\n", 203 | "\n", 204 | "print('pos_doc: ', pos_result, 'neg_doc:', neg_result)" 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": 122, 210 | "metadata": { 211 | "collapsed": false 212 | }, 213 | "outputs": [ 214 | { 215 | "data": { 216 | "application/javascript": [ 217 | "\n", 218 | " window.pos_attention=[[[0.015739217400550842], [0.016517963260412216], [0.02646193653345108], [0.014510318636894226], [0.03354310244321823], [0.04292082414031029], [0.035157326608896255], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426], [0.01895696111023426]], [[0.0200754813849926], [0.04622826352715492], [0.0353507325053215], [0.04024874046444893], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], [0.01865427941083908], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]]];\n", 219 | " window.neg_attention=[[[0.021320132538676262], [0.019803090021014214], [0.007362167350947857], [0.03556017577648163], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374], [0.019912056624889374]], [[0.010753834620118141], [0.011556935496628284], [0.019164109602570534], [0.08377645909786224], [0.05491115525364876], 
[0.03573393076658249], [0.03631164878606796], [0.0395965613424778], [0.0405363067984581], [0.014468257315456867], [0.017827892675995827], [0.0159095861017704], [0.01627264730632305], [0.03051900677382946], [0.025570303201675415], [0.031108791008591652], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975], [0.015175954438745975]], [[0.02041417360305786], [0.028972148895263672], [0.016511525958776474], [0.016160670667886734], [0.014390633441507816], [0.015896182507276535], [0.012575287371873856], [0.028222765773534775], [0.03542638570070267], [0.0131822619587183], [0.008579907938838005], [0.007905285805463791], [0.008034045808017254], [0.005735623650252819], [0.008156300522387028], [0.004871524404734373], [0.011524590663611889], [0.011995930224657059], [0.024501007050275803], [0.09452126175165176], [0.03197263181209564], [0.053579531610012054], [0.083500437438488], [0.02532924711704254], [0.03632040321826935], [0.028527190908789635], [0.015425660647451878], [0.014293434098362923], [0.03263203427195549], [0.02204308845102787], [0.011421012692153454], [0.007416676264256239], [0.007705042138695717], [0.008996419608592987], [0.005934998858720064], [0.012047926895320415], [0.0055339825339615345], [0.0050652227364480495], [0.014626240357756615], [0.0162619911134243], [0.01648116484284401], [0.018484536558389664], [0.007615578360855579], [0.00770860631018877], [0.01146935485303402], [0.0264904722571373], [0.02992531843483448], [0.02364170365035534], [0.01598626933991909], [0.01598626933991909]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], 
[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]], [[0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164], [0.019999999552965164]]];\n", 220 | " window.pos_doc=['作者的觀點獨特', '語言犀利', '深刻的總結了男人與女人之間的是非恩怨', '把男人與女人在生活中積累的宿怨一語道破', '原來非本質問題', '而是雙方本能', '此書不足之處乃後面章節雷同', '越看越有寡味之感', '但是還是要感謝作者帶我們遊覽一次男人', '女人的本能世界', '呵呵', '建議讀者可以看一看', '有則改之', '無則加勉'];\n", 221 | " window.neg_doc=['書太小了', '拿到手裡很是失望本以為會多麼精緻', '真的是適合小孩看啊可很小的孩子的又看不懂還不是大人給小孩子講所以應該精緻一點才好價錢還是不太值啊'];\n", 222 | " " 223 | ], 224 | "text/plain": [ 225 | "" 226 | ] 227 | }, 228 | "execution_count": 122, 229 | "metadata": {}, 230 | "output_type": "execute_result" 231 | } 232 | ], 233 | "source": [ 234 | "from IPython.display import Javascript\n", 235 | "\n", 236 | "#runs arbitrary javascript, client-side\n", 237 | "Javascript(\"\"\"\n", 238 | " 
238 |     "        window.pos_attention={};\n",
239 |     "        window.neg_attention={};\n",
240 |     "        window.pos_doc={};\n",
241 |     "        window.neg_doc={};\n",
242 |     "    \"\"\".format(json.dumps(pos_attention.tolist()),\n",
243 |     "               json.dumps(neg_attention.tolist()),\n",
244 |     "               pos_data[-1],\n",
245 |     "               neg_data[-1]))\n"
246 |    ]
247 |   },
248 |   {
249 |    "cell_type": "code",
250 |    "execution_count": 123,
251 |    "metadata": {
252 |     "collapsed": false
253 |    },
254 |    "outputs": [
255 |     {
256 |      "data": {
257 |       "application/javascript": [
258 |        "\n",
259 |        "function visualizer (doc, attention) {\n",
260 |        "  const elements = attention\n",
261 |        "    .filter((row, i) => i < doc.length)\n",
262 |        "    .map((row, i) => {\n",
263 |        "      const sent = doc[i];\n",
264 |        "      const elems = row\n",
265 |        "        .filter((col, j) => j < sent.length)\n",
266 |        "        .map((col, j) => {\n",
267 |        "          const char = sent[j];\n",
268 |        "          const weight = col * 10;\n",
269 |        "          return '<span style=\"background-color: rgba(255, 0, 0, ' + weight + ')\">' + char + '</span>';\n",
270 |        "        })\n",
271 |        "        .join('');\n",
272 |        "      return '<div><p>' + elems + '</p></div>';\n",
273 |        "  });\n",
274 |        "\n",
275 |        "  return elements.join('\\n');\n",
276 |        "}\n",
277 |        "\n",
278 |        "const visualized_pos_doc = visualizer(pos_doc, pos_attention)\n",
279 |        "element.append('<div><h3>Positive Document with attention</h3>' + visualized_pos_doc + '</div>')\n",
280 |        "\n",
281 |        "const visualized_neg_doc = visualizer(neg_doc, neg_attention)\n",
282 |        "element.append('<div><h3>Negative Document with attention</h3>' + visualized_neg_doc + '</div>')"
283 |       ],
284 |       "text/plain": [
285 |        "<IPython.core.display.Javascript object>"
286 |       ]
287 |      },
288 |      "metadata": {},
289 |      "output_type": "display_data"
290 |     }
291 |    ],
292 |    "source": [
293 |     "%%javascript\n",
294 |     "\n",
295 |     "function visualizer (doc, attention) {\n",
296 |     "  const elements = attention\n",
297 |     "    .filter((row, i) => i < doc.length)\n",
298 |     "    .map((row, i) => {\n",
299 |     "      const sent = doc[i];\n",
300 |     "      const elems = row\n",
301 |     "        .filter((col, j) => j < sent.length)\n",
302 |     "        .map((col, j) => {\n",
303 |     "          const char = sent[j];\n",
304 |     "          const weight = col * 10;\n",
305 |     "          return '<span style=\"background-color: rgba(255, 0, 0, ' + weight + ')\">' + char + '</span>';\n",
306 |     "        })\n",
307 |     "        .join('');\n",
308 |     "      return '<div><p>' + elems + '</p></div>';\n",
309 |     "  });\n",
310 |     "\n",
311 |     "  return elements.join('\\n');\n",
312 |     "}\n",
313 |     "\n",
314 |     "const visualized_pos_doc = visualizer(pos_doc, pos_attention)\n",
315 |     "element.append('<div><h3>Positive Document with attention</h3>' + visualized_pos_doc + '</div>')\n",
316 |     "\n",
317 |     "const visualized_neg_doc = visualizer(neg_doc, neg_attention)\n",
318 |     "element.append('<div><h3>Negative Document with attention</h3>' + visualized_neg_doc + '</div>')"
319 |    ]
320 |   },
321 |   {
322 |    "cell_type": "code",
323 |    "execution_count": null,
324 |    "metadata": {
325 |     "collapsed": true
326 |    },
327 |    "outputs": [],
328 |    "source": []
329 |   }
330 |  ],
331 |  "metadata": {
332 |   "kernelspec": {
333 |    "display_name": "Python (ENV)",
334 |    "language": "python",
335 |    "name": "env"
336 |   },
337 |   "language_info": {
338 |    "codemirror_mode": {
339 |     "name": "ipython",
340 |     "version": 3
341 |    },
342 |    "file_extension": ".py",
343 |    "mimetype": "text/x-python",
344 |    "name": "python",
345 |    "nbconvert_exporter": "python",
346 |    "pygments_lexer": "ipython3",
347 |    "version": "3.5.2"
348 |   }
349 |  },
350 |  "nbformat": 4,
351 |  "nbformat_minor": 2
352 | }
353 | 
--------------------------------------------------------------------------------