├── data ├── train.tgs ├── train.tks ├── labellist ├── dev.txt ├── train.txt └── wordvec ├── model ├── preprocess.py ├── data_scripts.py ├── utils.py ├── eval.py ├── train.py ├── README.md └── model.py /data/train.tgs: -------------------------------------------------------------------------------- 1 | O O O O Sentence O O O Sentence O O O O O O O O O Arrest-Jail O -------------------------------------------------------------------------------- /data/train.tks: -------------------------------------------------------------------------------- 1 | drug lord handed maximum sentence A US judge sentences Christopher ' Dudus ' Coke to 23 years in prison . -------------------------------------------------------------------------------- /data/labellist: -------------------------------------------------------------------------------- 1 | O 2 | Sue 3 | Arrest-Jail 4 | Injure 5 | Phone-Write 6 | Attack 7 | Acquit 8 | End-Position 9 | Meet 10 | Convict 11 | Appeal 12 | Trial-Hearing 13 | Extradite 14 | Declare-Bankruptcy 15 | Be-Born 16 | Divorce 17 | Start-Org 18 | Execute 19 | Release-Parole 20 | Transfer-Ownership 21 | Die 22 | Marry 23 | Transfer-Money 24 | Demonstrate 25 | Nominate 26 | Start-Position 27 | Elect 28 | Pardon 29 | Sentence 30 | Merge-Org 31 | End-Org 32 | Fine 33 | Charge-Indict 34 | Transport 35 | -------------------------------------------------------------------------------- /preprocess.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from data_scripts import * 3 | 4 | 5 | def sentence_to_token_ids(sentence, vocabulary, tokenizer): 6 | words = 
tokenizer(sentence) 7 | return [vocabulary.get(w, UNK_ID) for w in words] if 8 <= len(words) <= 80 else [] 8 | 9 | 10 | def data_to_token_ids(data_path, vocabulary_path, tokenizer, start=True, lower=False): 11 | _, vocab = initialize_vocabulary(vocabulary_path, start) 12 | with open(data_path, "r", encoding="utf-8") as f: 13 | for line in f: 14 | if lower: 15 | line = line.lower() 16 | token_ids = sentence_to_token_ids(line, vocab, tokenizer) 17 | 18 | yield " ".join([str(tok) for tok in token_ids]) 19 | 20 | 21 | def merge(tks, tgs, file_path): 22 | with open(file_path, 'w') as f: 23 | for x, y in zip(tks, tgs): 24 | f.write(x + '\t' + y + '\n') 25 | 26 | 27 | if __name__ == '__main__': 28 | tokenizer = lambda x: x.split() 29 | tks = data_to_token_ids('data/train.tks', 'data/wordlist', tokenizer, lower=True) 30 | tgs = data_to_token_ids('data/train.tgs', 'data/labellist', tokenizer, False) 31 | merge(tks, tgs, 'data/train.txt') 32 | -------------------------------------------------------------------------------- /data_scripts.py: -------------------------------------------------------------------------------- 1 | import random 2 | import numpy as np 3 | 4 | 5 | PAD_TOKEN = '<PAD>' # pad symbol 6 | UNK_TOKEN = '<UNK>' # unknown word 7 | 8 | # we always put them at the start. 9 | _START_VOCAB = [PAD_TOKEN, UNK_TOKEN] 10 | PAD_ID = 0 11 | UNK_ID = 1 12 | 13 | 14 | def initialize_vocabulary(vocab_path, start=True): 15 | """Load the vocabulary file into (rev_vocab, vocab); optionally prepend PAD and UNK. 16 | """ 17 | with open(vocab_path, 'r', encoding="utf-8") as f: 18 | rev_vocab = [line.strip() for line in f] 19 | if start: 20 | rev_vocab = _START_VOCAB + rev_vocab 21 | vocab = dict([(x, y) for (y, x) in enumerate(rev_vocab)]) 22 | return rev_vocab, vocab 23 | 24 | 25 | def load_data(file_path, training=False): 26 | with open(file_path, 'r') as f: 27 | lines = f.readlines() 28 | content = [line.strip() for line in lines if len(line.strip()) > 1] 29 | data = [(line.split('\t')[0].strip().split(), line.split('\t')[1].strip().split()) for line in content] 30 | if training: 31 | data = [([int(item) for item in sent], [int(item) for item in tag]) for sent, tag in data if len(sent) < 80 and len(sent) > 8] 32 | else: 33 | data = [([int(item) for item in sent], [int(item) for item in tag]) for sent, tag in data] 34 | return data 35 | 36 | 37 | def load_pretrain(file_path): 38 | with open(file_path, 'r') as f: 39 | lines = f.readlines() 40 | pretrain_embedding = [] 41 | 42 | for line in lines: 43 | embed_word = line.strip().split(',') 44 | embed_word = [float(item) for item in embed_word] 45 | pretrain_embedding.append(embed_word) 46 | 47 | pretrain_embed_dim = len(pretrain_embedding[0]) 48 | tmp = [] 49 | tmp.append([random.uniform(-1, 1) for _ in range(pretrain_embed_dim)]) # random vector for PAD 50 | tmp.append([random.uniform(-1, 1) for _ in range(pretrain_embed_dim)]) # random vector for UNK 51 | pretrain_embedding = tmp + pretrain_embedding 52 | return np.matrix(pretrain_embedding) -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import random 2 | import numpy as np 3 | import tensorflow as tf 4 | import os 5 | from collections import Counter 6 | 7 | def pad_batch(batch, max_len=-1, shuffle=True): 8 | sentences_in = list(batch[0]) 9 | targets = list(batch[1]) 10 | if shuffle: # actually sorts by descending length; the stable sort keeps (sentence, target) pairs aligned 11 | sentences_in.sort(key=lambda s: -1 * len(s)) 12 | targets.sort(key=lambda s: -1 * len(s)) 13 | 14 | lens = np.array([len(s) for s in sentences_in], dtype=np.int32) 15 | 16 | if max_len == -1: 17 | max_len = 
max(lens) 18 | 19 | batch_size = len(sentences_in) 20 | 21 | sentences_in_batch = np.zeros((batch_size, max_len), dtype=np.int32) 22 | targets_in_batch = np.zeros((batch_size, max_len), dtype=np.int32) 23 | 24 | for i in range(batch_size): 25 | sent = sentences_in[i] 26 | target = targets[i] 27 | 28 | l = len(sent) 29 | sentences_in_batch[i, :l] = sent 30 | targets_in_batch[i, :l] = target 31 | return sentences_in_batch, targets_in_batch, lens 32 | 33 | 34 | def uniform_tensor(shape, name, dtype=tf.float32): 35 | return tf.random_uniform(shape=shape, dtype=dtype, name=name) 36 | 37 | 38 | def batch_iter(data, batch_size, num_epochs, shuffle=True): 39 | """ 40 | Generates a batch iterator for a dataset. 41 | """ 42 | data = np.array(data) 43 | data_size = len(data) 44 | num_batches_per_epoch = int((len(data)-1)/batch_size) + 1 45 | for _ in range(num_epochs): 46 | # Shuffle the data at each epoch 47 | if shuffle: 48 | shuffle_indices = np.random.permutation(np.arange(data_size)) 49 | shuffled_data = data[shuffle_indices] 50 | else: 51 | shuffled_data = data 52 | for batch_num in range(num_batches_per_epoch): 53 | start_index = batch_num * batch_size 54 | end_index = min((batch_num + 1) * batch_size, data_size) 55 | yield shuffled_data[start_index:end_index] 56 | 57 | 58 | -------------------------------------------------------------------------------- /data/dev.txt: -------------------------------------------------------------------------------- 1 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 2 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 3 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 4 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 5 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 6 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 7 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 8 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 9 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 10 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 11 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 12 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 13 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 14 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 15 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 16 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 17 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 18 
| 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 19 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 20 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 21 | -------------------------------------------------------------------------------- /data/train.txt: -------------------------------------------------------------------------------- 1 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 2 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 3 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 4 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 5 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 6 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 7 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 8 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 9 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 10 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 11 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 12 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 13 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 14 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 15 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 16 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 17 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 18 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 19 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 20 | 531 3620 4242 3779 2773 6 233 457 4893 2291 86 1 86 6730 8 1149 78 9 1180 4 0 0 0 0 29 0 0 0 29 0 0 0 0 0 0 0 0 0 3 0 21 | -------------------------------------------------------------------------------- /eval.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import tensorflow as tf 3 | from model import Model 4 | from data_scripts import load_data, initialize_vocabulary, load_pretrain 5 | 6 | if len(sys.argv) < 2: 7 | print('Usage: python eval.py test_file') 8 | exit(-1) 9 | 10 | _, vocab = initialize_vocabulary('data/wordlist') 11 | vocab_size = len(vocab) 12 | FLAGS = tf.app.flags.FLAGS 13 | 
tf.app.flags.DEFINE_integer('classes', 34, 'Number of classes (default: 34)') 14 | 15 | # Model Hyperparameters 16 | tf.app.flags.DEFINE_integer('sequence_length', 80, 'Sentence length (default: 80)') 17 | tf.app.flags.DEFINE_integer('num_layers', 1, 'Number of hidden layers (default: 1)') 18 | tf.app.flags.DEFINE_integer('hidden_size', 150, 'Hidden size (default: 150)') 19 | tf.app.flags.DEFINE_float('feature_weight_dropout', 0.2, 'Feature weight dropout rate (default: 0.2)') 20 | tf.app.flags.DEFINE_float('dropout_rate', 0, 'Dropout rate (default: 0)') 21 | tf.app.flags.DEFINE_string("rnn_unit", "lstm", "RNN unit type (default: lstm)") 22 | tf.app.flags.DEFINE_float('lr_decay', 0.95, 'LR decay rate (default: 0.95)') 23 | tf.app.flags.DEFINE_float('learning_rate', 0.3, 'Learning rate (default: 0.3)') 24 | tf.app.flags.DEFINE_float('l2_rate', 0.00, 'L2 rate (default: 0)') 25 | 26 | # Training parameters 27 | tf.app.flags.DEFINE_integer('num_epochs', 200, 'Number of training epochs (default: 200)') 28 | tf.app.flags.DEFINE_integer('train_max_patience', 100, 'Training patience (default: 100)') 29 | tf.app.flags.DEFINE_integer('batch_size', 10, 'Batch Size (default: 10)') 30 | tf.app.flags.DEFINE_string("model_path", "model/best.pkl", "Path where the model is saved (default: model/best.pkl)") 31 | tf.app.flags.DEFINE_string("feature_weight_shape", "[" + str(vocab_size) + ", 300]", "Shape of the feature weight table (default: [vocab_size, 300])") 32 | 33 | FLAGS._parse_flags() 34 | config = dict(FLAGS.__flags.items()) 35 | print("\nParameters:") 36 | for attr, value in sorted(FLAGS.__flags.items()): 37 | print("\t{} = {}".format(attr.upper(), value)) 38 | print("") 39 | pretrain_embedding = load_pretrain('data/wordvec') 40 | config['feature_init_weight'] = pretrain_embedding 41 | 42 | model = Model(config) 43 | test_data = load_data(sys.argv[1], False) 44 | test_data_sent = [item[0] for item in test_data] 45 | test_data_label = [item[1] for item in test_data] 46 | test_data = (test_data_sent, test_data_label) 47 | 48 | saver = tf.train.Saver() 49 | saver.restore(model.sess, 'model/best.pkl') 50 | model.predict(test_data, True) -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Created on Oct 30, 2017 3 | 4 | @author: zhouwenxuan 5 | ''' 6 | 7 | import tensorflow as tf 8 | import numpy as np 9 | import os 10 | from data_scripts import initialize_vocabulary, load_data, load_pretrain 11 | from model import Model 12 | 13 | 14 | THIS_DIR = os.path.abspath(os.path.dirname(__file__)) 15 | _, vocab = initialize_vocabulary('data/wordlist') 16 | vocab_size = len(vocab) 17 | 18 | FLAGS = tf.app.flags.FLAGS 19 | tf.app.flags.DEFINE_integer('classes', 34, 'Number of classes (default: 34)') 20 | 21 | # Model Hyperparameters 22 | tf.app.flags.DEFINE_integer('sequence_length', 80, 'Sentence length (default: 80)') 23 | tf.app.flags.DEFINE_integer('num_layers', 1, 'Number of hidden layers (default: 1)') 24 | tf.app.flags.DEFINE_integer('hidden_size', 150, 'Hidden size (default: 150)') 25 | tf.app.flags.DEFINE_float('feature_weight_dropout', 0.2, 'Feature weight dropout rate (default: 0.2)') 26 | tf.app.flags.DEFINE_float('dropout_rate', 0, 'Dropout rate (default: 0)') 27 | tf.app.flags.DEFINE_string("rnn_unit", "lstm", "RNN unit type (default: lstm)") 28 | tf.app.flags.DEFINE_float('lr_decay', 0.95, 'LR decay rate (default: 0.95)') 29 | tf.app.flags.DEFINE_float('learning_rate', 0.3, 'Learning rate (default: 0.3)')
30 | tf.app.flags.DEFINE_float('l2_rate', 0.00, 'L2 rate (default: 0)') 31 | 32 | # Training parameters 33 | tf.app.flags.DEFINE_integer('num_epochs', 200, 'Number of training epochs (default: 200)') 34 | tf.app.flags.DEFINE_integer('train_max_patience', 100, 'Training patience (default: 100)') 35 | tf.app.flags.DEFINE_integer('batch_size', 10, 'Batch Size (default: 10)') 36 | tf.app.flags.DEFINE_string("model_path", "model/best.pkl", "Path where the model is saved (default: model/best.pkl)") 37 | tf.app.flags.DEFINE_string("feature_weight_shape", "[" + str(vocab_size) + ", 300]", "Shape of the feature weight table (default: [vocab_size, 300])") 38 | 39 | FLAGS._parse_flags() 40 | config = dict(FLAGS.__flags.items()) 41 | print("\nParameters:") 42 | for attr, value in sorted(FLAGS.__flags.items()): 43 | print("\t{} = {}".format(attr.upper(), value)) 44 | print("") 45 | 46 | def main(): 47 | 48 | 49 | train_data = load_data('data/train.txt') 50 | dev_data = load_data('data/dev.txt', False) 51 | pretrain_embedding = load_pretrain('data/wordvec') 52 | config['feature_init_weight'] = pretrain_embedding 53 | 54 | model = Model(config) 55 | train_data_sent = [item[0] for item in train_data] 56 | train_data_label = [item[1] for item in train_data] 57 | dev_data_sent = [item[0] for item in dev_data] 58 | dev_data_label = [item[1] for item in dev_data] 59 | train_data = (train_data_sent, train_data_label) 60 | dev_data = (dev_data_sent, dev_data_label) 61 | model.fit(train_data, dev_data) 62 | 63 | #predict 64 | # saver = tf.train.Saver() 65 | # saver.restore(model.sess, config['model_path']) 66 | # model.predict(test_data) 67 | 68 | if __name__ == '__main__': 69 | main() 70 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Self-regulation: Employing a Generative Adversarial Network to Improve Event Detection 2 | ===== 3 | 4 | This is a slightly simplified implementation of our paper Self-regulation: Employing a Generative Adversarial Network to Improve Event Detection in Tensorflow. 5 | 6 | Requirements 7 | ----- 8 | Python 3.6 9 | Tensorflow 1.2 10 | Numpy 11 | sklearn 12 | 13 | 14 | 15 | Corpus: 16 | ---- 17 | The ACE 2005 corpus has been used in our experiments, though we have no right to share this corpus with anyone. 18 | One may submit a request to the Linguistic Data Consortium (LDC) for approval to download and use the corpus, 19 | or consult the paper "The Automatic Content Extraction (ACE) Program, Task, Data, and Evaluation" for ways to obtain it. 20 | 21 | For carrying out the adaptation experiments with the TAC-KBP event nugget detection corpus, 22 | we suggest requesting access from the LDC or visiting the homepage of the TAC program. 23 | 24 | 25 | Data sets: 26 | ---- 27 | Five data sets are needed in the experiments for event detection: the lexicon, the training and 28 | development datasets, and the files that respectively contain the event types and the pretrained word embeddings. 29 | Listed below are the filenames of the datasets, which must be used without any change. 30 | One may find the files in the data directory (the line format of the training and development files is sketched right after the list): 31 | 32 | data/train.txt: training dataset 33 | data/dev.txt: development dataset 34 | data/wordlist: lexicon 35 | data/labellist: event types 36 | data/wordvec: pretrained word embeddings 37 | 
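Each line of data/train.txt and data/dev.txt pairs the token ids of one sentence with the label ids of its tags, separated by a tab; this is what merge() in preprocess.py writes and what load_data() in data_scripts.py reads back. A minimal parsing sketch (the sample line below is shortened for illustration):

    # one line of data/train.txt: "<word ids>\t<label ids>", both space-separated
    line = "531 3620 4242 3779 2773\t0 0 0 0 29"   # shortened sample
    sent, tags = line.split('\t')
    word_ids = [int(t) for t in sent.split()]      # indices into data/wordlist (offset by 2 for PAD/UNK)
    label_ids = [int(t) for t in tags.split()]     # indices into data/labellist
    assert len(word_ids) == len(label_ids)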
38 | In this package, we have provided files named in that way. There is an example reserved in each of the files data/train.txt, data/dev.txt and data/wordvec. 39 | By contrast, data/wordlist contains all the tokens occurring in the ACE corpus, and data/labellist lists all the concerned event types. 40 | 41 | 42 | Note (about pretrained word and sentence embeddings): 43 | ---- 44 | The length of an input sentence is limited to be longer than 8 but shorter than 80 tokens. 45 | If the real length of a sentence is out of this range, padding or pruning needs to be used for the generation of the sentence embedding. 46 | 47 | One may provide a file which contains word embeddings pretrained by her/himself. In such a case, the file name should be the same as the one mentioned above. 48 | If not, you'd better comment out the line pretrain_embedding = load_pretrain('data/wordvec') in train.py, and meanwhile modify the following line to config['feature_init_weight'] = None. 49 | We recommend using the word embeddings trained in Feng et al.'s work (Feng et al. 2016. A language-independent neural network for event detection. ACL'16). 50 | 51 | 52 | Preprocess: 53 | ---- 54 | python preprocess.py 55 | Run preprocess.py by executing the command above, so as to obtain the files train.txt and dev.txt. 56 | 57 | 58 | Train: 59 | ---- 60 | python train.py 61 | Run train.py by executing the command above. The default parameters are used. 62 | 63 | python train.py --help 64 | 65 | usage: train.py [-h] [--classes CLASSES] [--sequence_length SEQUENCE_LENGTH] 66 | [--num_layers NUM_LAYERS] [--hidden_size HIDDEN_SIZE] 67 | [--feature_weight_dropout FEATURE_WEIGHT_DROPOUT] 68 | [--dropout_rate DROPOUT_RATE] [--rnn_unit RNN_UNIT] 69 | [--lr_decay LR_DECAY] [--learning_rate LEARNING_RATE] 70 | [--l2_rate L2_RATE] [--num_epochs NUM_EPOCHS] 71 | [--train_max_patience TRAIN_MAX_PATIENCE] 72 | [--batch_size BATCH_SIZE] [--model_path MODEL_PATH] 73 | [--feature_weight_shape FEATURE_WEIGHT_SHAPE] 74 | 75 | optional arguments: 76 | -h, --help show this help message and exit 77 | --classes CLASSES Number of classes (default: 34) 78 | --sequence_length SEQUENCE_LENGTH 79 | Sentence length (default: 80) 80 | --num_layers NUM_LAYERS 81 | Number of hidden layers (default: 1) 82 | --hidden_size HIDDEN_SIZE 83 | Hidden size (default: 150) 84 | --feature_weight_dropout FEATURE_WEIGHT_DROPOUT 85 | Feature weight dropout rate (default: 0.2) 86 | --dropout_rate DROPOUT_RATE 87 | Dropout rate (default: 0) 88 | --rnn_unit RNN_UNIT RNN unit type (default: lstm) 89 | --lr_decay LR_DECAY LR decay rate (default: 0.95) 90 | --learning_rate LEARNING_RATE 91 | Learning rate (default: 0.3) 92 | --l2_rate L2_RATE L2 rate (default: 0) 93 | --num_epochs NUM_EPOCHS 94 | Number of training epochs (default: 200) 95 | --train_max_patience TRAIN_MAX_PATIENCE 96 | Training patience (default: 100) 97 | --batch_size BATCH_SIZE 98 | Batch Size (default: 10) 99 | --model_path MODEL_PATH 100 | Path where the model is saved (default: model/best.pkl) 101 | --feature_weight_shape FEATURE_WEIGHT_SHAPE 102 | Shape of the feature weight table (default: [vocab_size, 300]) 103 | 104 | Eval: 105 | ---- 106 | python eval.py test.txt 107 | 108 | The model which has been trained well is preserved under the model/ directory. 109 | From here on, one may let the model perform on the test data set. Run eval.py by executing the command above; the evaluation results for the given test file (test.txt) are printed. 110 | If one would like to take entity embeddings into consideration, the source code needs to be modified a little bit (it is your turn now). 
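For reference, restoring and applying the trained model reduces to the following (a minimal sketch of what eval.py does, assuming the flags above have been defined and config has been filled in, including config['feature_init_weight']):

    # core of eval.py (sketch, not a standalone script)
    model = Model(config)                         # builds the graph from the flag values
    test_data = load_data('test.txt', False)      # no length filtering at test time
    sents = [item[0] for item in test_data]
    labels = [item[1] for item in test_data]
    saver = tf.train.Saver()
    saver.restore(model.sess, 'model/best.pkl')   # the default --model_path
    model.predict((sents, labels), True)          # prints precision / recall / F1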
111 | 112 | 113 | 114 | -------------------------------------------------------------------------------- /model.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Created on Oct 28, 2017 3 | 4 | @author: zhouwenxuan 5 | ''' 6 | 7 | import tensorflow as tf 8 | import time 9 | import datetime 10 | import math 11 | import numpy as np 12 | 13 | from tensorflow.contrib import rnn 14 | from sklearn.metrics import precision_recall_fscore_support, precision_score, recall_score, f1_score 15 | from utils import uniform_tensor, batch_iter, pad_batch 16 | 17 | 18 | class Model(object): 19 | def __init__(self, config): 20 | """ 21 | config: parameter dictionary 22 | sequence_length: sentence length after padding 23 | 24 | """ 25 | self.sequence_length = config['sequence_length'] 26 | self.classes = config['classes'] 27 | self.hidden_size = config['hidden_size'] 28 | self.num_layers = config['num_layers'] 29 | self.feature_init_weight = config['feature_init_weight'] 30 | 31 | self.feature_weight_shape = eval(config['feature_weight_shape']) 32 | self.feature_weight_dropout = config['feature_weight_dropout'] 33 | self.dropout_rate = config['dropout_rate'] 34 | self.rnn_unit = config['rnn_unit'] 35 | self.model_path = config['model_path'] 36 | self.l2_rate = config['l2_rate'] 37 | self.learning_rate = config['learning_rate'] 38 | self.batch_size = config['batch_size'] 39 | self.num_epochs = config['num_epochs'] 40 | self.train_max_patience = config['train_max_patience'] 41 | self.max_dev_f1 = 0.0 42 | self.build() 43 | 44 | def build(self): 45 | """ 46 | Build the graph: placeholders, embeddings, bi-RNN encoders, the GAN branches and the training ops. 47 | """ 48 | 49 | self.input_ph = tf.placeholder(dtype=tf.int32, shape=[None, self.sequence_length], name='input') 50 | self.sequence_actual_lengths_ph = tf.placeholder(dtype=tf.int32, shape=[None], name='sequence_actual_lengths') 51 | self.sequence_last_index_ph = tf.placeholder(dtype=tf.int32, name='sequence_last_index') 52 | self.dropout_rate_ph = tf.placeholder(tf.float32, name='dropout_rate') 53 | self.label_ph = tf.placeholder(tf.int32, shape=[None, self.sequence_length], name='label') 54 | self.weight_dropout_ph = tf.placeholder(dtype=tf.float32, name='weight_dropout') 55 | 56 | if self.feature_init_weight is None: 57 | self.feature_weight = tf.Variable( 58 | initial_value=uniform_tensor( 59 | shape=self.feature_weight_shape, 60 | name='f_W', 61 | dtype=tf.float32 62 | ), 63 | name='feature_W', 64 | ) 65 | else: 66 | self.feature_weight = tf.Variable( 67 | initial_value=self.feature_init_weight, 68 | name='feature_W', 69 | dtype=tf.float32, 70 | trainable=True 71 | ) 72 | 73 | self.feature_embedding = tf.nn.dropout( 74 | x=tf.nn.embedding_lookup( 75 | self.feature_weight, 76 | self.input_ph, 77 | name='feature_embedding' 78 | ), 79 | keep_prob=1.0 - self.weight_dropout_ph, 80 | name='feature_embedding_dropout' 81 | ) 82 | 83 | # define rnn cell 84 | def cell(): 85 | if self.rnn_unit == 'lstm': 86 | return rnn.BasicLSTMCell(self.hidden_size, state_is_tuple=True) 87 | elif self.rnn_unit == 'gru': 88 | return rnn.GRUCell(self.hidden_size) 89 | else: 90 | raise ValueError('rnn_unit must be one of (lstm, gru)') 91 | 92 | 93 | 94 | def attn_cell(): 95 | return rnn.DropoutWrapper( 96 | cell(), 97 | output_keep_prob=1.0 # dropout effectively disabled here 98 | ) 99 | # 100 | cell_fw = rnn.MultiRNNCell( 101 | [attn_cell() for _ in range(self.num_layers)], 102 | state_is_tuple=True 103 | ) 104 | 105 | cell_bw = rnn.MultiRNNCell( 106 | [attn_cell() for _ in range(self.num_layers)], 107 | state_is_tuple=True 108 | ) 109 | # create gan's generator 110 | g_cell_fw = rnn.MultiRNNCell( 
111 | [attn_cell() for _ in range(self.num_layers)], 112 | state_is_tuple=True 113 | ) 114 | 115 | g_cell_bw = rnn.MultiRNNCell( 116 | [attn_cell() for _ in range(self.num_layers)], 117 | state_is_tuple=True 118 | ) 119 | 120 | self.feature_entity_embedding = self.feature_embedding 121 | 122 | self.bi_rnn_outputs, _ = tf.nn.bidirectional_dynamic_rnn( 123 | cell_fw, 124 | cell_bw, 125 | inputs=self.feature_entity_embedding, 126 | sequence_length=self.sequence_actual_lengths_ph, 127 | dtype=tf.float32, 128 | scope='bi-rnn') 129 | 130 | # outputs of the gan's generator 131 | self.g_outputs, _ = tf.nn.bidirectional_dynamic_rnn( 132 | g_cell_fw, 133 | g_cell_bw, 134 | inputs=self.feature_entity_embedding, 135 | sequence_length=self.sequence_actual_lengths_ph, 136 | dtype=tf.float32, 137 | scope='g_bi-rnn') 138 | 139 | 140 | 141 | self.bi_rnn_outputs_dropout = tf.nn.dropout( 142 | tf.concat(self.bi_rnn_outputs, axis=-1, name='bi_rnn_outputs'), 143 | keep_prob=1.0, 144 | name='bi_rnn_outputs_dropout' 145 | ) 146 | 147 | self.g_bi_rnn_outputs_dropout = tf.nn.dropout( 148 | tf.concat(self.g_outputs, axis=-1, name='g_bi_rnn_outputs'), 149 | keep_prob=1.0, 150 | name='g_bi_rnn_outputs_dropout' 151 | ) 152 | # outputs of sequences without paddings 153 | mask = tf.sequence_mask(self.sequence_actual_lengths_ph, self.sequence_length) 154 | self.outputs = tf.boolean_mask(self.bi_rnn_outputs_dropout, mask, name='outputs') 155 | g_outputs = tf.boolean_mask(self.g_bi_rnn_outputs_dropout, mask, name='g_outputs') 156 | 157 | self.softmax_W = tf.get_variable('softmax_W', [self.hidden_size*2, self.classes], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer()) 158 | self.g_softmax_W = tf.get_variable('g_softmax_W', [self.hidden_size*2, self.classes], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer()) 159 | 160 | self.softmax_b = tf.get_variable('softmax_b', [self.classes], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer()) 161 | self.g_softmax_b = tf.get_variable('g_softmax_b', [self.classes], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer()) 162 | 163 | self.softmax_W_binary = tf.get_variable('softmax_W_binary', [self.hidden_size*2, 2], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer()) 164 | self.softmax_b_binary = tf.get_variable('softmax_b_binary', [2], dtype=tf.float32, initializer=tf.contrib.layers.xavier_initializer()) 165 | 166 | self.logits = tf.nn.xw_plus_b(self.outputs, self.softmax_W, self.softmax_b, name='logits') 167 | self.g_logits = tf.nn.xw_plus_b(g_outputs, self.g_softmax_W, self.g_softmax_b, name='g_logits') 168 | 169 | labels = tf.contrib.layers.one_hot_encoding( 170 | tf.boolean_mask(self.label_ph, mask), 171 | num_classes=self.classes 172 | ) 173 | 174 | 175 | self.d_loss = tf.reduce_mean( 176 | tf.nn.softmax_cross_entropy_with_logits( 177 | labels=labels, 178 | logits=self.g_logits, 179 | ) 180 | ) 181 | 182 | self.g_loss = tf.reduce_mean( 183 | tf.multiply(labels, tf.to_float(tf.log(1- tf.nn.softmax(self.g_logits)))) 184 | ) 185 | 186 | # compute diff loss 187 | self.diff_loss = tf.norm(tf.matmul(tf.transpose(g_outputs, [1, 0]), self.outputs)) 188 | 189 | # all trainable variables 190 | all_trainable_vars = tf.trainable_variables() 191 | # variables related to discriminator 192 | vars_d = [var for var in all_trainable_vars if var.op.name == 'g_softmax_W' or var.op.name == 'g_softmax_b'] 193 | optimizer_d = tf.train.GradientDescentOptimizer(0.1) 194 | self.train_op_d = 
optimizer_d.minimize(self.d_loss, var_list=vars_d) 195 | 196 | # variables related to generator 197 | vars_g = [var for var in all_trainable_vars if var.op.name in \ 198 | ['g_bi-rnn/fw/multi_rnn_cell/cell_0/basic_lstm_cell/weights', \ 199 | 'g_bi-rnn/fw/multi_rnn_cell/cell_0/basic_lstm_cell/biases', \ 200 | 'g_bi-rnn/bw/multi_rnn_cell/cell_0/basic_lstm_cell/weights', \ 201 | 'g_bi-rnn/bw/multi_rnn_cell/cell_0/basic_lstm_cell/biases'] 202 | ] 203 | optimizer_g = tf.train.GradientDescentOptimizer(0.1) 204 | self.train_op_g = optimizer_g.minimize(self.g_loss + 0.1 * self.diff_loss, var_list=vars_g) 205 | 206 | 207 | self.logits_binary = tf.nn.xw_plus_b(self.outputs, self.softmax_W_binary, self.softmax_b_binary, name='logits_binary') 208 | self.loss = self.compute_loss() 209 | self.l2_loss = self.l2_rate * (tf.nn.l2_loss(self.softmax_W) + tf.nn.l2_loss(self.softmax_b)) 210 | self.total_loss = self.loss + self.l2_loss + self.diff_loss * 0.00001 211 | 212 | optimizer = tf.train.GradientDescentOptimizer(self.learning_rate) 213 | grads_and_vars = optimizer.compute_gradients(self.total_loss) 214 | self.global_step = tf.Variable(0, name='global_step', trainable=False) 215 | self.train_op = optimizer.apply_gradients( 216 | grads_and_vars = grads_and_vars, 217 | global_step = self.global_step 218 | ) 219 | 220 | gpu_options = tf.GPUOptions( 221 | visible_device_list='4,5,6,7', # adjust to the GPUs available on your machine 222 | allow_growth=True 223 | ) 224 | session_config = tf.ConfigProto( 225 | allow_soft_placement=True, 226 | log_device_placement=False, 227 | gpu_options=gpu_options 228 | ) 229 | self.sess = tf.Session(config=session_config) 230 | self.sess.run(tf.global_variables_initializer()) 231 | 232 | 233 | 234 | 235 | def compute_loss(self): 236 | 237 | mask = tf.sequence_mask(self.sequence_actual_lengths_ph, self.sequence_length) 238 | labels = tf.contrib.layers.one_hot_encoding( 239 | tf.boolean_mask(self.label_ph, mask), 240 | num_classes=self.classes 241 | ) 242 | labels_binary = tf.contrib.layers.one_hot_encoding( 243 | tf.boolean_mask( 244 | tf.sign(self.label_ph), 245 | mask 246 | ), 247 | num_classes=2 248 | ) 249 | 250 | cross_entropy = tf.nn.softmax_cross_entropy_with_logits( 251 | labels=labels, 252 | logits=self.logits, 253 | ) 254 | 255 | cross_entropy_binary = tf.nn.softmax_cross_entropy_with_logits( 256 | labels=labels_binary, 257 | logits=self.logits_binary 258 | ) 259 | 260 | return tf.reduce_mean(cross_entropy) + tf.reduce_mean(cross_entropy_binary) 261 | 262 | 263 | def fit(self, training_data, dev_data): 264 | """ 265 | 266 | Args: 267 | training_data: the training set, a tuple of 268 | (sentences, labels), two parallel lists 269 | of int id sequences 270 | 271 | dev_data: the development set, in the same format 272 | 273 | """ 274 | train_data_sent, train_data_label = training_data 275 | train_label_count = len(training_data[0]) 276 | train_data_indices = [i for i in range(train_label_count)] 277 | batches = batch_iter( 278 | list(zip(train_data_sent, train_data_label, train_data_indices)), 279 | batch_size=self.batch_size, 280 | num_epochs=self.num_epochs, shuffle=True 281 | ) 282 | 283 | self.saver = tf.train.Saver() 284 | train_label_count = len(training_data[0]) 285 | batch_num_in_epoch = int(math.ceil(train_label_count / float(self.batch_size))) 286 | 287 | max_dev_f1 = self.max_dev_f1 288 | current_patience = 0 289 | 290 | print('pretraining...') 291 | train_loss = 0.0 292 | for _ in range(batch_num_in_epoch): 293 | batch_data = batches.__next__() 294 | batch_data_sent, batch_data_label, batch_indices = zip(*batch_data) 295 | batch_data = (batch_data_sent, batch_data_label)
296 | batch_sequences, batch_targets, batch_sequence_actual_lengths = pad_batch(batch_data, max_len=self.sequence_length) 297 | index = np.array(list(enumerate(batch_sequence_actual_lengths))) 298 | feed_dict = dict() 299 | feed_dict[self.input_ph] = batch_sequences 300 | feed_dict[self.sequence_actual_lengths_ph] = batch_sequence_actual_lengths 301 | feed_dict[self.sequence_last_index_ph] = index 302 | feed_dict[self.weight_dropout_ph] = self.feature_weight_dropout 303 | feed_dict[self.dropout_rate_ph] = self.dropout_rate 304 | feed_dict[self.label_ph] = batch_targets 305 | _, loss = self.sess.run( 306 | [self.train_op_d, self.d_loss], 307 | feed_dict=feed_dict) 308 | time_str = datetime.datetime.now().isoformat() 309 | print("{} loss {:g}".format(time_str, loss)) 310 | 311 | for step in range(self.num_epochs): 312 | print('\nEpoch %d / %d:' % (step+1, self.num_epochs)) 313 | train_loss = 0.0 314 | for _ in range(batch_num_in_epoch): 315 | batch_data = batches.__next__() 316 | batch_data_sent, batch_data_label, batch_indices = zip(*batch_data) 317 | batch_data = (batch_data_sent, batch_data_label) 318 | batch_sequences, batch_targets, batch_sequence_actual_lengths = pad_batch(batch_data, max_len=self.sequence_length) 319 | index = np.array(list(enumerate(batch_sequence_actual_lengths))) 320 | feed_dict = dict() 321 | feed_dict[self.input_ph] = batch_sequences 322 | 323 | feed_dict[self.sequence_actual_lengths_ph] = batch_sequence_actual_lengths 324 | feed_dict[self.sequence_last_index_ph] = index 325 | feed_dict[self.weight_dropout_ph] = self.feature_weight_dropout 326 | feed_dict[self.dropout_rate_ph] = self.dropout_rate 327 | feed_dict[self.label_ph] = batch_targets 328 | 329 | _ = self.sess.run(self.train_op_d, feed_dict=feed_dict) # discriminator step 330 | _ = self.sess.run(self.train_op_g, feed_dict=feed_dict) # generator step 331 | 332 | _, loss, logits, bi_rnn_outputs, bi_rnn_outputs_dropout, outputs, global_step, _ = self.sess.run( 333 | [self.train_op, self.loss, self.logits, self.bi_rnn_outputs, self.bi_rnn_outputs_dropout, self.outputs, self.global_step, self.feature_weight], 334 | feed_dict=feed_dict) 335 | time_str = datetime.datetime.now().isoformat() 336 | per_epoch_step = global_step % batch_num_in_epoch 337 | 338 | print("{} step {} / {}, loss {:g}".format(time_str, per_epoch_step, batch_num_in_epoch, loss)) 339 | train_loss += loss 340 | 341 | if global_step % 100 == 0: 342 | print('dev set: ') 343 | dev_f1 = self.predict(dev_data) 344 | print('') 345 | 346 | if max_dev_f1 < dev_f1: 347 | max_dev_f1 = dev_f1 348 | current_patience = 0 349 | self.saver.save(self.sess, self.model_path) 350 | print('model has been saved to %s!' % self.model_path) 351 | 352 | train_loss /= float(batch_num_in_epoch) 353 | print('train_loss: ', train_loss) 354 | print('training set: ') 355 | self.predict(training_data, True) 356 | # print('dev set: ') 357 | dev_f1 = self.predict(dev_data) 358 | 359 | 360 | if not self.model_path: 361 | continue 362 | 363 | if max_dev_f1 < dev_f1: 364 | max_dev_f1 = dev_f1 365 | current_patience = 0 366 | 367 | self.saver.save(self.sess, self.model_path)
368 | print('model has been saved to %s!' % self.model_path) 369 | else: 370 | current_patience += 1 371 | # early stopping 372 | if self.train_max_patience and current_patience >= self.train_max_patience: 373 | print() 374 | return 375 | return 376 | 377 | 378 | def predict(self, data, loginfo=False): 379 | test_label_count = len(data[1]) 380 | batch_num_in_epoch = int(math.ceil(test_label_count / float(self.batch_size))) 381 | 382 | sequences, targets, sequence_actual_lengths = pad_batch(data, max_len=self.sequence_length, shuffle=False) 383 | all_predicts = [] 384 | binary_all_predicts = [] 385 | predict_labels = [] 386 | labels = [] 387 | binary_labels = [] 388 | total_loss = 0.0 389 | for i in range(batch_num_in_epoch): 390 | feed_dict = dict() 391 | batch_indices = np.arange( 392 | i*self.batch_size, 393 | (i+1)*self.batch_size if (i+1)*self.batch_size <= test_label_count else test_label_count) 394 | 395 | feed_dict[self.input_ph] = sequences[batch_indices] 396 | feed_dict[self.sequence_actual_lengths_ph] = sequence_actual_lengths[batch_indices] 397 | 398 | index = np.array(list(enumerate(sequence_actual_lengths[batch_indices]))) 399 | feed_dict[self.sequence_last_index_ph] = index 400 | 401 | feed_dict[self.weight_dropout_ph] = 0.0 402 | feed_dict[self.dropout_rate_ph] = 0.0 403 | batch_targets = targets[batch_indices] 404 | feed_dict[self.label_ph] = batch_targets 405 | 406 | 407 | logits, loss = self.sess.run( 408 | [self.logits, self.loss], 409 | feed_dict=feed_dict 410 | ) 411 | total_loss += loss 412 | predicts = np.argmax(logits, axis=-1) 413 | 414 | all_predicts.extend(predicts) 415 | binary_all_predicts.extend(list(np.sign(predicts))) 416 | last_index = 0 417 | for j, length in enumerate(sequence_actual_lengths[batch_indices]): 418 | predict_labels.append(predicts[last_index:last_index + length]) 419 | last_index += length 420 | labels.extend(list(batch_targets[j, :length])) 421 | binary_labels.extend(list(np.sign(batch_targets[j, :length]))) 422 | 423 | precision, recall, f1, _ = precision_recall_fscore_support( 424 | labels, 425 | all_predicts, 426 | labels=list(range(1, self.classes)), 427 | average='micro' 428 | ) 429 | 430 | binary_precision = precision_score(binary_labels, binary_all_predicts, average="binary") 431 | binary_recall = recall_score(binary_labels, binary_all_predicts, average="binary") 432 | binary_f1 = f1_score(binary_labels, binary_all_predicts, average="binary") 433 | if loginfo: 434 | print("multi-classification precision {:g}, recall {:g}, f1 {:g}".format(precision, recall, f1)) 435 | print("binary-classification precision {:g}, recall {:g}, f1 {:g}".format(binary_precision, binary_recall, binary_f1)) 436 | return f1 437 | 438 | -------------------------------------------------------------------------------- /data/wordvec: -------------------------------------------------------------------------------- 1 | 
-0.937436,-0.168365,0.539765,-0.370030,-0.204125,0.062528,0.632555,0.115621,-0.902276,-0.077371,-0.480780,0.227101,0.860600,-0.016172,-0.662388,0.419993,0.447674,-0.995685,0.247876,0.217395,0.335700,0.093001,0.500984,-0.223958,1.029998,0.393279,-0.287581,0.075255,-0.194952,0.470862,0.686256,0.884906,0.199562,-1.143481,0.149407,-0.125736,0.029793,0.503307,-0.248135,-0.535440,-0.769623,-0.128522,-0.037355,1.045511,0.191341,-0.079319,0.129211,0.109247,-0.641507,0.411306,-0.801452,0.278042,0.294740,1.121067,0.052613,0.458381,-0.385055,0.168167,-0.632011,0.741978,0.086230,0.617590,0.123257,-0.334151,0.460036,-0.353927,0.616587,-0.717535,0.388793,-0.255788,0.450490,0.633205,-0.301909,-0.296402,-0.143066,-0.159420,-0.272678,-0.037839,0.919656,-0.421309,0.320229,-0.439898,-0.572834,1.155661,-0.090599,-0.413529,0.355392,-0.240098,0.719409,0.152348,0.001362,-0.294359,0.251789,-0.026479,0.239083,-0.991709,0.421570,0.173811,0.166073,-0.351407,0.065299,0.044694,0.775560,0.828814,-0.396172,0.299003,0.069457,-0.118018,0.174324,-0.685344,0.149709,-0.354950,-0.201990,0.150332,0.777870,-0.224895,0.708316,0.262308,1.000447,0.206941,-0.495479,0.443548,-0.673706,-0.674983,0.245530,0.184319,-0.317066,-0.829714,-0.798211,-0.288795,-0.096735,0.350601,-0.676436,-0.036475,-0.784548,-0.172600,-0.423848,0.038077,0.035916,0.211494,-0.823176,0.794440,-0.123926,-0.336717,-0.291716,-0.468576,0.015706,0.063692,0.221782,0.931200,-0.586847,-0.445718,-0.039831,-0.465279,0.937231,0.038192,-0.164040,0.216767,-0.001642,-0.291440,0.170108,0.233404,0.164336,-0.528485,-0.393503,0.046781,0.423696,0.036477,-0.605086,-0.267394,-0.644238,0.569905,-0.075988,0.262943,-0.077673,-0.431478,0.571065,0.100034,0.743153,0.104015,-0.221866,0.950510,-0.434268,0.557490,-0.509037,0.465427,-0.013788,0.405156,-0.401117,0.637959,0.276344,-0.155616,-0.048360,-0.669819,-0.574308,-0.505315,0.024664,-0.423942,-0.722710,-0.451877,0.353732,0.107610,-0.582275,-0.063797,-0.164477,0.141941,0.053494,-0.319014,-0.076826,0.354380,0.657047,0.100647,-0.404888,0.494134,0.431051,0.448970,0.412230,0.268986,-0.226154,0.608794,-0.505509,-0.130856,-0.396429,-1.003826,0.491228,0.170804,-0.432414,0.480957,0.034307,-0.692941,-0.736807,0.195499,-0.644598,-0.668773,-0.039237,0.190465,-0.037229,0.153985,0.249188,-0.103834,-0.141716,0.407783,0.312449,0.531738,-0.350408,-0.565788,-0.418789,0.293251,0.725070,0.698135,0.335168,0.303157,-0.093197,0.359078,-0.483857,-0.124825,0.187547,0.393542,-0.179755,0.550261,0.006450,0.390777,-0.871437,0.008073,-0.209881,0.198427,0.898189,0.444656,-0.428453,0.430047,-0.159604,0.256386,0.040941,0.976146,-0.710196,-1.010145,0.029112,-0.088737,-0.195580,-0.591295,-0.455491,0.020141,0.402173,-0.471278,-0.407163,-0.010571,0.067495,-0.054031,-0.254329,-0.233464,-0.513594,-0.271388,-0.023803,0.927814,0.927038,-1.201536,-0.603731,0.662657,-0.017641,0.314755 2 | 
0.147287,0.282464,-0.894634,-0.692962,-0.334835,-0.956724,0.775113,0.323083,0.331419,-0.182455,-0.712207,-0.306732,0.527058,-1.034796,-1.173371,-0.402258,0.632288,-0.423927,0.331800,0.164267,0.854635,-0.135942,-0.323128,-0.111641,0.839495,0.915762,-0.245590,-0.113619,-0.674064,0.269736,0.903550,-0.215668,0.847709,-1.035685,0.742737,-1.127273,0.507596,0.719597,-1.644415,-0.767892,-0.569971,0.700674,-0.001636,0.404625,0.627306,1.444805,0.438960,1.035447,-1.021967,0.173993,-0.406609,-0.019628,-0.027386,1.578752,-1.085754,0.309872,-1.137323,0.425405,-0.487454,-0.140638,0.197164,0.306353,1.212443,0.232568,-0.045959,-0.061208,0.408055,-0.476633,-0.152202,-0.287141,0.145345,0.171736,-0.396141,-1.176532,0.302276,0.671755,-0.425577,-0.787986,0.673318,-0.613929,1.090411,-1.417992,-0.371447,1.441056,-0.117422,-0.586043,0.140245,-0.326897,-0.855468,0.167680,-0.485131,-0.304608,0.789781,-0.372974,-0.355212,-0.639518,-0.588487,-0.253044,-0.023734,-1.174664,0.602261,-0.278135,0.095077,0.830376,0.667216,0.080168,0.051221,-0.192082,-1.001172,-0.542507,-1.051569,0.976319,-0.296735,-0.498794,0.462442,-0.375927,0.407290,0.135616,1.145301,0.279551,-1.267900,0.442216,-1.091083,0.122264,0.566867,0.839277,0.843611,-1.266049,0.059967,-1.617642,-0.262955,0.709529,-0.929410,-0.521662,-1.737299,0.541877,-0.877821,-0.699419,-0.047403,0.203079,-0.386897,-0.060496,-0.028322,0.159605,0.318238,-0.630668,-0.863773,0.221783,0.722878,1.211035,-0.376508,-0.570796,0.191725,-0.096437,-0.480022,-0.104340,0.291820,0.445328,0.614408,0.126431,-0.862162,0.060376,-0.287114,-0.717108,0.635319,-0.351991,0.682761,-1.001641,-1.019432,0.826724,-0.790937,-0.301833,-0.214490,-0.003819,0.308634,-0.478947,0.486989,0.153646,-0.623642,0.555339,0.084479,0.379019,-0.334815,-1.101297,-0.580387,0.449234,0.155396,-0.733906,-0.686410,0.533236,-0.231304,0.226202,0.425650,0.414741,0.178060,-0.852575,0.099380,0.610190,-0.643339,-0.767314,0.059330,0.096166,0.206510,-0.255723,-0.868433,1.796528,0.381021,0.503201,0.549695,-0.420921,1.141577,1.239734,-1.102790,-0.062277,0.708917,-0.845324,-0.872799,0.042754,0.217722,-0.519873,0.223664,0.064623,0.286237,-0.759533,0.151092,0.483446,-0.089597,-0.827902,-0.476414,-1.113410,-2.031164,1.150830,0.028514,-0.153752,-0.118835,0.141997,-0.264851,-0.406413,-0.880232,-0.074141,-0.424991,-1.007890,-0.824675,-0.931893,-0.413018,-0.859099,-0.172481,-0.035632,-0.220708,0.514260,-0.449933,0.177098,-0.996372,0.731231,1.010195,0.419209,0.569405,-0.520031,-0.817616,0.585013,0.133878,1.249636,-1.300842,0.299050,-0.345815,0.868203,0.844962,0.946431,-0.760377,0.058096,-0.267946,-0.714735,-0.647734,1.100423,-0.177193,-1.746689,0.577881,-0.645920,-0.779389,0.093225,-0.181923,1.200616,-0.172855,-0.792571,0.878812,0.209226,-0.413413,-0.330753,1.104191,-0.497355,-0.296560,0.158186,0.366709,0.547657,1.345010,-1.630018,-0.096426,1.257327,0.111949,1.103092 3 | 
-0.513079,-0.151940,0.035074,-0.215926,-0.758543,-0.810340,0.199081,-0.540856,-0.700174,-0.109813,-0.457055,-0.094102,-0.461596,-0.782836,-0.973080,-0.368732,0.027030,-0.909136,0.086939,0.144771,0.247908,0.318592,0.340904,-0.248999,1.335871,-0.073384,-0.603202,0.038298,-0.283604,0.204930,1.005866,-0.512150,0.139915,-1.359692,0.159723,-0.012405,-0.147562,0.514936,-0.896219,-0.461398,0.398296,0.170519,0.117896,1.023350,0.009105,0.787789,0.112071,0.622899,-0.456043,0.275070,-0.186586,0.519454,-0.324394,0.300492,-0.236975,1.150064,-0.574805,0.564055,-0.869198,0.257660,0.503582,0.453052,-0.138489,-0.170624,0.686954,-0.477047,0.681175,-0.270748,0.740044,-0.383946,0.348675,0.080020,-0.532934,0.032719,0.115670,-0.141426,-0.797910,-0.343721,0.949408,-0.799539,0.131494,-0.602261,-0.866010,1.175660,0.127276,-0.090539,-0.398941,-0.846172,0.436599,-0.322843,-0.064073,-0.450215,0.200488,-0.200722,0.655547,-0.656270,-0.229354,-0.676045,-0.135386,-0.777576,-0.160910,0.036547,0.114924,-0.003643,0.327589,-0.242166,0.052349,-0.081847,-0.100958,-0.485316,-0.021268,-0.340936,-0.121145,0.474996,0.455747,-0.095931,0.554097,0.031981,1.033668,0.584793,-0.662014,0.696169,-0.020099,-0.257760,0.426107,0.354300,0.439881,-0.858767,-0.541432,-0.823502,-0.116130,0.871032,-0.838565,-0.176937,-1.069429,-0.604435,0.314838,-0.079184,-0.130942,0.763251,-0.657440,1.013914,-0.284603,-0.153515,-0.026623,-0.662682,0.197042,-0.205104,0.843559,1.225131,-0.520040,-0.977035,0.116074,-0.563239,0.336070,-0.065849,-0.654299,0.000338,0.204038,-0.044730,-0.134145,-0.088686,-0.057288,-0.286928,0.129864,-0.058231,0.079888,-0.503772,-0.445731,-0.259763,-0.515579,-0.328404,0.276174,0.752328,0.341988,-0.539323,0.564186,0.240818,0.249052,0.490840,-0.813986,0.936967,0.061049,0.152928,0.039030,1.331552,0.004597,-0.165835,-0.381743,0.705455,0.073933,0.124719,0.136671,-0.236303,-0.503864,-0.597840,-0.164421,0.353264,-0.751659,-0.502159,0.298969,-0.374852,-0.675975,-0.482665,-0.533619,0.596890,-0.524781,-0.454898,0.035701,-0.331711,1.022265,0.329422,-0.338419,0.764443,0.253412,0.465930,0.992527,0.167271,-0.045513,0.410415,-0.221239,0.227188,-0.298504,-0.702221,0.311819,0.555652,-0.696550,0.146151,-0.420506,-1.101730,-0.720450,0.585595,-0.320395,-0.663424,-0.255034,0.075114,-0.018168,-0.518005,0.472425,-0.064387,-0.352599,-0.200963,-0.094728,0.081393,0.211697,-0.546791,-0.133602,0.075898,-0.169179,0.244292,-0.012722,-0.154757,-0.806368,-0.024744,0.413133,-0.080290,-0.294107,0.231080,0.372545,0.332469,0.266846,0.107489,-0.776821,-0.136468,-0.139565,0.605008,1.408605,0.016712,-0.785689,0.759822,-0.509367,-0.245539,-0.351012,0.908141,-0.856900,-1.038444,0.040541,-0.280642,-0.385671,-0.419252,-0.245752,0.278905,-0.195281,-0.437511,-1.044588,-0.246827,-0.037025,0.243381,0.007534,-0.164146,-0.363017,-0.011106,0.470044,0.827413,1.146431,-1.461192,-0.160243,0.773005,0.442161,0.944765 4 | 
-1.594462,0.406524,-0.249126,-0.434121,-1.151135,-0.783572,-0.177892,-0.135009,-0.092414,0.091486,0.099619,0.638392,0.272673,-0.811266,-0.229400,-0.134299,0.252024,-0.405522,0.362713,-0.514319,0.924511,-0.807266,-0.733025,-0.109495,0.926414,0.650025,-0.741600,1.469303,0.452270,0.687912,-0.171021,1.560462,1.525411,-1.073306,-0.501078,-0.102923,1.055567,0.334101,-1.272807,-0.681295,-0.538386,0.899032,0.745189,0.432198,0.490678,0.872767,0.352616,1.030945,-0.543185,0.528443,0.363059,-0.458365,-0.949786,0.212932,-1.341704,-0.401634,-0.913186,1.102771,-0.470993,0.252850,-0.858972,0.153328,1.200814,0.127877,0.593505,-0.369830,0.565068,-1.166460,-0.334395,-0.147475,-0.821127,-0.283754,-0.792586,-1.385810,0.199532,1.293459,-0.470301,-1.165103,0.289606,-0.072437,1.213462,-1.899472,-1.228122,0.347261,-0.150505,-0.042239,1.104591,0.038171,0.775795,0.028077,-0.249586,0.171871,-0.230577,-0.346735,0.214007,-0.878986,-0.265078,0.152446,-0.245302,-1.547535,1.129751,-0.500786,1.140410,0.978040,0.683006,0.496767,0.354504,-0.453167,-0.278036,-1.177620,-0.362455,0.728620,-0.370581,0.212603,-0.813789,0.202000,-0.145255,-0.535888,0.878490,0.129664,-0.542293,0.373176,-0.530367,0.109574,0.569516,-0.228661,1.113103,-2.019980,0.043132,-0.213190,-0.209539,0.499308,-0.644650,-0.312540,-1.249894,-0.750838,-0.960956,-0.960899,0.320325,-0.444707,-0.665304,0.516587,-0.534511,-0.122945,-0.473501,0.603747,-0.434002,0.024944,0.429298,1.583043,-0.451650,-0.976316,1.282270,-0.143554,0.045750,-0.584159,-0.778383,-1.027595,0.422278,-1.316651,0.036267,-0.867778,0.177839,-0.161766,-1.312232,-0.777286,0.183734,-1.239935,-1.216359,0.133079,-0.443175,0.672742,0.661774,0.656453,0.470968,-1.305398,0.686321,-0.734794,-0.088182,0.430131,-0.058747,1.279872,0.134400,-0.175435,-0.568379,0.234626,-0.593847,0.105554,-1.224038,-0.255941,-1.605850,-0.401040,-0.169774,-0.172991,0.399750,-0.254085,1.267670,1.393368,-0.037132,-0.358498,-0.121131,-0.217288,-0.034124,-0.220316,-0.564958,0.487459,0.642502,0.241487,0.824020,0.683044,1.490677,0.870418,-0.608847,-0.525861,0.602297,-0.223309,-0.775644,0.125183,-0.719293,-0.649096,-0.104214,-0.248385,-0.260326,-0.861190,0.077839,0.149959,0.676479,0.532448,1.053152,-1.079703,-2.095854,0.533755,1.059878,0.013367,0.722650,0.556699,0.780150,0.303091,-0.285749,0.864704,0.098467,-0.516116,-0.192961,-0.225515,-0.902720,0.433737,-0.056604,-0.077259,0.383693,-0.345985,0.199556,-0.377425,0.042432,-0.353003,-0.543413,0.406085,0.141986,0.525204,-0.437457,0.910942,0.495839,1.540428,-1.948131,0.999215,-1.325323,0.165825,1.360764,0.764951,-0.328350,-0.522930,0.328048,0.479556,-1.692246,1.633483,-0.700160,-1.292469,0.804315,0.370843,-1.271516,0.181497,0.154791,0.868551,-0.335530,-1.630891,0.471020,0.882284,0.216433,-0.314454,0.863818,-0.038779,-0.563547,0.319074,-0.393909,0.795705,1.442309,-2.330797,1.033190,0.897345,0.172642,0.069717 5 | 
5 | 0.418180,-0.693019,-0.875619,-0.375677,-1.469246, … ,0.925564,0.143334,0.599342
… (rows 6-24 of /data/wordvec omitted here: one pre-trained embedding per line, stored as comma-separated floats; every row has the same fixed width) …
--------------------------------------------------------------------------------
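Note: `/data/wordvec` above is a plain-text embedding matrix, one vector per line as comma-separated floats. The repository's own loader lives in `data_scripts.py`/`utils.py` (not reproduced in this dump), so the following is only a minimal sketch of reading such a file; the assumption that row order matches the vocabulary index order used by `preprocess.py` is mine.

```python
import numpy as np


def load_wordvec(path):
    """Read a comma-separated embedding file: one vector per line.

    Assumes every line has the same number of floats. Returns a
    (vocab_size, dim) float32 matrix whose row order follows the
    file's line order (assumed to match the vocabulary indices).
    """
    rows = []
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue  # skip blank lines, if any
            rows.append([float(x) for x in line.split(",")])
    return np.asarray(rows, dtype=np.float32)


if __name__ == "__main__":
    vecs = load_wordvec("data/wordvec")
    print(vecs.shape)  # (vocab_size, embedding_dim)
```

A matrix loaded this way is typically passed to the model as the initializer of its embedding lookup table, with row i giving the vector for the token whose id is i.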