├── .DS_Store ├── architecture.png ├── models └── model.final.h5.zip ├── utils └── code_for_calculating_per-recording.zip ├── dataset ├── event-1-answers └── event-2-answers ├── .gitignore ├── Pre-training.py ├── README.md ├── Preprocessing.py ├── LeNet.py └── LICENSE /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JackAndCole/Sleep-apnea-detection-through-a-modified-LeNet-5-convolutional-neural-network/HEAD/.DS_Store -------------------------------------------------------------------------------- /architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JackAndCole/Sleep-apnea-detection-through-a-modified-LeNet-5-convolutional-neural-network/HEAD/architecture.png -------------------------------------------------------------------------------- /models/model.final.h5.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JackAndCole/Sleep-apnea-detection-through-a-modified-LeNet-5-convolutional-neural-network/HEAD/models/model.final.h5.zip -------------------------------------------------------------------------------- /utils/code_for_calculating_per-recording.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JackAndCole/Sleep-apnea-detection-through-a-modified-LeNet-5-convolutional-neural-network/HEAD/utils/code_for_calculating_per-recording.zip -------------------------------------------------------------------------------- /dataset/event-1-answers: -------------------------------------------------------------------------------- 1 | x01 A 2 | x02 A 3 | x03 B 4 | x04 C 5 | x05 A 6 | x06 C 7 | x07 A 8 | x08 A 9 | x09 A 10 | x10 B 11 | x11 B 12 | x12 B 13 | x13 A 14 | x14 A 15 | x15 A 16 | x16 B 17 | x17 C 18 | x18 C 19 | x19 A 20 | x20 A 21 | x21 A 22 | x22 C 23 | x23 A 24 | x24 C 25 | x25 A 26 | x26 A 27 | x27 A 28 | x28 A 29 | x29 C 30 | x30 A 31 | x31 A 32 | x32 A 33 | x33 C 34 | x34 C 35 | x35 C 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /Pre-training.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | 3 | import numpy as np 4 | import os 5 | from keras.models import load_model 6 | from scipy.interpolate import splev, splrep 7 | from sklearn.metrics import confusion_matrix 8 | 9 | base_dir = "dataset" 10 | 11 | ir = 3 12 | before = 2 13 | after = 2 14 | 15 | # normalize 16 | scaler = lambda arr: (arr - np.min(arr)) / (np.max(arr) - np.min(arr)) 17 | 18 | 19 | def load_data(): 20 | tm = np.arange(0, (before + 1 + after) * 60, step=1 / float(ir)) 21 | 22 | with open(os.path.join(base_dir, "apnea-ecg.pkl"), 'rb') as f: 23 | apnea_ecg = pickle.load(f) 24 | 25 | x_train = [] 26 | o_train, y_train = apnea_ecg["o_train"], apnea_ecg["y_train"] 27 | groups_train = apnea_ecg["groups_train"] 28 | for i in range(len(o_train)): 29 | (rri_tm, rri_signal), (ampl_tm, ampl_siganl) = o_train[i] 30 | rri_interp_signal = splev(tm, splrep(rri_tm, scaler(rri_signal), k=3), ext=1) 31 | ampl_interp_signal = splev(tm, splrep(ampl_tm, scaler(ampl_siganl), k=3), ext=1) 32 | x_train.append([rri_interp_signal, ampl_interp_signal]) 33 | x_train = np.array(x_train, dtype="float32").transpose((0, 2, 1)) 34 | y_train = np.array(y_train, dtype="float32") 35 | 36 | x_test = [] 37 | o_test, y_test = apnea_ecg["o_test"], apnea_ecg["y_test"] 38 | groups_test = apnea_ecg["groups_test"] 39 | for i in range(len(o_test)): 40 | (rri_tm, rri_signal), (ampl_tm, ampl_siganl) = o_test[i] 41 | rri_interp_signal = splev(tm, splrep(rri_tm, scaler(rri_signal), k=3), ext=1) 42 | ampl_interp_signal = splev(tm, splrep(ampl_tm, scaler(ampl_siganl), k=3), ext=1) 43 | x_test.append([rri_interp_signal, ampl_interp_signal]) 44 | x_test = np.array(x_test, dtype="float32").transpose((0, 2, 1)) 45 | y_test = np.array(y_test, dtype="float32") 46 | 47 | return (x_train, y_train, groups_train), (x_test, y_test, groups_test) 48 | 49 | 50 | if __name__ == "__main__": 51 | (x_train, y_train, groups_train), (x_test, y_test, groups_test) = load_data() 52 | 53 | model = load_model(os.path.join("./models", "model.final.h5")) 54 | model.summary() 55 | 56 | print("training:") 57 | y_true, y_pred = y_train, np.argmax(model.predict(x_train, batch_size=1024, verbose=1), axis=-1) 58 | 59 | C = confusion_matrix(y_true, y_pred, labels=(1, 0)) 60 | TP, TN, FP, FN = C[0, 0], C[1, 1], C[1, 0], C[0, 1] 61 | acc, sn, sp = 1. * (TP + TN) / (TP + TN + FP + FN), 1. * TP / (TP + FN), 1. * TN / (TN + FP) 62 | print("acc: {}, sn: {}, sp: {}".format(acc, sn, sp)) 63 | 64 | print("testing:") 65 | y_true, y_pred = y_test, np.argmax(model.predict(x_test, batch_size=1024, verbose=1), axis=-1) 66 | 67 | C = confusion_matrix(y_true, y_pred, labels=(1, 0)) 68 | TP, TN, FP, FN = C[0, 0], C[1, 1], C[1, 0], C[0, 1] 69 | acc, sn, sp = 1. * (TP + TN) / (TP + TN + FP + FN), 1. * TP / (TP + FN), 1. * TN / (TN + FP) 70 | print("acc: {}, sn: {}, sp: {}".format(acc, sn, sp)) 71 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Code for: Sleep apnea detection from a single-lead ECG signal with automatic feature-extraction through a modified LeNet-5 convolutional neural network 2 | 3 | ## Abstract 4 | 5 | Sleep apnea (SA) is the most common respiratory sleep disorder, leading to some serious neurological and cardiovascular diseases if left untreated. The diagnosis of SA is traditionally made using Polysomnography (PSG). However, this method requires many electrodes and wires, as well as an expert to monitor the test. Several researchers have proposed instead using a single channel signal for SA diagnosis. Among these options, the ECG signal is one of the most physiologically relevant signals of SA occurrence, and one that can be easily recorded using a wearable device. However, existing ECG signal-based methods mainly use features (i.e. frequency domain, time domain, and other nonlinear features) acquired from ECG and its derived signals in order to construct the model. This requires researchers to have rich experience in ECG, which is not common. A convolutional neural network (CNN) is a kind of deep neural network that can automatically learn effective feature representation from training data and has been successfully applied in many fields. Meanwhile, most studies have not considered the impact of adjacent segments on SA detection. Therefore, in this study, we propose a modified LeNet-5 convolutional neural network with adjacent segments for SA detection. Our experimental results show that our proposed method is useful for SA detection, and achieves better or comparable results when compared with traditional machine learning methods. 6 | 7 | architecture 8 | 9 | ## Dataset 10 | 11 | [apnea-ecg](https://physionet.org/content/apnea-ecg/1.0.0/), [event-1-answers](dataset/event-1-answers), [event-2-answers](dataset/event-2-answers) 12 | 13 | ## Usage 14 | 15 | - Reproduce the results 16 | 17 | > A pre-trained model is provided in the model directory. To reproduce our results, you should, first, download the Apnea-ecg database from the above link and save it in the dataset directory. Then, execute `Preprocessing.py` to obtain the training and test dataset. After that, just run `Pre-training.py` and you will get the results of our paper. 18 | 19 | - Re-train the model 20 | 21 | > Just replace the last step of run `Pre-training.py` with `LeNet.py`. 22 | > 23 | > Noted that the optimization function of Keras and TensorFlow is slightly different in different versions. Therefore, to reproduce our results, it suggests that using the same version of Keras and TensorFlow as us, in our work the version of Keras is 2.3.1 and TensorFlow is 1.15.0. In addition, Keras and TensorFlow have a certain randomness, the actual results may be somewhat floating. 24 | 25 | ## Requirements 26 | 27 | Python==2.7 28 | Keras==2.3 29 | Tensorflow==2.1 30 | 31 | ## Cite 32 | 33 | If our work is helpful to you, please cite: 34 | 35 | Wang T, Lu C, Shen G, et al. Sleep apnea detection from a single-lead ECG signal with automatic feature-extraction through a modified LeNet-5 convolutional neural network[J]. PeerJ, 2019, 7: e7731. [https://doi.org/10.7717/peerj.7731](https://doi.org/10.7717/peerj.7731) 36 | 37 | ## Email: 38 | 39 | If you have any questions, please email to: [wtustc@mail.ustc.edu.cn](mailto:wtustc@mail.ustc.edu.cn) 40 | -------------------------------------------------------------------------------- /Preprocessing.py: -------------------------------------------------------------------------------- 1 | import pickle 2 | import sys 3 | from concurrent.futures import ProcessPoolExecutor, as_completed 4 | 5 | import biosppy.signals.tools as st 6 | import numpy as np 7 | import os 8 | import wfdb 9 | from biosppy.signals.ecg import correct_rpeaks, hamilton_segmenter 10 | from scipy.signal import medfilt 11 | from sklearn.utils import cpu_count 12 | from tqdm import tqdm 13 | 14 | # PhysioNet Apnea-ECG dataset 15 | # url: https://physionet.org/physiobank/database/apnea-ecg/ 16 | base_dir = "dataset" 17 | 18 | fs = 100 19 | sample = fs * 60 # 1 min's sample points 20 | 21 | before = 2 # forward interval (min) 22 | after = 2 # backward interval (min) 23 | hr_min = 20 24 | hr_max = 300 25 | 26 | num_worker = 35 if cpu_count() > 35 else cpu_count() - 1 # Setting according to the number of CPU cores 27 | 28 | 29 | def worker(name, labels): 30 | X = [] 31 | y = [] 32 | groups = [] 33 | signals = wfdb.rdrecord(os.path.join(base_dir, name), channels=[0]).p_signal[:, 0] 34 | for j in tqdm(range(len(labels)), desc=name, file=sys.stdout): 35 | if j < before or \ 36 | (j + 1 + after) > len(signals) / float(sample): 37 | continue 38 | signal = signals[int((j - before) * sample):int((j + 1 + after) * sample)] 39 | signal, _, _ = st.filter_signal(signal, ftype='FIR', band='bandpass', order=int(0.3 * fs), 40 | frequency=[3, 45], sampling_rate=fs) 41 | # Find R peaks 42 | rpeaks, = hamilton_segmenter(signal, sampling_rate=fs) 43 | rpeaks, = correct_rpeaks(signal, rpeaks=rpeaks, sampling_rate=fs, tol=0.1) 44 | if len(rpeaks) / (1 + after + before) < 40 or \ 45 | len(rpeaks) / (1 + after + before) > 200: # Remove abnormal R peaks signal 46 | continue 47 | # Extract RRI, Ampl signal 48 | rri_tm, rri_signal = rpeaks[1:] / float(fs), np.diff(rpeaks) / float(fs) 49 | rri_signal = medfilt(rri_signal, kernel_size=3) 50 | ampl_tm, ampl_siganl = rpeaks / float(fs), signal[rpeaks] 51 | hr = 60 / rri_signal 52 | # Remove physiologically impossible HR signal 53 | if np.all(np.logical_and(hr >= hr_min, hr <= hr_max)): 54 | # Save extracted signal 55 | X.append([(rri_tm, rri_signal), (ampl_tm, ampl_siganl)]) 56 | y.append(0. if labels[j] == 'N' else 1.) 57 | groups.append(name) 58 | return X, y, groups 59 | 60 | 61 | if __name__ == "__main__": 62 | apnea_ecg = {} 63 | 64 | names = [ 65 | "a01", "a02", "a03", "a04", "a05", "a06", "a07", "a08", "a09", "a10", 66 | "a11", "a12", "a13", "a14", "a15", "a16", "a17", "a18", "a19", "a20", 67 | "b01", "b02", "b03", "b04", "b05", 68 | "c01", "c02", "c03", "c04", "c05", "c06", "c07", "c08", "c09", "c10" 69 | ] 70 | 71 | o_train = [] 72 | y_train = [] 73 | groups_train = [] 74 | print('Training...') 75 | with ProcessPoolExecutor(max_workers=num_worker) as executor: 76 | task_list = [] 77 | for i in range(len(names)): 78 | labels = wfdb.rdann(os.path.join(base_dir, names[i]), extension="apn").symbol 79 | task_list.append(executor.submit(worker, names[i], labels)) 80 | 81 | for task in as_completed(task_list): 82 | X, y, groups = task.result() 83 | o_train.extend(X) 84 | y_train.extend(y) 85 | groups_train.extend(groups) 86 | 87 | print() 88 | 89 | answers = {} 90 | with open(os.path.join(base_dir, "event-2-answers"), "r") as f: 91 | for answer in f.read().split("\n\n"): 92 | answers[answer[:3]] = list("".join(answer.split()[2::2])) 93 | 94 | names = [ 95 | "x01", "x02", "x03", "x04", "x05", "x06", "x07", "x08", "x09", "x10", 96 | "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20", 97 | "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x29", "x30", 98 | "x31", "x32", "x33", "x34", "x35" 99 | ] 100 | 101 | o_test = [] 102 | y_test = [] 103 | groups_test = [] 104 | print("Testing...") 105 | with ProcessPoolExecutor(max_workers=num_worker) as executor: 106 | task_list = [] 107 | for i in range(len(names)): 108 | labels = answers[names[i]] 109 | task_list.append(executor.submit(worker, names[i], labels)) 110 | 111 | for task in as_completed(task_list): 112 | X, y, groups = task.result() 113 | o_test.extend(X) 114 | y_test.extend(y) 115 | groups_test.extend(groups) 116 | 117 | apnea_ecg = dict(o_train=o_train, y_train=y_train, groups_train=groups_train, o_test=o_test, y_test=y_test, 118 | groups_test=groups_test) 119 | with open(os.path.join(base_dir, "apnea-ecg.pkl"), "wb") as f: 120 | pickle.dump(apnea_ecg, f, protocol=2) 121 | 122 | print("\nok!") 123 | -------------------------------------------------------------------------------- /LeNet.py: -------------------------------------------------------------------------------- 1 | """NOTES: Batch data is different each time in keras, which result in slight differences in results.""" 2 | import pickle 3 | 4 | import keras 5 | import matplotlib.pyplot as plt 6 | import numpy as np 7 | import os 8 | from keras.callbacks import LearningRateScheduler 9 | from keras.layers import Conv1D, Dense, Dropout, Flatten, MaxPooling1D 10 | from keras.models import Input, Model 11 | from keras.regularizers import l2 12 | from scipy.interpolate import splev, splrep 13 | import pandas as pd 14 | 15 | base_dir = "dataset" 16 | 17 | ir = 3 # interpolate interval 18 | before = 2 19 | after = 2 20 | 21 | # normalize 22 | scaler = lambda arr: (arr - np.min(arr)) / (np.max(arr) - np.min(arr)) 23 | 24 | 25 | def load_data(): 26 | tm = np.arange(0, (before + 1 + after) * 60, step=1 / float(ir)) 27 | 28 | with open(os.path.join(base_dir, "apnea-ecg.pkl"), 'rb') as f: # read preprocessing result 29 | apnea_ecg = pickle.load(f) 30 | 31 | x_train = [] 32 | o_train, y_train = apnea_ecg["o_train"], apnea_ecg["y_train"] 33 | groups_train = apnea_ecg["groups_train"] 34 | for i in range(len(o_train)): 35 | (rri_tm, rri_signal), (ampl_tm, ampl_siganl) = o_train[i] 36 | # Curve interpolation 37 | rri_interp_signal = splev(tm, splrep(rri_tm, scaler(rri_signal), k=3), ext=1) 38 | ampl_interp_signal = splev(tm, splrep(ampl_tm, scaler(ampl_siganl), k=3), ext=1) 39 | x_train.append([rri_interp_signal, ampl_interp_signal]) 40 | x_train = np.array(x_train, dtype="float32").transpose((0, 2, 1)) # convert to numpy format 41 | y_train = np.array(y_train, dtype="float32") 42 | 43 | x_test = [] 44 | o_test, y_test = apnea_ecg["o_test"], apnea_ecg["y_test"] 45 | groups_test = apnea_ecg["groups_test"] 46 | for i in range(len(o_test)): 47 | (rri_tm, rri_signal), (ampl_tm, ampl_siganl) = o_test[i] 48 | # Curve interpolation 49 | rri_interp_signal = splev(tm, splrep(rri_tm, scaler(rri_signal), k=3), ext=1) 50 | ampl_interp_signal = splev(tm, splrep(ampl_tm, scaler(ampl_siganl), k=3), ext=1) 51 | x_test.append([rri_interp_signal, ampl_interp_signal]) 52 | x_test = np.array(x_test, dtype="float32").transpose((0, 2, 1)) 53 | y_test = np.array(y_test, dtype="float32") 54 | 55 | return x_train, y_train, groups_train, x_test, y_test, groups_test 56 | 57 | 58 | def create_model(input_shape, weight=1e-3): 59 | """Create a Modified LeNet-5 model""" 60 | inputs = Input(shape=input_shape) 61 | 62 | # Conv1 63 | x = Conv1D(32, kernel_size=5, strides=2, padding="valid", activation="relu", kernel_initializer="he_normal", 64 | kernel_regularizer=l2(weight), bias_regularizer=l2(weight))(inputs) 65 | x = MaxPooling1D(pool_size=3)(x) 66 | 67 | # Conv3 68 | x = Conv1D(64, kernel_size=5, strides=2, padding="valid", activation="relu", kernel_initializer="he_normal", 69 | kernel_regularizer=l2(1e-3), bias_regularizer=l2(weight))(x) 70 | x = MaxPooling1D(pool_size=3)(x) 71 | 72 | x = Dropout(0.8)(x) # Avoid overfitting 73 | 74 | # FC6 75 | x = Flatten()(x) 76 | x = Dense(32, activation="relu")(x) 77 | outputs = Dense(2, activation="softmax")(x) 78 | 79 | model = Model(inputs=inputs, outputs=outputs) 80 | return model 81 | 82 | 83 | def lr_schedule(epoch, lr): 84 | if epoch > 70 and \ 85 | (epoch - 1) % 10 == 0: 86 | lr *= 0.1 87 | print("Learning rate: ", lr) 88 | return lr 89 | 90 | 91 | def plot(history): 92 | """Plot performance curve""" 93 | fig, axes = plt.subplots(1, 2, figsize=(10, 4)) 94 | axes[0].plot(history["loss"], "r-", history["val_loss"], "b-", linewidth=0.5) 95 | axes[0].set_title("Loss") 96 | axes[1].plot(history["acc"], "r-", history["val_acc"], "b-", linewidth=0.5) 97 | axes[1].set_title("Accuracy") 98 | fig.tight_layout() 99 | fig.show() 100 | 101 | 102 | if __name__ == "__main__": 103 | x_train, y_train, groups_train, x_test, y_test, groups_test = load_data() 104 | 105 | y_train = keras.utils.to_categorical(y_train, num_classes=2) # Convert to two categories 106 | y_test = keras.utils.to_categorical(y_test, num_classes=2) 107 | 108 | print("train num:", len(y_train)) 109 | print("test num:", len(y_test)) 110 | 111 | model = create_model(input_shape=x_train.shape[1:]) 112 | model.summary() 113 | 114 | from keras.utils import plot_model 115 | 116 | plot_model(model, "model.png") # Plot model 117 | 118 | model = keras.utils.multi_gpu_model(model, gpus=2) # Multi-gpu acceleration (optional) 119 | model.compile(optimizer="adam", loss="categorical_crossentropy", metrics=['accuracy']) 120 | 121 | lr_scheduler = LearningRateScheduler(lr_schedule) # Dynamic adjustment learning rate 122 | history = model.fit(x_train, y_train, batch_size=128, epochs=100, validation_data=(x_test, y_test), 123 | callbacks=[lr_scheduler]) 124 | model.save(os.path.join("models", "model.final.h5")) # Save training model 125 | 126 | loss, accuracy = model.evaluate(x_test, y_test) # test the model 127 | print("Test loss: ", loss) 128 | print("Accuracy: ", accuracy) 129 | 130 | # save prediction score 131 | y_score = model.predict(x_test) 132 | output = pd.DataFrame({"y_true": y_test[:, 1], "y_score": y_score[:, 1], "subject": groups_test}) 133 | output.to_csv(os.path.join("output", "LeNet.csv"), index=False) 134 | 135 | plot(history.history) 136 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /dataset/event-2-answers: -------------------------------------------------------------------------------- 1 | x01 2 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNN 3 | 1 NNNAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNAAAAA 4 | 2 AANAAAAAAAAAAAAAAAAANNAAAAANNAANNAAAAAAAAAAAAAAAAAAAAAAAAAAA 5 | 3 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNANNNANNNNNNNNNN 6 | 4 NNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAANAAAAAAAAAAAAAAAAAAAAAAANAAA 7 | 5 AAAAAAAANNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 8 | 6 AAAAAAAAAAAAAAAAAANAANAAAAAAAAAAAAAAAANNNNNNAAANNNNNAAANNNNN 9 | 7 NNNAANNNNAAAAAAAAAAAAAAAAAAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAA 10 | 8 ANNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNANN 11 | 12 | x02 13 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 14 | 1 NNNNNNNNNAAAAANNNNNNNNAAAAAAAAAAAAAAAAAANNAANNNNNNNNNNNNNNNN 15 | 2 NNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNN 16 | 3 NNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNN 17 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAA 18 | 5 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 19 | 6 ANNNNNNNNNNNAAAAAAAAAAAAAAAAAAAANNNNNNNNAAAAAAANNNNNNNNNNNNN 20 | 7 NNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 21 | 22 | x03 23 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 24 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 25 | 2 NNNNNNNNNNAAAANNNNNNNNAANNNNNNANNNNNANNNNNNNNNNNNNNNNNNNNNNN 26 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNANNAANNNNNANNNNNNNNNNNNNNNNNNN 27 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 28 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 29 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 30 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 31 | 32 | x04 33 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 34 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 35 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 36 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 37 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 38 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 39 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 40 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 41 | 8 NN 42 | 43 | x05 44 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNANNNNNNNNNNNAAAANNANNAAAAAAAAAAAAAA 45 | 1 AAAAAAANNNNNNNNNNNNNNNNNAAAAAAAAAAAAAANNNNNNNNNNNAANNANNAAAA 46 | 2 AAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAA 47 | 3 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 48 | 4 AAAAAAAAAAAAAAAAAAAAAAAAANNNAANNNNNAAAAAAAAAAAAAAAAAAAAAAAAA 49 | 5 ANNNAAAAAAAAAAAAAAAAAAAAAAAAAAAANANNNNNNNNNNNNNNNNNNNNNNNNNN 50 | 6 NNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 51 | 7 NANNNNNNNNNNNNNNNNNNNNNAANAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNN 52 | 8 NNNANNNNNANAANNAAAAAANAAN 53 | 54 | x06 55 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 56 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 57 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 58 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 59 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 60 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 61 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 62 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 63 | 64 | x07 65 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAANNAAAAAAAAAAA 66 | 1 AAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNAAAAAAAAAAAAAAAANNNN 67 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 68 | 3 NNNAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAA 69 | 4 ANNNAAANNNNNNNNNNNAAAAANNNNNNNNNNAAAAAANNAAAANNNNNNNNNNNNNNN 70 | 5 NNNNNNNNNNNNAAAAAAAAAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAAANNAA 71 | 6 AAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 72 | 7 NNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNAAA 73 | 8 AAAAANNNAAANNNNNNNNAANNNNNAAA 74 | 75 | x08 76 | 0 NNNNNNNNNNNNNNNNNANNNNNNNANNNNNNNAAANNAAAAAAAAAANNNAAAAAAAAA 77 | 1 NNNNNNANAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 78 | 2 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNN 79 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 80 | 4 NNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 81 | 5 NNNANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAA 82 | 6 AANAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 83 | 7 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAN 84 | 8 AANAAAAAAAANNAAAAAAAAAAAAAANNNNNNNAAA 85 | 86 | x09 87 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 88 | 1 NNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 89 | 2 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 90 | 3 AAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 91 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAN 92 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 93 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 94 | 7 NNNNNNNNNNNNAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 95 | 8 NNNNNNNNNNNNNNNNNNNNNNNNNNNN 96 | 97 | x10 98 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 99 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAA 100 | 2 ANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 101 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNANNNNNNNNAAAAAAAAANNNNAAAAAAAAAAAA 102 | 4 AAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 103 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 104 | 6 AAAAAAAAAAAAAAAAAAAAAAAAANAAANNNNNNNANNNNNNNNNNNNNNNNNNNNNNN 105 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAA 106 | 8 ANNNNNNNNNNNNNNNAAANNNNNNNNNNN 107 | 108 | x11 109 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 110 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNANNN 111 | 2 NNNNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 112 | 3 NNNNNAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 113 | 4 NNNNNNNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 114 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 115 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNANNANAANNNA 116 | 7 NNNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNN 117 | 118 | x12 119 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 120 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 121 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNAAANNNNNNNNNNNNNAAAAAAANNNNNNNNNNN 122 | 3 NNNNAAAAAAAANNAAAAANNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAANNNNNNNNN 123 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNANNNNAAAAAAAAAANNNNNNNNNNNNNN 124 | 5 NNNNNNNNNNNNNNNNNNNANNNNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNNNN 125 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 126 | 7 NNAANNNNNNNNNNNNNNNNAAAANNNNNNNNNNNNNNAAAAANNNNNNNNNNNNNANNN 127 | 8 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 128 | 129 | x13 130 | 0 AANNNNNAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAANNNNNNAA 131 | 1 AAAAAAAAAAAAAAANNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 132 | 2 AAAAAAAAAANNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 133 | 3 AAAAAAAAAAAAAAAAAAAANNNAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 134 | 4 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 135 | 5 AAAAAAAAAANNNNNNNNNNNNNAAAAAAAAAAAAANNNNNNAAAAAAAAAAAAAANNNN 136 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 137 | 7 NNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNAAAANNNNNN 138 | 8 NNNNNNNNNNNNNNNNAAAAAAAAAA 139 | 140 | x14 141 | 0 NNNNNNNNNNNNNNNNNNNNAAAAANNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAA 142 | 1 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 143 | 2 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNAAAAAAAAA 144 | 3 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 145 | 4 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 146 | 5 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 147 | 6 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNAAAAAAAA 148 | 7 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 149 | 8 AAAAAAAAAA 150 | 151 | x15 152 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 153 | 1 NNNNNNNNNNNAAAAAAAAAAAAAAAANNAAAAANNNNNNNNNNNNNNNNNNNNNNNNNN 154 | 2 NAAAAAAAAAAAAAAAAAAAANNNNNAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAA 155 | 3 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNN 156 | 4 NNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 157 | 5 AAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAA 158 | 6 AAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNAAAA 159 | 7 AAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNA 160 | 8 AAAAAAAAAAANNNNNNN 161 | 162 | x16 163 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 164 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 165 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAANNNNNNNNNNN 166 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 167 | 4 NNNNNNNNNNNNNNNNNNNNNNNNAANANNNNNNAAANNAAAAAAAAAAANNNAAAAANA 168 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAA 169 | 6 AANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 170 | 7 AAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAANN 171 | 8 NNNNNNNNNNNNNNNNNNNNNNNAAAAANNNNNNN 172 | 173 | x17 174 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 175 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 176 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 177 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 178 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 179 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 180 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNANNNNNNNNNNNNNN 181 | 182 | x18 183 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 184 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAN 185 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 186 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 187 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 188 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 189 | 6 NNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 190 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 191 | 192 | x19 193 | 0 NNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAANNAAAAAAAAAAAAAAAAAAAAAA 194 | 1 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAA 195 | 2 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNAAAAAANNNN 196 | 3 NNNAANNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNAAAAAAAA 197 | 4 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 198 | 5 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 199 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 200 | 7 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 201 | 8 AAAAAAA 202 | 203 | x20 204 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAA 205 | 1 AAAAAAAAAAAAAANNANNNNAAAAANNNNNNNNANAAAAAAAAAAAAAAAAANAAAAAA 206 | 2 AAAAAAAAAAAAAANNNNNNNNNNNNNNNNAANAANNNNNNNNNNNAANAAAAAAAAAAA 207 | 3 AAAANNNNNNNNANAAAAAAAAAAAANNAAAAAAAAAAAAAAAAAANNAAAAAAAAAAAA 208 | 4 AAAAAAAAAAANAAAAAAAAAANNNNANAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNN 209 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 210 | 6 AAANAAAAAAAAAAAAAAAAAAAAAAAAAANANNANNNNNANNANANNAAANAAAANNAA 211 | 7 AAAAAANNNAANNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAANNNN 212 | 8 NNNNNNNNNNNNNAAANANAANNNNNNNNNNNN 213 | 214 | x21 215 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 216 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 217 | 2 NAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNANNNNNNNNNNNNNN 218 | 3 NAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNAANNNNN 219 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAANNNNNNNNNNNNNANAAAN 220 | 5 NANNNNNNNNNNNNANNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNN 221 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAANNNNNNNNNNNNNNNNA 222 | 7 AAAAAAAANAAAAAAANNNNNNNNNNAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNN 223 | 8 NNNNNNNANNNNNNNNNNNNNNANNNNNAN 224 | 225 | x22 226 | 0 NNNNNNNNNNNNNNNNNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 227 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 228 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 229 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 230 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 231 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 232 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 233 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNANNNNNNNNNNNN 234 | 8 NN 235 | 236 | x23 237 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 238 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 239 | 2 NAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 240 | 3 AAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 241 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 242 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 243 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 244 | 7 AAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 245 | 8 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 246 | 247 | x24 248 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 249 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 250 | 2 NNNNNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 251 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 252 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 253 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 254 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 255 | 7 NNNNNNNNN 256 | 257 | x25 258 | 0 NNNNNNNNNNNANNANNNNNNNNNNNNNNNAANAAANNNNNNNNAAAAAAAAAAAAAAAA 259 | 1 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 260 | 2 ANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAA 261 | 3 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNN 262 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAA 263 | 5 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNN 264 | 6 NAANNNNNNNNNNNNNNNNNNNNAANAANAAAAAAAAAAAAAAAAAAAANNNNAAAAAAN 265 | 7 NAAAAAANAAAAAAAAAAAAAANNNAAAAAAAAAAAAAAAANANNAAAAAAAAAAAAAAN 266 | 8 ANNAAAAAAAAAAAAANNNNNAAAAAAAAA 267 | 268 | x26 269 | 0 NNNNNNNNNNNNNNNNAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAA 270 | 1 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNAAAAAAAAAAAAA 271 | 2 AAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAANNNNN 272 | 3 NNAAANNNNNNAAANNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAA 273 | 4 AAAAAAAAAAAAAAANNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 274 | 5 NNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 275 | 6 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNN 276 | 7 NNNNAAAANNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 277 | 8 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNN 278 | 279 | x27 280 | 0 NNAAAAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 281 | 1 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNAAAAAAAAA 282 | 2 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 283 | 3 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNAAAAAAAAAAAA 284 | 4 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 285 | 5 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 286 | 6 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 287 | 7 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 288 | 8 AAANAAAAAAAAAAAAAA 289 | 290 | x28 291 | 0 AAAAAAAAAAAAAAANAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 292 | 1 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 293 | 2 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNAAAAAAA 294 | 3 AAAAAAAAAAAAANAAAAAAAANAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNAAAAA 295 | 4 AAAAAAANNAAANNAAAAAAAAAAAAANNAAANAAAAAAAANNNNNNNNAAAAAAAAAAA 296 | 5 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 297 | 6 AAAAAAAAAAAAAAAAAAAAAAANAAAAAAANNNAANNAANNNNNNNNNNNNNNNNNNNN 298 | 7 NNNNNAAAANAAAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANAAAAAA 299 | 8 AAAAAAANNAAAAAN 300 | 301 | x29 302 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 303 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 304 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 305 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 306 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 307 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 308 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 309 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 310 | 311 | x30 312 | 0 NNNNNNNNNNNNNNNNAAAAAAAAAAANNAAAANNNNNNNNNNNNNNNNNNNNNNNNNNN 313 | 1 NNNAAAANNNNNNNNNNNNNNNNNNAAAAAANNAAAAAAAAAAAAANNNNNNNNNNNNAA 314 | 2 NNNNNNNNNNNNAANNNAAAAAAAAANNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 315 | 3 AAAAANNNNNNNAAANNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 316 | 4 AAANNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 317 | 5 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNAA 318 | 6 AAAAAAAAAAAAAAAAAAAAAAANNNNANNNNNNNAAAAAAAANNNNNNNNNNNNNNNNN 319 | 7 NNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 320 | 8 NNNNNNNNNAAAAAAAAAAAAAAAAANNNNN 321 | 322 | x31 323 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNA 324 | 1 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 325 | 2 AAAAAAAAAAAAAAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 326 | 3 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 327 | 4 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 328 | 5 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 329 | 6 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 330 | 7 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNAAAAAAAAAAAAAAA 331 | 8 AAAAAAAAAAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 332 | 9 AAAAAAAAAAAAAAAAA 333 | 334 | x32 335 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAANNNNNNNNAAAAAAAAAAAAAAAAA 336 | 1 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 337 | 2 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 338 | 3 AAAAAAAAAAAAAAAAAAAAAAAAAAANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 339 | 4 NNNNNNNNNNNNNNNNAAANNNNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 340 | 5 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 341 | 6 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 342 | 7 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANN 343 | 8 NNNNNNNNNNNNNNNAAAAANNAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA 344 | 345 | x33 346 | 0 NANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 347 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 348 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNANNNNNNNNN 349 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 350 | 4 NNNNNNNNNNNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 351 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 352 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 353 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 354 | 355 | x34 356 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 357 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 358 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNANNNNNANN 359 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 360 | 4 NNNNNNNNNNNNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 361 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 362 | 6 NNNNNNNNNNNNNANNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 363 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 364 | 365 | x35 366 | 0 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 367 | 1 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 368 | 2 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 369 | 3 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 370 | 4 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 371 | 5 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 372 | 6 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 373 | 7 NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN 374 | 8 NNN 375 | 376 | --------------------------------------------------------------------------------