├── puLearning ├── __init__.py └── puAdapter.py ├── models ├── __init__.py ├── multi_registry.py ├── binary_registry.py ├── svm.py ├── regular.py ├── pu_learning.py └── base_model.py ├── feature_engineering ├── __init__.py ├── registry.py ├── length.py ├── tf_idf.py ├── tf_ilf.py ├── tf.py ├── utils.py └── vectorizer.py ├── preprocess ├── __init__.py ├── registry.py ├── bgl_preprocessor.py ├── open_source_logs.py └── utils.py ├── __init__.py ├── reporting ├── __init__.py ├── accuracy.py ├── macrof1.py ├── microf1.py ├── multi_class_acc.py ├── confusion_matrix.py ├── bb_registry.py ├── wb_registry.py └── top_k_svm.py ├── requirements.txt ├── decorators.py ├── LICENSE ├── .gitignore ├── run_binary.py ├── train_binary.py ├── utils.py ├── train_multi.py ├── compare_pu.py ├── init_params.py ├── logclass.py ├── data └── open_source_logs │ ├── Apache │ └── groundtruth.seq │ ├── proxifier │ └── groundtruth.seq │ ├── hadoop │ └── groundtruth.seq │ ├── zookeeper │ └── groundtruth.seq │ ├── hdfs │ └── groundtruth.seq │ ├── hpc │ └── groundtruth.seq │ └── bgl │ └── groundtruth.seq └── README.md /puLearning/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /models/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ["regular", "svm", "pu_learning"] -------------------------------------------------------------------------------- /feature_engineering/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ["length", "tf_idf", "tf_ilf", "tf"] -------------------------------------------------------------------------------- /preprocess/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | "bgl_preprocessor", 3 | "open_source_logs", 4 | ] 5 | 6 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ["utils", "logclass"] 2 | 3 | from .preprocess import * 4 | from .feature_engineering import * 5 | from .models import * 6 | from .reporting import * 7 | -------------------------------------------------------------------------------- /reporting/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | "accuracy", 3 | "confusion_matrix", 4 | "multi_class_acc", 5 | "top_k_svm", 6 | "microf1", 7 | "macrof1", 8 | ] 9 | -------------------------------------------------------------------------------- /reporting/accuracy.py: -------------------------------------------------------------------------------- 1 | from .bb_registry import register 2 | from sklearn.metrics import f1_score 3 | 4 | 5 | @register('acc') 6 | def model_accuracy(y, pred): 7 | return f1_score(y, pred)  # registered as 'acc', but this reports the binary F1 score, which the training scripts use as their selection metric 8 | -------------------------------------------------------------------------------- /reporting/macrof1.py: -------------------------------------------------------------------------------- 1 | from .bb_registry import register 2 | from sklearn.metrics import f1_score 3 | 4 | 5 | @register('macro') 6 | def model_accuracy(y, pred): 7 | return f1_score(y, pred, average='macro') 8 | -------------------------------------------------------------------------------- /reporting/microf1.py: -------------------------------------------------------------------------------- 1 | from 
.bb_registry import register 2 | from sklearn.metrics import f1_score 3 | 4 | 5 | @register('micro') 6 | def model_accuracy(y, pred): 7 | return f1_score(y, pred, average='micro') 8 | -------------------------------------------------------------------------------- /reporting/multi_class_acc.py: -------------------------------------------------------------------------------- 1 | from .bb_registry import register 2 | from sklearn.metrics import accuracy_score 3 | 4 | 5 | @register('multi_acc') 6 | def model_accuracy(y, pred): 7 | return accuracy_score(y, pred) 8 | -------------------------------------------------------------------------------- /reporting/confusion_matrix.py: -------------------------------------------------------------------------------- 1 | from .bb_registry import register 2 | from sklearn.metrics import confusion_matrix 3 | 4 | 5 | @register('confusion_matrix') 6 | def report(y, pred): 7 | return confusion_matrix(y, pred) 8 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2019.9.11 2 | joblib==0.14.0 3 | numpy==1.17.4 4 | pandas==0.25.3 5 | python-dateutil==2.8.1 6 | pytz==2019.3 7 | scikit-learn==0.21.3 8 | scipy==1.3.3 9 | six==1.13.0 10 | sklearn==0.0 11 | tqdm==4.39.0 12 | wincertstore==0.2 13 | -------------------------------------------------------------------------------- /reporting/bb_registry.py: -------------------------------------------------------------------------------- 1 | """Registry for black box reports or metrics.""" 2 | 3 | _BB_REPORTS = dict() 4 | 5 | 6 | def register(name): 7 | """Registers a new black box report or metric function.""" 8 | 9 | def add_to_dict(func): 10 | _BB_REPORTS[name] = func 11 | return func 12 | 13 | return add_to_dict 14 | 15 | 16 | def get_bb_report(model): 17 | """Fetches the black box report or metric function.""" 18 | return _BB_REPORTS[model] 19 | -------------------------------------------------------------------------------- /reporting/wb_registry.py: -------------------------------------------------------------------------------- 1 | """Registry for white box reports or metrics.""" 2 | 3 | _WB_REPORTS = dict() 4 | 5 | 6 | def register(name): 7 | """Registers a new white box report or metric function.""" 8 | 9 | def add_to_dict(func): 10 | _WB_REPORTS[name] = func 11 | return func 12 | 13 | return add_to_dict 14 | 15 | 16 | def get_wb_report(model): 17 | """Fetches the white box report or metric function.""" 18 | return _WB_REPORTS[model] 19 | -------------------------------------------------------------------------------- /models/multi_registry.py: -------------------------------------------------------------------------------- 1 | """Registry for multi-class models to be used for anomaly classification.""" 2 | 3 | _MULTI_MODELS = dict() 4 | 5 | 6 | def register(name): 7 | """Registers a new multi-class anomaly classification model.""" 8 | 9 | def add_to_dict(func): 10 | _MULTI_MODELS[name] = func 11 | return func 12 | 13 | return add_to_dict 14 | 15 | 16 | def get_multi_model(model): 17 | """Fetches the multi-class anomaly classification model.""" 18 | return _MULTI_MODELS[model] 19 | --------------------------------------------------------------------------------
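All of these registries (black box and white box reports, plus the binary and multi-class model registries) share the same decorator pattern: register(name) stores a function in a module-level dict, and the matching getter looks it up by name. A minimal usage sketch, assuming the repo is importable as a package named `logclass` (the `exact_match` metric here is invented for illustration):

import numpy as np
from logclass.reporting import bb_registry

@bb_registry.register('exact_match')  # hypothetical metric name
def exact_match(y, pred):
    # Fraction of predictions that match the ground truth exactly.
    return float(np.mean(np.asarray(y) == np.asarray(pred)))

metric = bb_registry.get_bb_report('exact_match')
print(metric([1., -1., 1.], [1., 1., 1.]))  # 0.666...

Importing the module that defines a function is what triggers its registration, which is why the package __init__ files above pull every submodule in with wildcard imports.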
/models/binary_registry.py: -------------------------------------------------------------------------------- 1 | """Registry for binary models to be used for anomaly detection.""" 2 | 3 | _BINARY_MODELS = dict() 4 | 5 | 6 | def register(name): 7 | """Registers a new binary classification anomaly detection model.""" 8 | 9 | def add_to_dict(func): 10 | _BINARY_MODELS[name] = func 11 | return func 12 | 13 | return add_to_dict 14 | 15 | 16 | def get_binary_model(model): 17 | """Fetches the binary classification anomaly detection model.""" 18 | return _BINARY_MODELS[model] 19 | -------------------------------------------------------------------------------- /preprocess/registry.py: -------------------------------------------------------------------------------- 1 | """Basic registry for logs preprocessors. These read the raw log file and 2 | output filtered logs, removing parameter words and tokens with non-letter 3 | characters, keeping only text words.""" 4 | 5 | _PREPROCESSORS = dict() 6 | 7 | 8 | def register(name): 9 | """Registers a new logs preprocessor function under the given name.""" 10 | 11 | def add_to_dict(func): 12 | _PREPROCESSORS[name] = func 13 | return func 14 | 15 | return add_to_dict 16 | 17 | 18 | def get_preprocessor(data_src): 19 | """Fetches the logs preprocessor function registered under the given data source name.""" 20 | return _PREPROCESSORS[data_src] 21 | -------------------------------------------------------------------------------- /feature_engineering/registry.py: -------------------------------------------------------------------------------- 1 | """Basic registry for logs vector feature engineering. These take the 2 | log messages as input and extract and return a feature to be appended to 3 | the feature vector.""" 4 | 5 | _FEATURE_EXTRACTORS = dict() 6 | 7 | 8 | def register(name): 9 | """Registers a new log message feature extraction function under the 10 | given name.""" 11 | 12 | def add_to_dict(func): 13 | _FEATURE_EXTRACTORS[name] = func 14 | return func 15 | 16 | return add_to_dict 17 | 18 | 19 | def get_feature_extractor(feature): 20 | """Fetches the feature extraction function registered under the given 21 | feature name.""" 22 | return _FEATURE_EXTRACTORS[feature] 23 | -------------------------------------------------------------------------------- /feature_engineering/length.py: -------------------------------------------------------------------------------- 1 | from .registry import register 2 | import numpy as np 3 | 4 | 5 | @register("length") 6 | def create_length_feature(params, input_vector, **kwargs): 7 | """ 8 | Returns an array of lengths of each tokenized log message from the input. 9 | 10 | Parameters 11 | ---------- 12 | params : dict of experiment parameters. 13 | input_vector : numpy Array vector of word indexes from each log message line. 14 | 15 | Returns 16 | ------- 17 | numpy array of lengths of each tokenized log message from the input 18 | with shape (number_of_logs, 1). 19 | """ 20 | length = np.vectorize(len) 21 | length_feature = length(input_vector) 22 | length_feature = length_feature.reshape(-1, 1) 23 | return length_feature 24 | -------------------------------------------------------------------------------- /feature_engineering/tf_idf.py: -------------------------------------------------------------------------------- 1 | from .registry import register 2 | from .vectorizer import ( 3 | get_tf, 4 | calculate_idf, 5 | calculate_tf_invf_train, 6 | create_invf_vector, 7 | ) 8 | from .utils import save_feature_dict, load_feature_dict 9 | 10 | 11 | @register("tfidf") 12 | def create_tfidf_feature(params, train_vector, **kwargs): 13 | """ 14 | Returns the tf-idf matrix of features. 
15 | """ 16 | if params['train']: 17 | invf_dict = calculate_tf_invf_train( 18 | train_vector, 19 | get_f=get_tf, 20 | calc_invf=calculate_idf 21 | ) 22 | save_feature_dict(params, invf_dict, "tfidf") 23 | else: 24 | invf_dict = load_feature_dict(params, "tfidf") 25 | 26 | features = create_invf_vector( 27 | train_vector, invf_dict, kwargs['vocabulary']) 28 | return features 29 | -------------------------------------------------------------------------------- /feature_engineering/tf_ilf.py: -------------------------------------------------------------------------------- 1 | from .registry import register 2 | from .vectorizer import ( 3 | get_lf, 4 | calculate_ilf, 5 | calculate_tf_invf_train, 6 | create_invf_vector, 7 | ) 8 | from .utils import save_feature_dict, load_feature_dict 9 | 10 | 11 | @register("tfilf") 12 | def create_tfilf_feature(params, train_vector, **kwargs): 13 | """ 14 | Returns the tf-ilf matrix of features. 15 | """ 16 | if params['train']: 17 | invf_dict = calculate_tf_invf_train( 18 | train_vector, 19 | get_f=get_lf, 20 | calc_invf=calculate_ilf 21 | ) 22 | save_feature_dict(params, invf_dict, "tfilf") 23 | else: 24 | invf_dict = load_feature_dict(params, "tfilf") 25 | 26 | features = create_invf_vector( 27 | train_vector, invf_dict, kwargs['vocabulary']) 28 | return features 29 | -------------------------------------------------------------------------------- /decorators.py: -------------------------------------------------------------------------------- 1 | import functools 2 | 3 | 4 | # Borrowed from https://realpython.com/primer-on-python-decorators/ 5 | def debug(func): 6 | """Print the function signature and return value""" 7 | @functools.wraps(func) 8 | def wrapper_debug(*args, **kwargs): 9 | args_repr = [repr(a) for a in args] # 1 10 | kwargs_repr = [f"{k}={v!r}" for k, v in kwargs.items()] # 2 11 | signature = ", ".join(args_repr + kwargs_repr) # 3 12 | print(f"Calling {func.__name__}({signature})") 13 | value = func(*args, **kwargs) 14 | print(f"{func.__name__!r} returned {value!r}") # 4 15 | return value 16 | return wrapper_debug 17 | 18 | 19 | def print_step(func): 20 | """Print the qualified name of the wrapped function when it is called""" 21 | @functools.wraps(func) 22 | def wrapper_print_name(*args, **kwargs): 23 | print(f"Calling {func.__qualname__}") 24 | value = func(*args, **kwargs) 25 | return value 26 | return wrapper_print_name 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Federico Zaiter 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /feature_engineering/tf.py: -------------------------------------------------------------------------------- 1 | from .registry import register 2 | from .vectorizer import get_tf 3 | import numpy as np 4 | from .utils import save_feature_dict, load_feature_dict 5 | 6 | 7 | def create_tf_vector(input_vector, tf_dict, vocabulary): 8 | tf_vector = [] 9 | # Creating the tf vector for each log message 10 | for line in input_vector: 11 | cur_tf_vector = np.zeros(len(vocabulary)) 12 | for token_index in line: 13 | cur_tf_vector[token_index] = len(tf_dict[token_index])  # number of logs containing this token 14 | tf_vector.append(cur_tf_vector) 15 | 16 | tf_vector = np.array(tf_vector) 17 | return tf_vector 18 | 19 | 20 | @register("tf") 21 | def create_term_count_feature(params, input_vector, **kwargs): 22 | """ 23 | Returns, for each log message, a vector holding the number of logs in which each of its tokens appears (see get_tf). 24 | """ 25 | if params['train']: 26 | tf_dict = get_tf(input_vector) 27 | save_feature_dict(params, tf_dict, "tf") 28 | else: 29 | tf_dict = load_feature_dict(params, "tf") 30 | 31 | tf_features =\ 32 | create_tf_vector(input_vector, tf_dict, kwargs['vocabulary']) 33 | 34 | return tf_features 35 | -------------------------------------------------------------------------------- /reporting/top_k_svm.py: -------------------------------------------------------------------------------- 1 | from .wb_registry import register 2 | import numpy as np 3 | 4 | 5 | def get_feature_names(params, vocabulary, add_length=True): 6 | feature_names = zip(vocabulary.keys(), vocabulary.values()) 7 | feature_names = sorted(feature_names, key=lambda x: x[1]) 8 | feature_names = [x[0] for x in feature_names] 9 | if 'length' in params['features']: 10 | feature_names.append('LENGTH') 11 | return np.array(feature_names) 12 | 13 | 14 | @register('top_k_svm') 15 | def get_top_k_SVM_features(params, model, vocabulary, **kwargs): 16 | hparms = { 17 | 'target_names': [], 18 | 'top_features': 5, 19 | } 20 | hparms.update(kwargs) 21 | 22 | top_k_label = {} 23 | feature_names = get_feature_names(params, vocabulary) 24 | for i, label in enumerate(hparms['target_names']): 25 | if len(hparms['target_names']) < 3 and i == 1: 26 | break # coef_ is unidimensional when there are only two labels 27 | coef = model.coef_[i] 28 | top_coefficients = np.argsort(coef)[-hparms['top_features']:] 29 | top_k_features = feature_names[top_coefficients] 30 | top_k_label[label] = list(reversed(top_k_features)) 31 | return top_k_label 32 | -------------------------------------------------------------------------------- /models/svm.py: -------------------------------------------------------------------------------- 1 | from .multi_registry import register 2 | from sklearn.svm import LinearSVC 3 | from .base_model import BaseModel 4 | import os 5 | import pickle 6 | 7 | 8 | class SVMWrapper(BaseModel): 9 | def __init__(self, model, params): 10 | super().__init__(model, params) 11 | 12 | def save(self, **kwargs): 13 | multi_file = os.path.join( 14 | self.params['models_dir'], 15 | "multi.pkl" 16 | ) 17 | with open(multi_file, 'wb') as multi_clf_file: 18 | pickle.dump(self.model, multi_clf_file) 19 | 20 | def load(self, **kwargs): 21 | multi_file = os.path.join( 
22 | self.params['models_dir'], 23 | "multi.pkl" 24 | ) 25 | with open(multi_file, 'rb') as multi_clf_file: 26 | multi_classifier = pickle.load(multi_clf_file) 27 | self.model = multi_classifier 28 | 29 | 30 | @register("svm") 31 | def instatiate_svm(params, **kwargs): 32 | """ 33 | Returns a LinearSVC wrapped by the SVMWrapper. 34 | """ 35 | hparms = { 36 | 'penalty': "l2", 37 | 'dual': False, 38 | 'tol': 1e-1, 39 | } 40 | hparms.update(kwargs) 41 | wrapped_svm = SVMWrapper(LinearSVC(**hparms), params) 42 | return wrapped_svm 43 | -------------------------------------------------------------------------------- /models/regular.py: -------------------------------------------------------------------------------- 1 | from .binary_registry import register 2 | from sklearn.ensemble import RandomForestClassifier 3 | from .base_model import BaseModel 4 | import os 5 | import pickle 6 | 7 | 8 | class RegularClassifierWrapper(BaseModel): 9 | def __init__(self, model, params): 10 | super().__init__(model, params) 11 | 12 | def save(self, **kwargs): 13 | regular_file = os.path.join( 14 | self.params['models_dir'], 15 | "regular.pkl" 16 | ) 17 | with open(regular_file, 'wb') as regular_clf_file: 18 | pickle.dump(self.model, regular_clf_file) 19 | 20 | def load(self, **kwargs): 21 | regular_file = os.path.join( 22 | self.params['models_dir'], 23 | "regular.pkl" 24 | ) 25 | with open(regular_file, 'rb') as regular_clf_file: 26 | regular_classifier = pickle.load(regular_clf_file) 27 | self.model = regular_classifier 28 | 29 | 30 | @register("regular") 31 | def instatiate_regular_classifier(params, **kwargs): 32 | """ 33 | Returns a plain RandomForestClassifier wrapped by the RegularClassifierWrapper. 34 | """ 35 | hparms = { 36 | 'n_estimators': 10, 37 | 'bootstrap': True, 38 | 'n_jobs': -1, 39 | } 40 | hparms.update(kwargs) 41 | wrapped_regular = RegularClassifierWrapper( 42 | RandomForestClassifier(**hparms), params) 43 | return wrapped_regular 44 | -------------------------------------------------------------------------------- /preprocess/bgl_preprocessor.py: -------------------------------------------------------------------------------- 1 | from .registry import register 2 | from .utils import process_logs, remove_parameters 3 | import re 4 | 5 | 6 | recid_regx = re.compile(r"^(\d+)") 7 | separator = re.compile(r"(?:-.{1,3}){2} (.+)$") 8 | msg_split_regx = re.compile(r"x'.+'") 9 | severity = re.compile(r"(\w+)\s+(INFO|WARN|ERROR|FATAL)") 10 | 11 | 12 | def process_line(line): 13 | line = line.strip() 14 | sep = separator.search(line) 15 | if sep: 16 | msg = sep.group(1).strip().split(' ')[-1].strip() 17 | msg = msg_split_regx.split(msg)[-1].strip() 18 | error_label = severity.search(line) 19 | recid = recid_regx.search(line) 20 | if recid and error_label and len(msg) > 20: 21 | # recid = recid.group(1).strip() We may want to use it later 22 | general_label = error_label.group(2) 23 | label = error_label.group(1) 24 | if general_label == 'WARN': 25 | return '' 26 | if general_label == 'INFO': # or label == 'WARN': 27 | label = 'unlabeled' 28 | msg = remove_parameters(msg) 29 | if msg: 30 | msg = ' '.join((label, msg)) 31 | msg = ''.join((msg, '\n')) 32 | return msg 33 | return '' 34 | 35 | 36 | @register("bgl") 37 | def preprocess_dataset(params): 38 | """ 39 | Runs the BGL logs preprocessing executor. 
40 | """ 41 | input_source = params['raw_logs'] 42 | output = params['logs'] 43 | params['healthy_label'] = 'unlabeled' 44 | process_logs(input_source, output, process_line) 45 | -------------------------------------------------------------------------------- /preprocess/open_source_logs.py: -------------------------------------------------------------------------------- 1 | import os 2 | from multiprocessing import Pool 3 | 4 | from tqdm import tqdm 5 | 6 | from .registry import register 7 | from .utils import remove_parameters 8 | 9 | 10 | def process_line(line): 11 | label = line[0].strip() 12 | msg = " ".join(line[1].strip().split()[1:]) 13 | msg = remove_parameters(msg) 14 | if msg: 15 | msg = " ".join((label, msg)) 16 | msg = "".join((msg, "\n")) 17 | return msg 18 | return "" 19 | 20 | 21 | def process_open_source(input_source, output): 22 | with open(output, "w", encoding="latin-1") as f: 23 | gtruth = os.path.join(input_source, "groundtruth.seq") 24 | rawlog = os.path.join(input_source, "rawlog.log") 25 | with open(gtruth, "r", encoding="latin-1") as IN: 26 | line_count = sum(1 for line in IN) 27 | with open(gtruth, "r", encoding="latin-1") as in_gtruth: 28 | with open(rawlog, "r", encoding="latin-1") as in_log: 29 | IN = zip(in_gtruth, in_log) 30 | with Pool() as pool: 31 | results = pool.imap(process_line, IN, chunksize=10000) 32 | f.writelines(tqdm(results, total=line_count)) 33 | 34 | 35 | open_source_datasets = [ 36 | "open_Apache", 37 | "open_bgl", 38 | "open_hadoop", 39 | "open_hdfs", 40 | "open_hpc", 41 | "open_proxifier", 42 | "open_zookeeper", 43 | ] 44 | for dataset in open_source_datasets: 45 | 46 | @register(dataset) 47 | def preprocess_dataset(params): 48 | """ 49 | Runs open source logs preprocessing executor. 50 | """ 51 | input_source = params["raw_logs"] 52 | output = params["logs"] 53 | params["healthy_label"] = "NA" 54 | process_open_source(input_source, output) 55 | -------------------------------------------------------------------------------- /models/pu_learning.py: -------------------------------------------------------------------------------- 1 | from .binary_registry import register 2 | from ..puLearning.puAdapter import PUAdapter 3 | from sklearn.ensemble import RandomForestClassifier 4 | from .base_model import BaseModel 5 | import os 6 | import pickle 7 | 8 | 9 | class PUAdapterWrapper(BaseModel): 10 | def __init__(self, model, params): 11 | super().__init__(model, params) 12 | 13 | def save(self, **kwargs): 14 | pu_estimator_file = os.path.join( 15 | self.params['models_dir'], 16 | "pu_estimator.pkl" 17 | ) 18 | pu_saver = {'estimator': self.model.estimator, 19 | 'c': self.model.c} 20 | with open(pu_estimator_file, 'wb') as pu_estimator_file: 21 | pickle.dump(pu_saver, pu_estimator_file) 22 | 23 | def load(self, **kwargs): 24 | pu_estimator_file = os.path.join( 25 | self.params['models_dir'], 26 | "pu_estimator.pkl" 27 | ) 28 | with open(pu_estimator_file, 'rb') as pu_estimator_file: 29 | pu_saver = pickle.load(pu_estimator_file) 30 | estimator = pu_saver['estimator'] 31 | pu_estimator = PUAdapter(estimator) 32 | pu_estimator.c = pu_saver['c'] 33 | pu_estimator.estimator_fitted = True 34 | self.model = pu_estimator 35 | 36 | 37 | @register("pu_learning") 38 | def instatiate_pu_adapter(params, **kwargs): 39 | """ 40 | Returns a RF adapted to do PU Learning wrapped by the PUAdapterWrapper. 
41 | """ 42 | hparms = { 43 | 'n_estimators': 10, 44 | 'criterion': "entropy", 45 | 'bootstrap': True, 46 | 'n_jobs': -1, 47 | } 48 | hparms.update(kwargs) 49 | estimator = RandomForestClassifier(**hparms) 50 | wrapped_pu_estimator = PUAdapterWrapper(PUAdapter(estimator), params) 51 | return wrapped_pu_estimator 52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | *.pyc 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | output/ 24 | wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | .hypothesis/ 50 | .pytest_cache/ 51 | 52 | # Translations 53 | *.mo 54 | *.pot 55 | 56 | # Django stuff: 57 | *.log 58 | local_settings.py 59 | db.sqlite3 60 | 61 | # Flask stuff: 62 | instance/ 63 | .webassets-cache 64 | 65 | # Scrapy stuff: 66 | .scrapy 67 | 68 | # Sphinx documentation 69 | docs/_build/ 70 | 71 | # PyBuilder 72 | target/ 73 | 74 | # Jupyter Notebook 75 | .ipynb_checkpoints 76 | 77 | # pyenv 78 | .python-version 79 | 80 | # celery beat schedule file 81 | celerybeat-schedule 82 | 83 | # SageMath parsed files 84 | *.sage.py 85 | 86 | # Environments 87 | .env 88 | .venv 89 | env/ 90 | venv/ 91 | ENV/ 92 | env.bak/ 93 | venv.bak/ 94 | 95 | # Spyder project settings 96 | .spyderproject 97 | .spyproject 98 | 99 | # Rope project settings 100 | .ropeproject 101 | 102 | # mkdocs documentation 103 | /site 104 | 105 | # mypy 106 | .mypy_cache/ 107 | 108 | # vs settings 109 | .vscode/ 110 | 111 | # data 112 | data/** 113 | !data/open_source_logs/ 114 | !data/open_source_logs/** 115 | -------------------------------------------------------------------------------- /run_binary.py: -------------------------------------------------------------------------------- 1 | from .utils import ( 2 | load_params, 3 | file_handling, 4 | print_params, 5 | ) 6 | from .preprocess import registry as preprocess_registry 7 | from .preprocess.utils import load_logs 8 | from .feature_engineering.utils import ( 9 | binary_train_gtruth, 10 | extract_features, 11 | ) 12 | from .models import binary_registry as binary_classifier_registry 13 | from .reporting import bb_registry as black_box_report_registry 14 | from .init_params import init_main_args, parse_main_args 15 | 16 | 17 | def init_args(): 18 | """Init command line args used for configuration.""" 19 | 20 | parser = init_main_args() 21 | return parser.parse_args() 22 | 23 | 24 | def parse_args(args): 25 | """Parse provided args for runtime configuration.""" 26 | params = parse_main_args(args) 27 | params.update({'train': False}) 28 | return params 29 | 30 | 31 | def inference(params, x_data, y_data, target_names): 32 | # Inference 33 | # Feature engineering 34 | x_test, _ = extract_features(x_data, params) 35 | # Binary 
test ground truth 36 | y_test = binary_train_gtruth(y_data) 37 | # Binary PU estimator with RF 38 | # Load Trained PU Estimator 39 | binary_clf_getter =\ 40 | binary_classifier_registry.get_binary_model( 41 | params['binary_classifier']) 42 | binary_clf = binary_clf_getter(params) 43 | binary_clf.load() 44 | # Anomaly detection 45 | y_pred_pu = binary_clf.predict(x_test) 46 | get_accuracy = black_box_report_registry.get_bb_report('acc') 47 | binary_acc = get_accuracy(y_test, y_pred_pu) 48 | 49 | print(binary_acc) 50 | for report in params['report']: 51 | try: 52 | get_bb_report = black_box_report_registry.get_bb_report(report) 53 | result = get_bb_report(y_test, y_pred_pu) 54 | except Exception: 55 | pass 56 | else: 57 | print(f'Binary classification {report} report:') 58 | print(result) 59 | 60 | 61 | def main(): 62 | # Init params 63 | params = parse_args(init_args()) 64 | load_params(params) 65 | print_params(params) 66 | file_handling(params) 67 | # Preprocess raw logs into filtered logs 68 | if "raw_logs" in params: 69 | preprocess = preprocess_registry.get_preprocessor(params['logs_type']) 70 | preprocess(params) 71 | # Load filtered logs from file 72 | print('Loading logs') 73 | x_data, y_data, target_names = load_logs(params) 74 | inference(params, x_data, y_data, target_names) 75 | 76 | 77 | if __name__ == "__main__": 78 | main() 79 | -------------------------------------------------------------------------------- /models/base_model.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from time import time 3 | from ..decorators import print_step 4 | 5 | 6 | class BaseModel(ABC): 7 | """ Abstract class used to wrap models and add further functionality. 8 | 9 | Attributes 10 | ---------- 11 | model : model that implements fit and predict functions as sklearn 12 | ML models do. 13 | params : dict of experiment parameters. 14 | name : str of the original model class name. 15 | train_time : time it took to run fit in seconds. 16 | run_time : time it took to run predict in seconds. 17 | 18 | Methods 19 | ------- 20 | save(self, **kwargs) 21 | Abstract method for the subclass to implement how the model is 22 | saved. Should use the experiment id as reference. 23 | load(self, **kwargs) 24 | Abstract method for the subclass to implement how it's meant to be 25 | loaded. Should correspond to how the save method saves the model. 26 | predict(self, X, **kwargs) 27 | Wraps original model predict and times its running time. 28 | fit(self, X, Y, **kwargs) 29 | Wraps original model fit, times fit running time and saves the model. 30 | 31 | """ 32 | def __init__(self, model, params): 33 | self.model = model 34 | self.params = params 35 | self.name = type(model).__name__ 36 | self.train_time = None 37 | self.run_time = None 38 | 39 | @abstractmethod 40 | def save(self, **kwargs): 41 | """ 42 | Abstract method for the subclass to implement how the model is 43 | saved. Should use the experiment id as reference. 44 | """ 45 | pass 46 | 47 | @abstractmethod 48 | def load(self, **kwargs): 49 | """ 50 | Abstract method for the subclass to implement how it's meant to be 51 | loaded. Should correspond to how the save method saves the model. 52 | """ 53 | pass 54 | 55 | @print_step 56 | def predict(self, X, **kwargs): 57 | """ 58 | Wraps original model predict and times its running time. 
59 | """ 60 | t0 = time() 61 | pred = self.model.predict(X, **kwargs) 62 | t1 = time() 63 | lapse = t1 - t0 64 | self.run_time = lapse 65 | print(f"{self.name} took {lapse}s to run inference.") 66 | return pred 67 | 68 | @print_step 69 | def fit(self, X, Y, **kwargs): 70 | """ 71 | Wraps original model fit, times fit running time and saves the model. 72 | """ 73 | t0 = time() 74 | self.model.fit(X, Y, **kwargs) 75 | t1 = time() 76 | lapse = t1 - t0 77 | self.train_time = lapse 78 | print(f"{self.name} took {lapse}s to train.") 79 | self.save() 80 | -------------------------------------------------------------------------------- /feature_engineering/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | import numpy as np 4 | from .vectorizer import log_to_vector, build_vocabulary 5 | from . import registry as feature_registry 6 | from ..decorators import print_step 7 | 8 | 9 | def load_feature_dict(params, name): 10 | dict_file = os.path.join(params['features_dir'], f"{name}.pkl") 11 | with open(dict_file, "rb") as fp: 12 | feat_dict = pickle.load(fp) 13 | return feat_dict 14 | 15 | 16 | def save_feature_dict(params, feat_dict, name): 17 | dict_file = os.path.join(params['features_dir'], f"{name}.pkl") 18 | with open(dict_file, "wb") as fp: 19 | pickle.dump(feat_dict, fp) 20 | 21 | 22 | def binary_train_gtruth(y): 23 | return np.where(y == -1.0, -1.0, 1.0) 24 | 25 | 26 | def multi_features(x, y): 27 | anomalous = (y != -1) 28 | x_multi, y_multi = x[anomalous], y[anomalous] 29 | return x_multi, y_multi 30 | 31 | 32 | @print_step 33 | def get_features_vector(log_vector, vocabulary, params): 34 | """ Extracts all specified features from the vectorized logs. 35 | 36 | For each feature specified in params it gets the feature function from the 37 | feature registry and applies to the data. 38 | A numpy array vector of shape (number_of_logs, N) is expected for each to 39 | be concatenated along the second axis. 40 | 41 | Parameters 42 | ---------- 43 | log_vector : numpy Array vector of word indexes from each log message line. 44 | vocabulary : dict mapping a word to an index. 45 | params : dict of experiment parameters. 46 | 47 | Returns 48 | ------- 49 | x_features : numpy ndArray of all specified features. 50 | 51 | """ 52 | feature_vectors = [] 53 | for feature in params['features']: 54 | extract_feature = feature_registry.get_feature_extractor(feature) 55 | feature_vector = extract_feature( 56 | params, log_vector, vocabulary=vocabulary) 57 | feature_vectors.append(feature_vector) 58 | X = np.hstack(feature_vectors) 59 | return X 60 | 61 | 62 | @print_step 63 | def extract_features(x, params): 64 | """ Gets vocabulary and specified features from the preprocessed logs. 65 | 66 | Creates a vocabulary from the preprocessed logs to vectorize each message. 67 | Extracts all specified features in params from the logs vector and 68 | vocabulary, then returns them both. 69 | 70 | Parameters 71 | ---------- 72 | x : list of preprocessed logs. One log message per line. 73 | params : dict of experiment parameters. 74 | 75 | Returns 76 | ------- 77 | x_features : numpy ndArray of all specified features. 78 | vocabulary : dict mapping a word to an index. 
79 | 80 | """ 81 | # Build Vocabulary 82 | if params['train']: 83 | vocabulary = build_vocabulary(x) 84 | save_feature_dict(params, vocabulary, "vocab") 85 | else: 86 | vocabulary = load_feature_dict(params, "vocab") 87 | # Feature Engineering 88 | x_vector = log_to_vector(x, vocabulary) 89 | x_features = get_features_vector(x_vector, vocabulary, params) 90 | return x_features, vocabulary 91 | -------------------------------------------------------------------------------- /train_binary.py: -------------------------------------------------------------------------------- 1 | from sklearn.model_selection import StratifiedKFold 2 | from .utils import ( 3 | save_params, 4 | file_handling, 5 | TestingParameters, 6 | print_params, 7 | ) 8 | from .preprocess import registry as preprocess_registry 9 | from .preprocess.utils import load_logs 10 | from .feature_engineering.utils import ( 11 | binary_train_gtruth, 12 | extract_features, 13 | ) 14 | from tqdm import tqdm 15 | from .models import binary_registry as binary_classifier_registry 16 | from .reporting import bb_registry as black_box_report_registry 17 | from .init_params import init_main_args, parse_main_args 18 | 19 | 20 | def init_args(): 21 | """Init command line args used for configuration.""" 22 | 23 | parser = init_main_args() 24 | return parser.parse_args() 25 | 26 | 27 | def parse_args(args): 28 | """Parse provided args for runtime configuration.""" 29 | params = parse_main_args(args) 30 | params.update({'train': True}) 31 | return params 32 | 33 | 34 | def train(params, x_data, y_data, target_names): 35 | # KFold Cross Validation 36 | kfold = StratifiedKFold(n_splits=params['kfold']).split(x_data, y_data) 37 | best_pu_fs = 0. 38 | for train_index, test_index in tqdm(kfold): 39 | x_train, x_test = x_data[train_index], x_data[test_index] 40 | y_train, y_test = y_data[train_index], y_data[test_index] 41 | x_train, _ = extract_features(x_train, params) 42 | with TestingParameters(params): 43 | x_test, _ = extract_features(x_test, params) 44 | # Binary ground truth labels 45 | y_test_pu = binary_train_gtruth(y_test) 46 | y_train_pu = binary_train_gtruth(y_train) 47 | # Binary PULearning with RF 48 | binary_clf_getter =\ 49 | binary_classifier_registry.get_binary_model( 50 | params['binary_classifier']) 51 | binary_clf = binary_clf_getter(params) 52 | binary_clf.fit(x_train, y_train_pu) 53 | y_pred_pu = binary_clf.predict(x_test) 54 | get_accuracy = black_box_report_registry.get_bb_report('acc') 55 | binary_acc = get_accuracy(y_test_pu, y_pred_pu) 56 | better_results = binary_acc > best_pu_fs 57 | if better_results: 58 | # Keep the best scoring fold's params and model 59 | best_pu_fs = binary_acc 60 | save_params(params) 61 | binary_clf.save() 62 | print(binary_acc) 63 | 64 | for report in params['report']: 65 | try: 66 | get_bb_report = black_box_report_registry.get_bb_report(report) 67 | result = get_bb_report(y_test_pu, y_pred_pu) 68 | except Exception: 69 | pass 70 | else: 71 | print(f'Binary classification {report} report:') 72 | print(result) 73 | 74 | 75 | def main(): 76 | # Init params 77 | params = parse_args(init_args()) 78 | file_handling(params) 79 | # Preprocess raw logs into filtered logs 80 | if "raw_logs" in params: 81 | preprocess = preprocess_registry.get_preprocessor(params['logs_type']) 82 | preprocess(params) 83 | # Load filtered logs from file 84 | print('Loading logs') 85 | x_data, y_data, target_names = load_logs(params) 86 | print_params(params) 87 | train(params, x_data, y_data, target_names) 88 | 89 | 90 | if __name__ == "__main__": 91 | main() 92 | --------------------------------------------------------------------------------
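Note how binary_train_gtruth above collapses every anomaly class into a single positive class for the detection stage: the healthy/unlabeled label (-1.0) stays -1.0 and any anomaly class id (0.0, 1.0, ...) becomes 1.0. A tiny sketch, assuming the repo is importable as a package named `logclass`:

import numpy as np
from logclass.feature_engineering.utils import binary_train_gtruth

y = np.array([-1., 0., 3., -1., 1.])
print(binary_train_gtruth(y))  # [-1.  1.  1. -1.  1.]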
/preprocess/utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | import numpy as np 3 | from tqdm import tqdm 4 | from ..decorators import print_step 5 | from multiprocessing import Pool 6 | 7 | 8 | # Compiling for optimization 9 | re_sub_1 = re.compile(r"(:(?=\s))|((?<=\s):)") 10 | re_sub_2 = re.compile(r"(\d+\.)+\d+") 11 | re_sub_3 = re.compile(r"\d{2}:\d{2}:\d{2}") 12 | re_sub_4 = re.compile(r"Mar|Apr|Dec|Jan|Feb|Nov|Oct|May|Jun|Jul|Aug|Sep") 13 | re_sub_5 = re.compile(r":?(\w+:)+") 14 | re_sub_6 = re.compile(r"\.|\(|\)|\<|\>|\/|\-|\=|\[|\]") 15 | p = re.compile(r"[^(A-Za-z)]") 16 | def remove_parameters(msg): 17 | # Removing parameters with Regex 18 | msg = re.sub(re_sub_1, "", msg) 19 | msg = re.sub(re_sub_2, "", msg) 20 | msg = re.sub(re_sub_3, "", msg) 21 | msg = re.sub(re_sub_4, "", msg) 22 | msg = re.sub(re_sub_5, "", msg) 23 | msg = re.sub(re_sub_6, " ", msg) 24 | L = msg.split() 25 | # Filtering strings that have non-letter tokens 26 | new_msg = [k for k in L if not p.search(k)] 27 | msg = " ".join(new_msg) 28 | return msg 29 | 30 | 31 | def remove_parameters_slower(msg):  # uncompiled reference version of remove_parameters 32 | # Removing parameters with Regex 33 | msg = re.sub(r"(:(?=\s))|((?<=\s):)", "", msg) 34 | msg = re.sub(r"(\d+\.)+\d+", "", msg) 35 | msg = re.sub(r"\d{2}:\d{2}:\d{2}", "", msg) 36 | msg = re.sub(r"Mar|Apr|Dec|Jan|Feb|Nov|Oct|May|Jun|Jul|Aug|Sep", "", msg) 37 | msg = re.sub(r":?(\w+:)+", "", msg) 38 | msg = re.sub(r"\.|\(|\)|\<|\>|\/|\-|\=|\[|\]", " ", msg) 39 | L = msg.split() 40 | p = re.compile("[^(A-Za-z)]") 41 | # Filtering strings that have non-letter tokens 42 | new_msg = [k for k in L if not p.search(k)] 43 | msg = " ".join(new_msg) 44 | return msg 45 | 46 | 47 | @print_step 48 | def process_logs(input_source, output, process_line=None): 49 | with open(output, "w", encoding='latin-1') as f: 50 | # counting first to show progress with tqdm 51 | with open(input_source, 'r', encoding='latin-1') as IN: 52 | line_count = sum(1 for line in IN) 53 | with open(input_source, 'r', encoding='latin-1') as IN: 54 | with Pool() as pool: 55 | results = pool.imap(process_line, IN, chunksize=10000) 56 | f.writelines(tqdm(results, total=line_count)) 57 | 58 | 59 | @print_step 60 | def load_logs(params, ignore_unlabeled=False): 61 | log_path = params['logs'] 62 | unlabel_label = params['healthy_label'] 63 | x_data = [] 64 | y_data = [] 65 | label_dict = {} 66 | target_names = [] 67 | with open(log_path, 'r', encoding='latin-1') as IN: 68 | line_count = sum(1 for line in IN) 69 | with open(log_path, 'r', encoding='latin-1') as IN: 70 | for line in tqdm(IN, total=line_count): 71 | L = line.strip().split() 72 | label = L[0] 73 | if label not in label_dict: 74 | if ignore_unlabeled and label == unlabel_label: 75 | continue 76 | if label == unlabel_label: 77 | label_dict[label] = -1.0 78 | else: 79 | label_dict[label] = len(label_dict) 80 | target_names.append(label) 81 | x_data.append(" ".join(L[1:])) 82 | y_data.append(label_dict[label]) 83 | x_data = np.array(x_data) 84 | y_data = np.array(y_data) 85 | return x_data, y_data, target_names 86 | --------------------------------------------------------------------------------
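To make the filtering above concrete, this is what remove_parameters does to a typical raw message (a sketch, assuming the repo is importable as a package named `logclass`; the sample log line is invented):

from logclass.preprocess.utils import remove_parameters

msg = "PacketResponder 1 for block blk_38865049064139660 terminating"
print(remove_parameters(msg))  # "PacketResponder for block terminating"
# "1" and "blk_38865049064139660" contain non-letter characters, so they are dropped.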
/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import shutil 4 | import pandas as pd 5 | 6 | 7 | # trim is only used when showing the top keywords for each class 8 | def trim(s): 9 | """Trim string to fit on terminal (assuming 80-column display)""" 10 | return s if len(s) <= 80 else s[:77] + "..." 11 | 12 | 13 | class TestingParameters(): 14 | def __init__(self, params): 15 | self.params = params 16 | self.original_state = params['train'] 17 | 18 | def __enter__(self): 19 | self.params['train'] = False 20 | 21 | def __exit__(self, exc_type, exc_value, traceback): 22 | self.params['train'] = self.original_state 23 | 24 | 25 | def load_params(params): 26 | params_file = os.path.join( 27 | params['id_dir'], "best_params.json") 28 | with open(params_file, "r") as fp: 29 | best_params = json.load(fp) 30 | params.update(best_params) 31 | 32 | 33 | def save_params(params): 34 | params_file = os.path.join( 35 | params['id_dir'], "best_params.json") 36 | with open(params_file, "w") as fp: 37 | json.dump(params, fp) 38 | 39 | 40 | def file_handling(params): 41 | if "raw_logs" in params: 42 | if not os.path.exists(params['raw_logs']): 43 | raise FileNotFoundError( 44 | f"File {params['raw_logs']} doesn't exist. " 45 | + "Please provide the raw logs path." 46 | ) 47 | logs_directory = os.path.dirname(params['logs']) 48 | if not os.path.exists(logs_directory): 49 | os.makedirs(logs_directory) 50 | else: 51 | # Checks if preprocessed logs exist as input 52 | if not os.path.exists(params['logs']): 53 | raise FileNotFoundError( 54 | f"File {params['logs']} doesn't exist. " 55 | + "Preprocess target logs first and provide their path." 56 | ) 57 | 58 | if params['train']: 59 | # Checks if the experiment id already exists 60 | if os.path.exists(params["id_dir"]) and not params["force"]: 61 | raise FileExistsError( 62 | f"directory '{params['id_dir']}' already exists. " 63 | + "Run with --force to overwrite. " 64 | + "If --force is used, you could lose your training results." 65 | ) 66 | if os.path.exists(params["id_dir"]): 67 | shutil.rmtree(params["id_dir"]) 68 | for target_dir in ['id_dir', 'models_dir', 'features_dir']: 69 | os.makedirs(params[target_dir]) 70 | else: 71 | # Checks if input models and features are provided 72 | for concern in ['models_dir', 'features_dir']: 73 | target_path = params[concern] 74 | if not os.path.exists(target_path): 75 | raise FileNotFoundError( 76 | "directory '{}' doesn't exist. ".format(target_path) 77 | + "Run train first before running inference."
78 | ) 79 | 80 | 81 | def print_params(params): 82 | print("{:-^80}".format("params")) 83 | print("Beginning experiment using the following configuration:\n") 84 | for param, value in params.items(): 85 | print("\t{:>13}: {}".format(param, value)) 86 | print() 87 | print("-" * 80) 88 | 89 | 90 | def save_results(results, params): 91 | df = pd.DataFrame(results) 92 | file_name = os.path.join( 93 | params['id_dir'], 94 | "results.csv", 95 | ) 96 | df.to_csv(file_name, index=False) 97 | -------------------------------------------------------------------------------- /train_multi.py: -------------------------------------------------------------------------------- 1 | from sklearn.model_selection import StratifiedKFold 2 | from .utils import ( 3 | save_params, 4 | file_handling, 5 | TestingParameters, 6 | print_params, 7 | save_results, 8 | ) 9 | from .preprocess import registry as preprocess_registry 10 | from .preprocess.utils import load_logs 11 | from .feature_engineering.utils import ( 12 | multi_features, 13 | extract_features, 14 | ) 15 | from tqdm import tqdm 16 | from .models import multi_registry as multi_classifier_registry 17 | from .reporting import bb_registry as black_box_report_registry 18 | from .init_params import init_main_args, parse_main_args 19 | 20 | 21 | def init_args(): 22 | """Init command line args used for configuration.""" 23 | 24 | parser = init_main_args() 25 | return parser.parse_args() 26 | 27 | 28 | def parse_args(args): 29 | """Parse provided args for runtime configuration.""" 30 | params = parse_main_args(args) 31 | params.update({'train': True}) 32 | return params 33 | 34 | 35 | def init_results(): 36 | results = { 37 | 'exp_name': [], 38 | 'logs_type': [], 39 | 'macro': [], 40 | 'micro': [], 41 | 'train_time': [], 42 | 'run_time': [], 43 | } 44 | return results 45 | 46 | 47 | def add_result(results, params, macro, micro, train_time, run_time): 48 | results['exp_name'].append(params['id']) 49 | results['logs_type'].append(params['logs_type']) 50 | results['macro'].append(macro) 51 | results['micro'].append(micro) 52 | results['train_time'].append(train_time) 53 | results['run_time'].append(run_time) 54 | 55 | 56 | def train(params, x_data, y_data, target_names): 57 | results = init_results() 58 | # KFold Cross Validation 59 | kfold = StratifiedKFold(n_splits=params['kfold']).split(x_data, y_data) 60 | best_multi = 0. 
61 | for train_index, test_index in tqdm(kfold): 62 | # Test & Train are interchanged to enable testing with 10% of the data 63 | if params['swap']: 64 | x_test, x_train = x_data[train_index], x_data[test_index] 65 | y_test, y_train = y_data[train_index], y_data[test_index] 66 | else: 67 | x_train, x_test = x_data[train_index], x_data[test_index] 68 | y_train, y_test = y_data[train_index], y_data[test_index] 69 | x_train, _ = extract_features(x_train, params) 70 | print(y_train.shape, y_test.shape) 71 | with TestingParameters(params): 72 | x_test, _ = extract_features(x_test, params) 73 | # Multi-class training features 74 | x_train_multi, y_train_multi =\ 75 | multi_features(x_train, y_train) 76 | x_test_multi, y_test_multi = multi_features(x_test, y_test) 77 | # MultiClass 78 | multi_classifier_getter =\ 79 | multi_classifier_registry.get_multi_model(params['multi_classifier']) 80 | multi_classifier = multi_classifier_getter(params) 81 | multi_classifier.fit(x_train_multi, y_train_multi) 82 | pred = multi_classifier.predict(x_test_multi) 83 | get_multi_acc = black_box_report_registry.get_bb_report('macro') 84 | macro = get_multi_acc(y_test_multi, pred) 85 | get_multi_acc = black_box_report_registry.get_bb_report('micro') 86 | micro = get_multi_acc(y_test_multi, pred) 87 | better_results = macro > best_multi 88 | if better_results: 89 | save_params(params) 90 | best_multi = macro 91 | print(macro) 92 | 93 | add_result( 94 | results, 95 | params, 96 | macro, 97 | micro, 98 | multi_classifier.train_time, 99 | multi_classifier.run_time 100 | ) 101 | 102 | save_results(results, params) 103 | 104 | 105 | def main(): 106 | # Init params 107 | params = parse_args(init_args()) 108 | print_params(params) 109 | file_handling(params) 110 | # Preprocess raw logs into filtered logs 111 | if "raw_logs" in params: 112 | preprocess = preprocess_registry.get_preprocessor(params['logs_type']) 113 | preprocess(params) 114 | # Load filtered logs from file 115 | x_data, y_data, target_names = load_logs(params) 116 | train(params, x_data, y_data, target_names) 117 | 118 | 119 | if __name__ == "__main__": 120 | main() 121 | -------------------------------------------------------------------------------- /feature_engineering/vectorizer.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from ..decorators import print_step 3 | from collections import defaultdict, Counter 4 | 5 | 6 | def get_ngrams(n, line): 7 | line = line.strip().split() 8 | cur_len = len(line) 9 | ngrams_list = [] 10 | if cur_len == 0: 11 | # Token list is empty 12 | pass 13 | elif cur_len < n: 14 | # Token list fits in one ngram 15 | ngrams_list.append(" ".join(line)) 16 | else: 17 | # Token list spans multiple ngrams 18 | loop_num = cur_len - n + 1 19 | for i in range(loop_num): 20 | cur_gram = " ".join(line[i: i + n]) 21 | ngrams_list.append(cur_gram) 22 | return ngrams_list 23 | 24 | 25 | def tokenize(line): 26 | return line.strip().split() 27 | 28 | 29 | @print_step 30 | def build_vocabulary(inputData): 31 | """ Divides log into tokens and creates vocabulary. 
32 | 33 | Parameters 34 | ---------- 35 | inputData: list of log message lines 36 | 37 | Returns 38 | ------- 39 | vocabulary : word to index dict 40 | 41 | """ 42 | vocabulary = {} 43 | for line in inputData: 44 | token_list = tokenize(line) 45 | for token in token_list: 46 | if token not in vocabulary: 47 | vocabulary[token] = len(vocabulary) 48 | return vocabulary 49 | 50 | 51 | @print_step 52 | def log_to_vector(inputData, vocabulary): 53 | """ Vectorizes each log message using a dict of words to index. 54 | 55 | Parameters 56 | ---------- 57 | inputData: list of log message lines. 58 | vocabulary : word to index dict. 59 | 60 | Returns 61 | ------- 62 | numpy Array vector of word indexes from each log message line. 63 | 64 | """ 65 | result = [] 66 | for line in inputData: 67 | temp = [] 68 | token_list = tokenize(line) 69 | if token_list: 70 | for token in token_list: 71 | if token not in vocabulary: 72 | continue 73 | else: 74 | temp.append(vocabulary[token]) 75 | result.append(temp) 76 | return np.array(result) 77 | 78 | 79 | def setTrainDataForILF(x, y): 80 | x_res, indices = np.unique(x, return_index=True) 81 | y_res = y[indices] 82 | return x_res, y_res 83 | 84 | 85 | def calculate_inv_freq(total, num): 86 | return np.log(float(total) / float(num + 0.01)) 87 | 88 | 89 | def get_max_line(inputVector): 90 | return len(max(inputVector, key=len)) 91 | 92 | 93 | def get_tf(inputVector): 94 | token_index_dict = defaultdict(set) 95 | # Counting the number of logs the word appears in 96 | for index, line in enumerate(inputVector): 97 | for token in line: 98 | token_index_dict[token].add(index) 99 | return token_index_dict 100 | 101 | 102 | def get_lf(inputVector): 103 | token_index_ilf_dict = defaultdict(set) 104 | for line in inputVector: 105 | for location, token in enumerate(line): 106 | token_index_ilf_dict[token].add(location) 107 | return token_index_ilf_dict 108 | 109 | 110 | def calculate_idf(token_index_dict, inputVector): 111 | idf_dict = {} 112 | total_log_num = len(inputVector) 113 | for token in token_index_dict: 114 | idf_dict[token] = calculate_inv_freq(total_log_num, 115 | len(token_index_dict[token])) 116 | return idf_dict 117 | 118 | 119 | def calculate_ilf(token_index_dict, inputVector): 120 | ilf_dict = {} 121 | max_length = get_max_line(inputVector) 122 | # calculating ilf for each token 123 | for token in token_index_dict: 124 | ilf_dict[token] = calculate_inv_freq(max_length, 125 | len(token_index_dict[token])) 126 | return ilf_dict 127 | 128 | 129 | def create_invf_vector(inputVector, invf_dict, vocabulary): 130 | tfinvf = [] 131 | # Creating the tf-idf/tf-ilf vector for each log message 132 | for line in inputVector: 133 | cur_tfinvf = np.zeros(len(vocabulary)) 134 | count_dict = Counter(line) 135 | for token_index in line: 136 | cur_tfinvf[token_index] = ( 137 | float(count_dict[token_index]) * invf_dict[token_index] 138 | ) 139 | tfinvf.append(cur_tfinvf) 140 | tfinvf = np.array(tfinvf) 141 | return tfinvf 142 | 143 | 144 | def normalize_tfinvf(tfinvf): 145 | return 2.*(tfinvf - np.min(tfinvf))/np.ptp(tfinvf)-1 146 | 147 | 148 | def calculate_tf_invf_train( 149 | inputVector, get_f=get_tf, calc_invf=calculate_idf 150 | ): 151 | token_index_dict = get_f(inputVector) 152 | invf_dict = calc_invf(token_index_dict, inputVector) 153 | return invf_dict 154 | --------------------------------------------------------------------------------
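Putting the helpers above together: idf weights a token by how few log lines contain it, idf(t) = log(total_logs / (logs_containing_t + 0.01)), ilf weights it by how few in-line positions it occupies relative to the longest line, and create_invf_vector multiplies each token's in-message count by that weight. A small sketch, assuming the repo is importable as a package named `logclass` (the toy vocabulary and logs are invented):

import numpy as np
from logclass.feature_engineering.vectorizer import (
    get_tf, calculate_idf, calculate_tf_invf_train, create_invf_vector,
)

vocabulary = {'connection': 0, 'failed': 1, 'ok': 2}
logs = [[0, 1], [0, 2], [0, 2]]  # three vectorized logs, token indexes per message
invf = calculate_tf_invf_train(logs, get_f=get_tf, calc_invf=calculate_idf)
# idf('connection') = log(3 / 3.01) ~ 0.0  -- appears in every log, carries no signal
# idf('failed')     = log(3 / 1.01) ~ 1.09 -- rare, weighted up
X = create_invf_vector(logs, invf, vocabulary)
print(np.round(X, 2))  # one tf-idf row per log message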
/puLearning/puAdapter.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: UTF-8 -*- 3 | 4 | # ********************************************************** 5 | # * Author : Weibin Meng 6 | # * Email : m_weibin@163.com 7 | # * Create time : 2019-07-24 15:10 8 | # * Last modified : 2019-07-24 15:10 9 | # * Filename : puAdapter.py 10 | # * Description : 11 | ''' 12 | ''' 13 | # ********************************************************** 14 | #!/usr/bin/env python 15 | #-*- coding:utf-8 -*- 16 | """ 17 | Created on Dec 21, 2012 18 | 19 | @author: Alexandre 20 | """ 21 | import numpy as np 22 | 23 | class PUAdapter(object): 24 | """ 25 | Adapts any probabilistic binary classifier to positive-unlabeled learning using the PosOnly method proposed by 26 | Elkan and Noto: 27 | 28 | Elkan, Charles, and Keith Noto. \"Learning classifiers from only positive and unlabeled data.\" 29 | Proceedings of the 14th ACM SIGKDD international conference on Knowledge discovery and data mining. ACM, 2008. 30 | """ 31 | 32 | 33 | def __init__(self, estimator, hold_out_ratio=0.1, precomputed_kernel=False): 34 | """ 35 | estimator -- An estimator of p(s=1|x) that must implement: 36 | * predict_proba(X): Takes X, which can be a list of feature vectors or a precomputed 37 | kernel matrix and outputs p(s=1|x) for each example in X 38 | * fit(X,y): Takes X, which can be a list of feature vectors or a precomputed 39 | kernel matrix and takes y, which are the labels associated to the 40 | examples in X 41 | hold_out_ratio -- The ratio of training examples that must be held out of the training set of examples 42 | to estimate p(s=1|y=1) after training the estimator 43 | precomputed_kernel -- Specifies if the X matrix for predict_proba and fit is a precomputed kernel matrix 44 | """ 45 | self.estimator = estimator 46 | self.c = 1.0 47 | self.hold_out_ratio = hold_out_ratio 48 | 49 | if precomputed_kernel: 50 | self.fit = self.__fit_precomputed_kernel 51 | else: 52 | self.fit = self.__fit_no_precomputed_kernel 53 | 54 | self.estimator_fitted = False 55 | 56 | def __str__(self): 57 | return 'Estimator:' + str(self.estimator) + '\n' + 'p(s=1|y=1,x) ~= ' + str(self.c) + '\n' + \ 58 | 'Fitted: ' + str(self.estimator_fitted) 59 | 60 | 61 | def __fit_precomputed_kernel(self, X, y): 62 | """ 63 | Fits an estimator of p(s=1|x) and estimates the value of p(s=1|y=1) using a subset of the training examples 64 | 65 | X -- Precomputed kernel matrix 66 | y -- Labels associated to each example in X (Positive label: 1.0, Negative label: -1.0) 67 | """ 68 | positives = np.where(y == 1.)[0] 69 | hold_out_size = np.ceil(len(positives) * self.hold_out_ratio) 70 | 71 | if len(positives) <= hold_out_size: 72 | raise ValueError('Not enough positive examples to estimate p(s=1|y=1,x). 
Need at least ' + str(hold_out_size + 1) + '.') 73 | 74 | np.random.shuffle(positives) 75 | hold_out = positives[:int(hold_out_size)] 76 | 77 | #Hold out test kernel matrix 78 | X_test_hold_out = X[hold_out] 79 | keep = list(set(np.arange(len(y))) - set(hold_out)) 80 | X_test_hold_out = X_test_hold_out[:,keep] 81 | 82 | #New training kernel matrix 83 | X = X[:, keep] 84 | X = X[keep] 85 | 86 | y = np.delete(y, hold_out) 87 | 88 | self.estimator.fit(X, y) 89 | 90 | hold_out_predictions = self.estimator.predict_proba(X_test_hold_out) 91 | 92 | try: 93 | hold_out_predictions = hold_out_predictions[:,1] 94 | except IndexError: 95 | pass  # estimator returned a single probability column 96 | 97 | c = np.mean(hold_out_predictions) 98 | self.c = c 99 | 100 | self.estimator_fitted = True 101 | 102 | 103 | def __fit_no_precomputed_kernel(self, X, y): 104 | """ 105 | Fits an estimator of p(s=1|x) and estimates the value of p(s=1|y=1,x) 106 | 107 | X -- List of feature vectors 108 | y -- Labels associated to each feature vector in X (Positive label: 1.0, Negative label: -1.0) 109 | """ 110 | positives = np.where(y == 1.)[0] 111 | hold_out_size = np.ceil(len(positives) * self.hold_out_ratio) 112 | 113 | if len(positives) <= hold_out_size: 114 | raise ValueError('Not enough positive examples to estimate p(s=1|y=1,x). Need at least ' + str(hold_out_size + 1) + '.') 115 | 116 | np.random.shuffle(positives) 117 | #print hold_out_size 118 | #print type(hold_out_size) 119 | hold_out = positives[:int(hold_out_size)] 120 | X_hold_out = X[hold_out] 121 | X = np.delete(X, hold_out,0) 122 | y = np.delete(y, hold_out) 123 | 124 | self.estimator.fit(X, y) 125 | 126 | hold_out_predictions = self.estimator.predict_proba(X_hold_out) 127 | 128 | try: 129 | hold_out_predictions = hold_out_predictions[:,1] 130 | except IndexError: 131 | pass 132 | 133 | c = np.mean(hold_out_predictions) 134 | self.c = c 135 | 136 | self.estimator_fitted = True 137 | 138 | 139 | def predict_proba(self, X): 140 | """ 141 | Predicts p(y=1|x) using the estimator and the value of p(s=1|y=1) estimated in fit(...) 142 | 143 | X -- List of feature vectors or a precomputed kernel matrix 144 | """ 145 | if not self.estimator_fitted: 146 | raise Exception('The estimator must be fitted before calling predict_proba(...).') 147 | 148 | probabilistic_predictions = self.estimator.predict_proba(X) 149 | 150 | try: 151 | probabilistic_predictions = probabilistic_predictions[:,1] 152 | except IndexError: 153 | pass 154 | 155 | return probabilistic_predictions / self.c 156 | 157 | 158 | def predict(self, X, threshold=0.5): 159 | """ 160 | Assign labels to feature vectors based on the estimator's predictions 161 | 162 | X -- List of feature vectors or a precomputed kernel matrix 163 | threshold -- The decision threshold between the positive and the negative class 164 | """ 165 | if not self.estimator_fitted: 166 | raise Exception('The estimator must be fitted before calling predict(...).') 167 | 168 | return np.array([1. if p > threshold else -1. for p in self.predict_proba(X)]) 169 | 170 | 171 | 172 | --------------------------------------------------------------------------------
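A minimal end-to-end sketch of the adapter on synthetic data (the data and hyperparameters here are invented; assuming the repo is importable as a package named `logclass`). It hides most positives as unlabeled, then the adapter estimates c = p(s=1|y=1) on a held-out positive slice and rescales predict_proba by it, as in Elkan and Noto's PosOnly method:

import numpy as np
from sklearn.ensemble import RandomForestClassifier
from logclass.puLearning.puAdapter import PUAdapter

rng = np.random.RandomState(0)
# Positives cluster around +2, negatives around -2
X = np.vstack([rng.normal(2, 1, (200, 5)), rng.normal(-2, 1, (200, 5))])
y = np.hstack([np.ones(200), -np.ones(200)])
y_pu = y.copy()
y_pu[rng.permutation(200)[:160]] = -1.  # hide 80% of the positives as unlabeled
pu = PUAdapter(RandomForestClassifier(n_estimators=10))
pu.fit(X, y_pu)
print(pu.c)               # estimated p(s=1|y=1)
print(pu.predict(X[:5]))  # labels in {1., -1.}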
168 |         return np.array([1. if p > threshold else -1. for p in self.predict_proba(X)])
169 | 
170 | 
171 | 
172 | 
--------------------------------------------------------------------------------
/compare_pu.py:
--------------------------------------------------------------------------------
1 | from sklearn.model_selection import StratifiedKFold
2 | from .utils import (
3 |     file_handling,
4 |     TestingParameters,
5 |     print_params,
6 |     save_results,
7 | )
8 | from .preprocess import registry as preprocess_registry
9 | from .preprocess.utils import load_logs
10 | from .feature_engineering.utils import (
11 |     binary_train_gtruth,
12 |     extract_features,
13 | )
14 | from .models import binary_registry as binary_classifier_registry
15 | from .reporting import bb_registry as black_box_report_registry
16 | from .init_params import init_main_args, parse_main_args
17 | import numpy as np
18 | 
19 | 
20 | def init_args():
21 |     """Init command line args used for configuration."""
22 | 
23 |     parser = init_main_args()
24 |     parser.add_argument(
25 |         "--ratio",
26 |         metavar="ratio",
27 |         type=int,
28 |         nargs=1,
29 |         default=[8],
30 |         help="forced ratio of unlabeled (healthy) to anomalous logs",
31 |     )
32 |     parser.add_argument(
33 |         "--top_percentage",
34 |         metavar="top_percentage",
35 |         type=int,
36 |         nargs=1,
37 |         default=[11],
38 |         help="upper bound of the percentage of anomalous logs relabeled as unlabeled",
39 |     )
40 |     parser.add_argument(
41 |         "--step",
42 |         metavar="step",
43 |         type=int,
44 |         nargs=1,
45 |         default=[2],
46 |         help="step size of the relabeling percentage sweep",
47 |     )
48 |     return parser.parse_args()
49 | 
50 | 
51 | def parse_args(args):
52 |     """Parse provided args for runtime configuration."""
53 |     params = parse_main_args(args)
54 |     additional_params = {
55 |         "ratio": args.ratio[0],
56 |         "top_percentage": args.top_percentage[0],
57 |         "step": args.step[0],
58 |         "train": True,
59 |     }
60 |     params.update(additional_params)
61 |     return params
62 | 
63 | 
64 | def force_ratio(params, x_data, y_data):
65 |     """Force a ratio between anomalous and healthy logs"""
66 |     ratio = params['ratio']
67 |     if ratio > 0:
68 |         anomalous = np.where(y_data == 1.0)[0]
69 |         healthy = np.where(y_data == -1.0)[0]
70 |         if len(anomalous) * ratio <= len(healthy):
71 |             keep_anomalous = len(anomalous)
72 |             keep_healthy = keep_anomalous * ratio
73 |         else:
74 |             keep_anomalous = len(healthy) // ratio
75 |             keep_healthy = len(healthy)
76 |         np.random.seed(10)
77 |         permut = np.random.permutation(len(healthy))
78 |         keep = permut[:keep_healthy]
79 |         healthy = healthy[keep]
80 |         permut = np.random.permutation(len(anomalous))
81 |         keep = permut[:keep_anomalous]
82 |         anomalous = anomalous[keep]
83 |         result = sorted(np.concatenate((anomalous, healthy)))
84 |         y_data = y_data[result]
85 |         x_data = x_data[result]
86 |     return x_data, y_data
87 | 
88 | 
89 | def init_results(params):
90 |     results = {
91 |         'exp_name': [],
92 |         'logs_type': [],
93 |         'percentage': [],
94 |         'pu_f1': [],
95 |         f"{params['binary_classifier']}_f1": [],
96 |     }
97 |     return results
98 | 
99 | 
100 | def add_result(results, params, percentage, pu_acc, b_clf_acc):
101 |     results['exp_name'].append(params['id'])
102 |     results['logs_type'].append(params['logs_type'])
103 |     results['percentage'].append(percentage)
104 |     results['pu_f1'].append(pu_acc)
105 |     results[f"{params['binary_classifier']}_f1"].append(b_clf_acc)
106 | 
107 | 
108 | def run_test(params, x_data, y_data):
109 |     results = init_results(params)
110 |     # Binary training features
111 |     y_data = binary_train_gtruth(y_data)
112 |     x_data, y_data = force_ratio(params, x_data, y_data)
113 |     print("total logs", len(y_data))
114 |     print(len(np.where(y_data == -1.0)[0]), " are unlabeled")
115 |     print(len(np.where(y_data == 1.0)[0]), " are anomalous")
116 |     # KFold Cross Validation
117 |     kfold = StratifiedKFold(n_splits=params['kfold']).split(x_data, y_data)
118 |     for train_index, test_index in kfold:
119 |         x_train, x_test = x_data[train_index], x_data[test_index]
120 |         y_train, y_test = y_data[train_index], y_data[test_index]
121 |         x_train, _ = extract_features(x_train, params)
122 |         with TestingParameters(params):
123 |             x_test, _ = extract_features(x_test, params)
124 |         np.random.seed(5)
125 |         permut = np.random.permutation(len(y_train))
126 |         x_train = x_train[permut]
127 |         y_train = y_train[permut]
128 |         top_percentage = params['top_percentage']
129 |         step = params['step']
130 |         # Relabeling anomalous logs to unlabeled to test PU Learning robustness
131 |         for i in range(0, top_percentage, step):
132 |             y_train_pu = np.copy(y_train)
133 |             if i > 0:
134 |                 n_unlabeled = len(np.where(y_train_pu == -1.0)[0])
135 |                 sacrifice_size = (i*n_unlabeled)//(100 - i)
136 |                 print(i, n_unlabeled, sacrifice_size)
137 |                 pos = np.where(y_train == 1.0)[0]
138 |                 np.random.shuffle(pos)
139 |                 sacrifice = pos[:sacrifice_size]
140 |                 y_train_pu[sacrifice] = -1.0
141 | 
142 |             print(f"{i}% of anomalous logs in unlabeled logs:")
143 |             print(len(np.where(y_train_pu == -1.0)[0]), " are unlabeled")
144 |             print(len(np.where(y_train_pu == 1.0)[0]), " are anomalous")
145 |             # Binary PULearning with RF
146 |             pu_getter =\
147 |                 binary_classifier_registry.get_binary_model("pu_learning")
148 |             binary_clf = pu_getter(params)
149 |             binary_clf.fit(x_train, y_train_pu)
150 |             y_pred_pu = binary_clf.predict(x_test)
151 |             get_accuracy = black_box_report_registry.get_bb_report('acc')
152 |             pu_acc = get_accuracy(y_test, y_pred_pu)
153 |             # Comparing given Binary Classifier with PU Learning
154 |             comparison_clf_getter =\
155 |                 binary_classifier_registry.get_binary_model(
156 |                     params['binary_classifier'])
157 |             binary_clf = comparison_clf_getter(params)
158 |             binary_clf.fit(x_train, y_train_pu)
159 |             y_pred = binary_clf.predict(x_test)
160 |             b_clf_acc = get_accuracy(y_test, y_pred)
161 |             print(f"PU Acc: {pu_acc}\n{params['binary_classifier']}"
162 |                   + f" Acc: {b_clf_acc}")
163 | 
164 |             add_result(
165 |                 results,
166 |                 params,
167 |                 i,
168 |                 pu_acc,
169 |                 b_clf_acc
170 |             )
171 | 
172 |     save_results(results, params)
173 | 
174 | 
175 | def main():
176 |     # Init params
177 |     params = parse_args(init_args())
178 |     print_params(params)
179 |     file_handling(params)
180 |     # Preprocess raw logs if provided
181 |     if "raw_logs" in params:
182 |         preprocess = preprocess_registry.get_preprocessor(params['logs_type'])
183 |         preprocess(params)
184 |     # Load preprocessed logs from file
185 |     print('Loading logs')
186 |     x_data, y_data, _ = load_logs(params)
187 |     run_test(params, x_data, y_data)
188 | 
189 | 
190 | if __name__ == "__main__":
191 |     main()
192 | 
--------------------------------------------------------------------------------
/init_params.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 | import sys
4 | import warnings
5 | from uuid import uuid4
6 | 
7 | 
8 | def init_main_args():
9 |     """Init command line args used for configuration."""
10 | 
11 |     parser = argparse.ArgumentParser(
12 |         description="Runs experiment using LogClass Framework",
13 |         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
14 |     )
15 |     parser.add_argument(
16 |         "--raw_logs",
17 |         metavar="raw_logs",
18 |         type=str,
19 |         nargs=1,
20 |         help="input raw logs file path",
21 |     )
22 |     base_dir_default = os.path.join(
23 |         os.path.dirname(os.path.realpath(__file__)), "output"
24 |     )
25 |     parser.add_argument(
26 |         "--base_dir",
27 |         metavar="base_dir",
28 |         type=str,
29 |         nargs=1,
30 |         default=[base_dir_default],
31 |         help="base output directory for pipeline output files",
32 |     )
33 |     parser.add_argument(
34 |         "--logs",
35 |         metavar="logs",
36 |         type=str,
37 |         nargs=1,
38 |         help="input logs file path; also the output path of raw logs preprocessing",
39 |     )
40 |     parser.add_argument(
41 |         "--models_dir",
42 |         metavar="models_dir",
43 |         type=str,
44 |         nargs=1,
45 |         help="trained models input/output directory path",
46 |     )
47 |     parser.add_argument(
48 |         "--features_dir",
49 |         metavar="features_dir",
50 |         type=str,
51 |         nargs=1,
52 |         help="trained features input/output directory path",
53 |     )
54 |     parser.add_argument(
55 |         "--logs_type",
56 |         metavar="logs_type",
57 |         type=str,
58 |         nargs=1,
59 |         default=["open_Apache"],
60 |         choices=[
61 |             "bgl",
62 |             "open_Apache",
63 |             "open_bgl",
64 |             "open_hadoop",
65 |             "open_hdfs",
66 |             "open_hpc",
67 |             "open_proxifier",
68 |             "open_zookeeper",
69 |         ],
70 |         help="Input type of logs.",
71 |     )
72 |     parser.add_argument(
73 |         "--kfold",
74 |         metavar="kfold",
75 |         type=int,
76 |         nargs=1,
77 |         help="number of folds for k-fold cross-validation",
78 |     )
79 |     parser.add_argument(
80 |         "--healthy_label",
81 |         metavar='healthy_label',
82 |         type=str,
83 |         nargs=1,
84 |         default=["unlabeled"],
85 |         help="label that marks healthy (unlabeled) logs",
86 |     )
87 |     parser.add_argument(
88 |         "--features",
89 |         metavar="features",
90 |         type=str,
91 |         nargs='+',
92 |         default=["tfilf"],
93 |         choices=["tfidf", "tfilf", "length", "tf"],
94 |         help="Features to be extracted from the logs messages.",
95 |     )
96 |     parser.add_argument(
97 |         "--report",
98 |         metavar="report",
99 |         type=str,
100 |         nargs='+',
101 |         default=["confusion_matrix"],
102 |         choices=["confusion_matrix",
103 |                  "acc",
104 |                  "multi_acc",
105 |                  "top_k_svm",
106 |                  "micro",
107 |                  "macro"
108 |                  ],
109 |         help="Reports to be generated from the model and its predictions.",
110 |     )
111 |     parser.add_argument(
112 |         "--binary_classifier",
113 |         metavar="binary_classifier",
114 |         type=str,
115 |         nargs=1,
116 |         default=["pu_learning"],
117 |         choices=["pu_learning", "regular"],
118 |         help="Binary classifier to be used as anomaly detector.",
119 |     )
120 |     parser.add_argument(
121 |         "--multi_classifier",
122 |         metavar="multi_classifier",
123 |         type=str,
124 |         nargs=1,
125 |         default=["svm"],
126 |         choices=["svm"],
127 |         help="Multi-class classifier to classify anomalies.",
128 |     )
129 |     parser.add_argument(
130 |         "--train",
131 |         action="store_true",
132 |         default=False,
133 |         help="If set, logclass will train on the given data. Otherwise "
134 |              + "it will run inference on it.",
135 |     )
136 |     parser.add_argument(
137 |         "--force",
138 |         action="store_true",
139 |         default=False,
140 |         help="Force training overwriting previous output with same id.",
141 |     )
142 |     parser.add_argument(
143 |         "--id",
144 |         metavar="id",
145 |         type=str,
146 |         nargs=1,
147 |         help="Experiment id. Automatically generated if not specified.",
148 |     )
149 |     parser.add_argument(
150 |         "--swap",
151 |         action="store_true",
152 |         default=False,
153 |         help="Swap testing/training data in kfold cross validation.",
154 |     )
155 | 
156 |     return parser
157 | 
158 | 
159 | def parse_main_args(args):
160 |     """Parse provided args for runtime configuration."""
161 |     params = {
162 |         "report": args.report,
163 |         "train": args.train,
164 |         "force": args.force,
165 |         "base_dir": args.base_dir[0],
166 |         "logs_type": args.logs_type[0],
167 |         "healthy_label": args.healthy_label[0],
168 |         "features": args.features,
169 |         "binary_classifier": args.binary_classifier[0],
170 |         "multi_classifier": args.multi_classifier[0],
171 |         "swap": args.swap,
172 |     }
173 |     if args.raw_logs:
174 |         params["raw_logs"] = os.path.normpath(args.raw_logs[0])
175 |     if args.kfold:
176 |         params["kfold"] = args.kfold[0]
177 |     if args.logs:
178 |         params['logs'] = os.path.normpath(args.logs[0])
179 |     else:
180 |         params['logs'] = os.path.join(
181 |             params['base_dir'],
182 |             "preprocessed_logs",
183 |             f"{params['logs_type']}.txt"
184 |         )
185 |     if args.id:
186 |         params['id'] = args.id[0]
187 |     else:
188 |         if not params["train"]:
189 |             warnings.warn(
190 |                 "--id parameter is not set when running inference. "
191 |                 "If --train is not set, you might want to provide the "
192 |                 "experiment id of your best training experiment run, "
193 |                 "e.g. `--id 2310136305`"
194 |             )
195 |         params['id'] = str(uuid4().time_low)
196 | 
197 |     print(f"\nExperiment ID: {params['id']}")
198 |     # Creating experiments results folder with the format
199 |     # {experiment_module_name}_{logs_type}_{id}
200 |     experiment_name = os.path.basename(sys.argv[0]).split('.')[0]
201 |     params['id_dir'] = os.path.join(
202 |         params['base_dir'],
203 |         '_'.join((
204 |             experiment_name, params['logs_type'], params['id']
205 |         ))
206 |     )
207 |     if args.models_dir:
208 |         params['models_dir'] = os.path.normpath(args.models_dir[0])
209 |     else:
210 |         params['models_dir'] = os.path.join(
211 |             params['id_dir'],
212 |             "models",
213 |         )
214 |     if args.features_dir:
215 |         params['features_dir'] = os.path.normpath(args.features_dir[0])
216 |     else:
217 |         params['features_dir'] = os.path.join(
218 |             params['id_dir'],
219 |             "features",
220 |         )
221 |     params['results_dir'] = os.path.join(params['id_dir'], "results")
222 | 
223 |     return params
224 | 
--------------------------------------------------------------------------------
/logclass.py:
--------------------------------------------------------------------------------
1 | from sklearn.model_selection import StratifiedKFold
2 | from .utils import (
3 |     save_params,
4 |     load_params,
5 |     file_handling,
6 |     TestingParameters,
7 |     print_params,
8 | )
9 | from .preprocess import registry as preprocess_registry
10 | from .preprocess.utils import load_logs
11 | from .feature_engineering.utils import (
12 |     binary_train_gtruth,
13 |     multi_features,
14 |     extract_features,
15 | )
16 | from tqdm import tqdm
17 | from .models import binary_registry as binary_classifier_registry
18 | from .models import multi_registry as multi_classifier_registry
19 | from .reporting import bb_registry as black_box_report_registry
20 | from .reporting import wb_registry as white_box_report_registry
21 | from .init_params import init_main_args, parse_main_args
22 | 
23 | 
24 | def init_args():
25 |     """Init command line args used for configuration."""
26 | 
27 |     parser = init_main_args()
28 |     return parser.parse_args()
29 | 
30 | 
31 | def parse_args(args):
32 |     """Parse provided args for runtime configuration."""
33 |     params = parse_main_args(args)
34 |     return params
35 | 
36 | 
37 | def inference(params, x_data, y_data, target_names):
38 |     # Inference
39 |     # Feature engineering
40 |     x_test, vocabulary = extract_features(x_data, params)
41 |     # Binary training features
42 |     y_test = binary_train_gtruth(y_data)
43 |     # Binary PU estimator with RF
44 |     # Load trained PU estimator
45 |     binary_clf_getter =\
46 |         binary_classifier_registry.get_binary_model(
47 |             params['binary_classifier'])
48 |     binary_clf = binary_clf_getter(params)
49 |     binary_clf.load()
50 |     # Anomaly detection
51 |     y_pred_pu = binary_clf.predict(x_test)
52 |     get_accuracy = black_box_report_registry.get_bb_report('acc')
53 |     binary_acc = get_accuracy(y_test, y_pred_pu)
54 |     # Multi-class: remove healthy logs
55 |     x_infer_multi, y_infer_multi = multi_features(x_test, y_data)
56 |     # Load multi-class classifier
57 |     multi_classifier_getter =\
58 |         multi_classifier_registry.get_multi_model(params['multi_classifier'])
59 |     multi_classifier = multi_classifier_getter(params)
60 |     multi_classifier.load()
61 |     # Anomaly classification
62 |     pred = multi_classifier.predict(x_infer_multi)
63 |     get_multi_acc = black_box_report_registry.get_bb_report('multi_acc')
64 |     score = get_multi_acc(y_infer_multi, pred)
65 | 
66 |     print(binary_acc, score)
67 |     for report in params['report']:
68 |         try:
69 |             get_bb_report = black_box_report_registry.get_bb_report(report)
70 |             result = get_bb_report(y_test, y_pred_pu)
71 |         except Exception:
72 |             pass
73 |         else:
74 |             print(f'Binary classification {report} report:')
75 |             print(result)
76 | 
77 |         try:
78 |             get_bb_report = black_box_report_registry.get_bb_report(report)
79 |             result = get_bb_report(y_infer_multi, pred)
80 |         except Exception:
81 |             pass
82 |         else:
83 |             print(f'Multi classification {report} report:')
84 |             print(result)
85 | 
86 |         try:
87 |             get_wb_report = white_box_report_registry.get_wb_report(report)
88 |             result =\
89 |                 get_wb_report(params, multi_classifier.model, vocabulary,
90 |                               target_names=target_names, top_features=5)
91 |         except Exception:
92 |             pass
93 |         else:
94 |             print(f'Multi classification {report} report:')
95 |             print(result)
96 | 
97 | 
98 | def train(params, x_data, y_data, target_names):
99 |     # KFold Cross Validation
100 |     kfold = StratifiedKFold(n_splits=params['kfold']).split(x_data, y_data)
101 |     best_pu_fs = 0.
102 |     best_multi = 0.
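    # Each fold below trains two models: a PU-learning binary detector
    # (anomalous vs. unlabeled) and a multi-class classifier over the
    # anomalous classes only. The fold with the best binary F1 (ties
    # broken by multi-class accuracy) has its parameters saved via
    # save_params, so inference can later reload the winning setup.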
103 |     for train_index, test_index in tqdm(kfold):
104 |         x_train, x_test = x_data[train_index], x_data[test_index]
105 |         y_train, y_test = y_data[train_index], y_data[test_index]
106 |         x_train, vocabulary = extract_features(x_train, params)
107 |         with TestingParameters(params):
108 |             x_test, _ = extract_features(x_test, params)
109 |         # Binary training features
110 |         y_test_pu = binary_train_gtruth(y_test)
111 |         y_train_pu = binary_train_gtruth(y_train)
112 |         # Binary PULearning with RF
113 |         binary_clf_getter =\
114 |             binary_classifier_registry.get_binary_model(
115 |                 params['binary_classifier'])
116 |         binary_clf = binary_clf_getter(params)
117 |         binary_clf.fit(x_train, y_train_pu)
118 |         y_pred_pu = binary_clf.predict(x_test)
119 |         get_accuracy = black_box_report_registry.get_bb_report('acc')
120 |         binary_acc = get_accuracy(y_test_pu, y_pred_pu)
121 |         # Multi-class training features
122 |         x_train_multi, y_train_multi =\
123 |             multi_features(x_train, y_train)
124 |         x_test_multi, y_test_multi = multi_features(x_test, y_test)
125 |         # Multi-class classifier
126 |         multi_classifier_getter =\
127 |             multi_classifier_registry.get_multi_model(params['multi_classifier'])
128 |         multi_classifier = multi_classifier_getter(params)
129 |         multi_classifier.fit(x_train_multi, y_train_multi)
130 |         pred = multi_classifier.predict(x_test_multi)
131 |         get_multi_acc = black_box_report_registry.get_bb_report('multi_acc')
132 |         score = get_multi_acc(y_test_multi, pred)
133 |         better_results = (
134 |             binary_acc > best_pu_fs
135 |             or (binary_acc == best_pu_fs and score > best_multi)
136 |         )
137 | 
138 |         if better_results:
139 |             if binary_acc > best_pu_fs:
140 |                 best_pu_fs = binary_acc
141 |             save_params(params)
142 |             if score > best_multi:
143 |                 best_multi = score
144 |             print(binary_acc, score)
145 | 
146 |             # try/except blocks are needed because every report is invoked
147 |             # the same way even though their signatures differ
148 |             for report in params['report']:
149 |                 try:
150 |                     get_bb_report = black_box_report_registry.get_bb_report(report)
151 |                     result = get_bb_report(y_test_pu, y_pred_pu)
152 |                 except Exception:
153 |                     pass
154 |                 else:
155 |                     print(f'Binary classification {report} report:')
156 |                     print(result)
157 | 
158 |                 try:
159 |                     get_bb_report = black_box_report_registry.get_bb_report(report)
160 |                     result = get_bb_report(y_test_multi, pred)
161 |                 except Exception:
162 |                     pass
163 |                 else:
164 |                     print(f'Multi classification {report} report:')
165 |                     print(result)
166 | 
167 |                 try:
168 |                     get_wb_report = white_box_report_registry.get_wb_report(report)
169 |                     result =\
170 |                         get_wb_report(params, multi_classifier.model, vocabulary,
171 |                                       target_names=target_names, top_features=5)
172 |                 except Exception:
173 |                     pass
174 |                 else:
175 |                     print(f'Multi classification {report} report:')
176 |                     print(result)
177 | 
178 | 
179 | def main():
180 |     # Init params
181 |     params = parse_args(init_args())
182 |     if not params['train']:
183 |         load_params(params)
184 |     print_params(params)
185 |     file_handling(params)  # TODO: handle the case when the experiment ID already exists
186 |     # Preprocess raw logs if provided
187 |     if 'raw_logs' in params:
188 |         preprocess = preprocess_registry.get_preprocessor(params['logs_type'])
189 |         preprocess(params)
190 |     # Load preprocessed logs from file
191 |     x_data, y_data, target_names = load_logs(params)
192 |     if params['train']:
193 |         train(params, x_data, y_data, target_names)
194 |     else:
195 |         inference(params, x_data, y_data, target_names)
196 | 
197 | 
198 | if __name__ == "__main__":
199 |     main()
200 |
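For orientation, here is a minimal sketch of driving the PUAdapter above directly, outside the pipeline. It assumes the repository root is importable (so `puLearning.puAdapter` resolves) and uses synthetic data with scikit-learn's RandomForestClassifier (pinned in requirements.txt); the data and variable names (rng, y_pu, ...) are illustrative only. Labels follow the adapter's convention: 1.0 for known positives, -1.0 for unlabeled examples.

import numpy as np
from sklearn.ensemble import RandomForestClassifier
from puLearning.puAdapter import PUAdapter

# Synthetic ground truth: positive iff the first feature is positive
rng = np.random.RandomState(0)
X = rng.randn(400, 5)
y_true = np.where(X[:, 0] > 0, 1., -1.)

# Positive-unlabeled view: hide half of the positives among the unlabeled
y_pu = y_true.copy()
positives = np.where(y_pu == 1.)[0]
y_pu[rng.permutation(positives)[:len(positives) // 2]] = -1.

pu = PUAdapter(RandomForestClassifier(n_estimators=100), hold_out_ratio=0.2)
pu.fit(X, y_pu)         # fits the estimator and estimates c = p(s=1|y=1) on held-out positives
print(pu)               # reports the estimated c
y_pred = pu.predict(X)  # labels in {1., -1.}, thresholding p(y=1|x) at 0.5

The registered "pu_learning" binary model in models/pu_learning.py presumably wires this same adapter behind the binary-classifier registry that compare_pu.py and logclass.py call through.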
| -------------------------------------------------------------------------------- /data/open_source_logs/Apache/groundtruth.seq: -------------------------------------------------------------------------------- 1 | 2 2 | 3 3 | 1 4 | 1 5 | 1 6 | 2 7 | 2 8 | 2 9 | 3 10 | 3 11 | 3 12 | 1 13 | 1 14 | 1 15 | 2 16 | 2 17 | 3 18 | 1 19 | 1 20 | 1 21 | 1 22 | 2 23 | 2 24 | 2 25 | 3 26 | 3 27 | 3 28 | 1 29 | 1 30 | 1 31 | 2 32 | 2 33 | 3 34 | 3 35 | 1 36 | 1 37 | 1 38 | 2 39 | 2 40 | 3 41 | 3 42 | 1 43 | 1 44 | 1 45 | 2 46 | 3 47 | 1 48 | 1 49 | 1 50 | 1 51 | 2 52 | 2 53 | 2 54 | 2 55 | 3 56 | 3 57 | 3 58 | 3 59 | 1 60 | 2 61 | 3 62 | 1 63 | 1 64 | 2 65 | 2 66 | 3 67 | 3 68 | 1 69 | 1 70 | 2 71 | 2 72 | 3 73 | 3 74 | 1 75 | 2 76 | 3 77 | 1 78 | 2 79 | 3 80 | 1 81 | 1 82 | 2 83 | 2 84 | 3 85 | 3 86 | 1 87 | 2 88 | 3 89 | 1 90 | 1 91 | 2 92 | 3 93 | 2 94 | 3 95 | 1 96 | 1 97 | 1 98 | 1 99 | 1 100 | 1 101 | 1 102 | 1 103 | 1 104 | 1 105 | 1 106 | 2 107 | 3 108 | 2 109 | 3 110 | 2 111 | 3 112 | 2 113 | 3 114 | 2 115 | 3 116 | 2 117 | 3 118 | 2 119 | 3 120 | 1 121 | 2 122 | 3 123 | 1 124 | 1 125 | 2 126 | 3 127 | 2 128 | 3 129 | 1 130 | 2 131 | 3 132 | 4 133 | 1 134 | 2 135 | 3 136 | 1 137 | 1 138 | 2 139 | 2 140 | 3 141 | 1 142 | 1 143 | 1 144 | 2 145 | 3 146 | 1 147 | 1 148 | 1 149 | 1 150 | 2 151 | 3 152 | 2 153 | 3 154 | 2 155 | 3 156 | 2 157 | 3 158 | 1 159 | 1 160 | 1 161 | 2 162 | 2 163 | 3 164 | 3 165 | 1 166 | 1 167 | 1 168 | 1 169 | 2 170 | 3 171 | 2 172 | 3 173 | 2 174 | 3 175 | 2 176 | 3 177 | 1 178 | 1 179 | 1 180 | 1 181 | 1 182 | 1 183 | 1 184 | 1 185 | 2 186 | 2 187 | 2 188 | 2 189 | 2 190 | 3 191 | 3 192 | 3 193 | 3 194 | 3 195 | 1 196 | 2 197 | 3 198 | 1 199 | 1 200 | 1 201 | 2 202 | 2 203 | 3 204 | 3 205 | 1 206 | 2 207 | 3 208 | 2 209 | 3 210 | 1 211 | 1 212 | 2 213 | 2 214 | 3 215 | 1 216 | 2 217 | 3 218 | 1 219 | 1 220 | 1 221 | 2 222 | 2 223 | 3 224 | 3 225 | 1 226 | 1 227 | 1 228 | 2 229 | 2 230 | 3 231 | 3 232 | 1 233 | 2 234 | 3 235 | 1 236 | 1 237 | 1 238 | 2 239 | 2 240 | 3 241 | 3 242 | 1 243 | 2 244 | 1 245 | 3 246 | 1 247 | 1 248 | 2 249 | 2 250 | 3 251 | 3 252 | 1 253 | 1 254 | 1 255 | 1 256 | 1 257 | 1 258 | 1 259 | 1 260 | 2 261 | 2 262 | 2 263 | 3 264 | 3 265 | 3 266 | 1 267 | 1 268 | 1 269 | 1 270 | 1 271 | 1 272 | 2 273 | 2 274 | 2 275 | 2 276 | 2 277 | 3 278 | 3 279 | 3 280 | 3 281 | 3 282 | 1 283 | 2 284 | 3 285 | 1 286 | 2 287 | 3 288 | 1 289 | 1 290 | 2 291 | 3 292 | 2 293 | 3 294 | 1 295 | 1 296 | 1 297 | 2 298 | 2 299 | 2 300 | 3 301 | 3 302 | 3 303 | 1 304 | 1 305 | 2 306 | 2 307 | 3 308 | 3 309 | 1 310 | 1 311 | 2 312 | 1 313 | 2 314 | 1 315 | 3 316 | 3 317 | 2 318 | 2 319 | 3 320 | 3 321 | 1 322 | 1 323 | 1 324 | 2 325 | 2 326 | 2 327 | 3 328 | 3 329 | 1 330 | 1 331 | 1 332 | 1 333 | 1 334 | 1 335 | 1 336 | 1 337 | 1 338 | 1 339 | 1 340 | 2 341 | 2 342 | 3 343 | 1 344 | 1 345 | 1 346 | 1 347 | 2 348 | 2 349 | 3 350 | 3 351 | 1 352 | 1 353 | 1 354 | 2 355 | 3 356 | 2 357 | 3 358 | 2 359 | 3 360 | 1 361 | 1 362 | 1 363 | 1 364 | 2 365 | 2 366 | 2 367 | 3 368 | 1 369 | 1 370 | 1 371 | 1 372 | 1 373 | 1 374 | 1 375 | 2 376 | 3 377 | 1 378 | 1 379 | 1 380 | 1 381 | 1 382 | 1 383 | 2 384 | 3 385 | 2 386 | 3 387 | 2 388 | 3 389 | 2 390 | 3 391 | 1 392 | 2 393 | 3 394 | 1 395 | 1 396 | 2 397 | 2 398 | 3 399 | 3 400 | 1 401 | 1 402 | 2 403 | 2 404 | 3 405 | 3 406 | 1 407 | 1 408 | 1 409 | 2 410 | 3 411 | 1 412 | 1 413 | 1 414 | 1 415 | 1 416 | 1 417 | 1 418 | 1 419 | 1 420 | 1 421 | 1 422 | 1 423 | 2 424 | 2 425 | 2 426 | 3 427 | 3 428 | 3 429 | 1 430 | 1 431 | 1 432 
| 1 433 | 2 434 | 3 435 | 1 436 | 1 437 | 1 438 | 1 439 | 1 440 | 1 441 | 1 442 | 2 443 | 2 444 | 3 445 | 3 446 | 2 447 | 3 448 | 2 449 | 3 450 | 2 451 | 3 452 | 1 453 | 1 454 | 1 455 | 1 456 | 2 457 | 2 458 | 3 459 | 3 460 | 2 461 | 3 462 | 1 463 | 1 464 | 1 465 | 1 466 | 1 467 | 1 468 | 1 469 | 2 470 | 3 471 | 2 472 | 2 473 | 3 474 | 3 475 | 1 476 | 1 477 | 1 478 | 1 479 | 1 480 | 1 481 | 1 482 | 1 483 | 1 484 | 1 485 | 1 486 | 1 487 | 2 488 | 2 489 | 2 490 | 2 491 | 2 492 | 3 493 | 3 494 | 3 495 | 3 496 | 3 497 | 1 498 | 1 499 | 1 500 | 1 501 | 1 502 | 1 503 | 1 504 | 1 505 | 1 506 | 1 507 | 1 508 | 1 509 | 1 510 | 2 511 | 2 512 | 2 513 | 3 514 | 3 515 | 3 516 | 1 517 | 1 518 | 1 519 | 2 520 | 3 521 | 2 522 | 3 523 | 2 524 | 3 525 | 1 526 | 1 527 | 1 528 | 2 529 | 3 530 | 2 531 | 2 532 | 3 533 | 3 534 | 1 535 | 2 536 | 3 537 | 1 538 | 1 539 | 2 540 | 2 541 | 3 542 | 3 543 | 1 544 | 1 545 | 1 546 | 2 547 | 2 548 | 2 549 | 3 550 | 3 551 | 1 552 | 1 553 | 1 554 | 2 555 | 3 556 | 1 557 | 1 558 | 1 559 | 1 560 | 1 561 | 1 562 | 1 563 | 1 564 | 1 565 | 1 566 | 2 567 | 3 568 | 2 569 | 3 570 | 2 571 | 3 572 | 2 573 | 3 574 | 2 575 | 3 576 | 2 577 | 3 578 | 2 579 | 3 580 | 4 581 | 4 582 | 4 583 | 4 584 | 4 585 | 4 586 | 4 587 | 4 588 | 4 589 | 4 590 | 4 591 | 4 592 | 1 593 | 4 594 | 2 595 | 3 596 | 1 597 | 1 598 | 2 599 | 3 600 | 1 601 | 1 602 | 1 603 | 2 604 | 3 605 | 2 606 | 3 607 | 2 608 | 3 609 | 1 610 | 2 611 | 3 612 | 1 613 | 1 614 | 1 615 | 1 616 | 2 617 | 2 618 | 2 619 | 2 620 | 3 621 | 3 622 | 3 623 | 3 624 | 1 625 | 1 626 | 2 627 | 3 628 | 1 629 | 1 630 | 1 631 | 1 632 | 1 633 | 1 634 | 1 635 | 2 636 | 3 637 | 2 638 | 3 639 | 2 640 | 3 641 | 2 642 | 3 643 | 2 644 | 3 645 | 1 646 | 1 647 | 1 648 | 1 649 | 1 650 | 1 651 | 1 652 | 1 653 | 1 654 | 1 655 | 2 656 | 2 657 | 2 658 | 2 659 | 2 660 | 3 661 | 3 662 | 3 663 | 3 664 | 3 665 | 1 666 | 1 667 | 1 668 | 1 669 | 1 670 | 1 671 | 2 672 | 3 673 | 2 674 | 3 675 | 2 676 | 3 677 | 2 678 | 3 679 | 2 680 | 3 681 | 1 682 | 1 683 | 1 684 | 1 685 | 1 686 | 2 687 | 3 688 | 2 689 | 3 690 | 2 691 | 3 692 | 2 693 | 3 694 | 2 695 | 3 696 | 1 697 | 1 698 | 1 699 | 2 700 | 2 701 | 2 702 | 3 703 | 3 704 | 3 705 | 1 706 | 1 707 | 1 708 | 1 709 | 1 710 | 1 711 | 1 712 | 1 713 | 2 714 | 2 715 | 2 716 | 2 717 | 3 718 | 3 719 | 3 720 | 3 721 | 1 722 | 2 723 | 3 724 | 1 725 | 1 726 | 1 727 | 1 728 | 1 729 | 1 730 | 1 731 | 1 732 | 1 733 | 2 734 | 2 735 | 2 736 | 2 737 | 2 738 | 3 739 | 3 740 | 3 741 | 3 742 | 3 743 | 1 744 | 2 745 | 3 746 | 1 747 | 1 748 | 1 749 | 1 750 | 1 751 | 1 752 | 1 753 | 1 754 | 1 755 | 1 756 | 2 757 | 2 758 | 2 759 | 2 760 | 3 761 | 3 762 | 3 763 | 3 764 | 1 765 | 1 766 | 2 767 | 2 768 | 3 769 | 3 770 | 1 771 | 2 772 | 3 773 | 4 774 | 1 775 | 1 776 | 1 777 | 1 778 | 1 779 | 1 780 | 1 781 | 1 782 | 1 783 | 1 784 | 1 785 | 5 786 | 1 787 | 1 788 | 1 789 | 5 790 | 1 791 | 2 792 | 3 793 | 2 794 | 3 795 | 2 796 | 6 797 | 2 798 | 3 799 | 2 800 | 3 801 | 2 802 | 6 803 | 2 804 | 3 805 | 4 806 | 4 807 | 1 808 | 2 809 | 3 810 | 1 811 | 1 812 | 2 813 | 3 814 | 1 815 | 1 816 | 1 817 | 2 818 | 3 819 | 2 820 | 3 821 | 2 822 | 3 823 | 1 824 | 1 825 | 1 826 | 1 827 | 1 828 | 4 829 | 2 830 | 3 831 | 2 832 | 3 833 | 2 834 | 3 835 | 2 836 | 3 837 | 2 838 | 3 839 | 1 840 | 1 841 | 1 842 | 1 843 | 2 844 | 3 845 | 2 846 | 3 847 | 2 848 | 3 849 | 2 850 | 3 851 | 1 852 | 1 853 | 2 854 | 2 855 | 3 856 | 3 857 | 1 858 | 1 859 | 2 860 | 3 861 | 2 862 | 3 863 | 1 864 | 1 865 | 2 866 | 2 867 | 3 868 | 1 869 | 3 870 | 2 871 | 3 872 | 1 873 | 1 874 | 1 875 | 2 876 | 
3 877 | 2 878 | 3 879 | 2 880 | 3 881 | 1 882 | 1 883 | 1 884 | 1 885 | 2 886 | 3 887 | 2 888 | 3 889 | 2 890 | 3 891 | 2 892 | 3 893 | 1 894 | 1 895 | 1 896 | 1 897 | 1 898 | 1 899 | 2 900 | 2 901 | 2 902 | 2 903 | 3 904 | 3 905 | 3 906 | 3 907 | 1 908 | 1 909 | 1 910 | 1 911 | 2 912 | 3 913 | 2 914 | 3 915 | 2 916 | 3 917 | 2 918 | 3 919 | 1 920 | 1 921 | 1 922 | 1 923 | 2 924 | 3 925 | 2 926 | 3 927 | 2 928 | 3 929 | 2 930 | 3 931 | 1 932 | 1 933 | 1 934 | 1 935 | 2 936 | 2 937 | 2 938 | 2 939 | 3 940 | 3 941 | 3 942 | 3 943 | 1 944 | 1 945 | 1 946 | 2 947 | 2 948 | 2 949 | 1 950 | 3 951 | 2 952 | 1 953 | 1 954 | 2 955 | 3 956 | 1 957 | 1 958 | 1 959 | 1 960 | 1 961 | 1 962 | 2 963 | 3 964 | 2 965 | 3 966 | 2 967 | 3 968 | 2 969 | 3 970 | 1 971 | 2 972 | 3 973 | 1 974 | 1 975 | 1 976 | 1 977 | 1 978 | 1 979 | 1 980 | 1 981 | 1 982 | 1 983 | 1 984 | 1 985 | 1 986 | 1 987 | 1 988 | 2 989 | 2 990 | 2 991 | 3 992 | 3 993 | 3 994 | 1 995 | 1 996 | 1 997 | 1 998 | 2 999 | 3 1000 | 1 1001 | 1 1002 | 1 1003 | 2 1004 | 3 1005 | 2 1006 | 3 1007 | 2 1008 | 3 1009 | 1 1010 | 1 1011 | 1 1012 | 2 1013 | 2 1014 | 3 1015 | 3 1016 | 1 1017 | 1 1018 | 1 1019 | 2 1020 | 2 1021 | 2 1022 | 3 1023 | 3 1024 | 3 1025 | 1 1026 | 1 1027 | 1 1028 | 1 1029 | 1 1030 | 1 1031 | 1 1032 | 5 1033 | 1 1034 | 2 1035 | 3 1036 | 2 1037 | 6 1038 | 2 1039 | 3 1040 | 5 1041 | 2 1042 | 6 1043 | 5 1044 | 2 1045 | 6 1046 | 5 1047 | 1 1048 | 2 1049 | 3 1050 | 2 1051 | 6 1052 | 4 1053 | 4 1054 | 1 1055 | 2 1056 | 3 1057 | 1 1058 | 2 1059 | 3 1060 | 4 1061 | 1 1062 | 2 1063 | 3 1064 | 1 1065 | 1 1066 | 1 1067 | 1 1068 | 2 1069 | 2 1070 | 2 1071 | 2 1072 | 3 1073 | 3 1074 | 3 1075 | 3 1076 | 1 1077 | 2 1078 | 3 1079 | 1 1080 | 4 1081 | 2 1082 | 3 1083 | 1 1084 | 1 1085 | 1 1086 | 2 1087 | 2 1088 | 1 1089 | 1 1090 | 1 1091 | 1 1092 | 1 1093 | 1 1094 | 1 1095 | 2 1096 | 3 1097 | 2 1098 | 3 1099 | 2 1100 | 3 1101 | 2 1102 | 3 1103 | 1 1104 | 1 1105 | 1 1106 | 1 1107 | 2 1108 | 2 1109 | 2 1110 | 2 1111 | 3 1112 | 3 1113 | 3 1114 | 3 1115 | 1 1116 | 1 1117 | 1 1118 | 1 1119 | 2 1120 | 3 1121 | 2 1122 | 3 1123 | 2 1124 | 3 1125 | 2 1126 | 3 1127 | 1 1128 | 1 1129 | 1 1130 | 1 1131 | 1 1132 | 2 1133 | 3 1134 | 2 1135 | 3 1136 | 2 1137 | 3 1138 | 2 1139 | 3 1140 | 1 1141 | 1 1142 | 1 1143 | 2 1144 | 2 1145 | 3 1146 | 3 1147 | 1 1148 | 1 1149 | 1 1150 | 1 1151 | 1 1152 | 1 1153 | 1 1154 | 1 1155 | 2 1156 | 1 1157 | 1 1158 | 1 1159 | 1 1160 | 1 1161 | 1 1162 | 1 1163 | 1 1164 | 1 1165 | 1 1166 | 1 1167 | 2 1168 | 2 1169 | 3 1170 | 2 1171 | 3 1172 | 2 1173 | 3 1174 | 2 1175 | 3 1176 | 2 1177 | 3 1178 | 2 1179 | 3 1180 | 3 1181 | 1 1182 | 1 1183 | 1 1184 | 2 1185 | 2 1186 | 2 1187 | 3 1188 | 3 1189 | 3 1190 | 1 1191 | 1 1192 | 1 1193 | 1 1194 | 1 1195 | 1 1196 | 1 1197 | 1 1198 | 1 1199 | 1 1200 | 1 1201 | 1 1202 | 1 1203 | 2 1204 | 3 1205 | 2 1206 | 3 1207 | 2 1208 | 3 1209 | 2 1210 | 3 1211 | 1 1212 | 2 1213 | 3 1214 | 1 1215 | 2 1216 | 3 1217 | 4 1218 | 1 1219 | 1 1220 | 2 1221 | 3 1222 | 2 1223 | 3 1224 | 1 1225 | 1 1226 | 1 1227 | 2 1228 | 2 1229 | 3 1230 | 3 1231 | 2 1232 | 3 1233 | 1 1234 | 1 1235 | 1 1236 | 2 1237 | 2 1238 | 2 1239 | 3 1240 | 3 1241 | 3 1242 | 1 1243 | 1 1244 | 1 1245 | 1 1246 | 1 1247 | 2 1248 | 3 1249 | 2 1250 | 3 1251 | 2 1252 | 3 1253 | 2 1254 | 3 1255 | 1 1256 | 1 1257 | 1 1258 | 1 1259 | 1 1260 | 2 1261 | 2 1262 | 2 1263 | 2 1264 | 3 1265 | 3 1266 | 3 1267 | 3 1268 | 1 1269 | 1 1270 | 1 1271 | 1 1272 | 1 1273 | 1 1274 | 1 1275 | 2 1276 | 3 1277 | 2 1278 | 3 1279 | 2 1280 | 3 1281 | 1 1282 | 2 1283 | 3 1284 | 1 
1285 | 1 1286 | 1 1287 | 1 1288 | 1 1289 | 1 1290 | 2 1291 | 2 1292 | 1 1293 | 3 1294 | 1 1295 | 3 1296 | 2 1297 | 2 1298 | 3 1299 | 3 1300 | 1 1301 | 1 1302 | 1 1303 | 1 1304 | 2 1305 | 2 1306 | 3 1307 | 3 1308 | 1 1309 | 1 1310 | 2 1311 | 3 1312 | 2 1313 | 3 1314 | 1 1315 | 1 1316 | 1 1317 | 2 1318 | 3 1319 | 1 1320 | 1 1321 | 2 1322 | 2 1323 | 3 1324 | 3 1325 | 1 1326 | 1 1327 | 1 1328 | 1 1329 | 2 1330 | 3 1331 | 2 1332 | 1 1333 | 1 1334 | 1 1335 | 1 1336 | 1 1337 | 1 1338 | 2 1339 | 3 1340 | 2 1341 | 3 1342 | 2 1343 | 3 1344 | 1 1345 | 1 1346 | 1 1347 | 1 1348 | 1 1349 | 5 1350 | 5 1351 | 1 1352 | 2 1353 | 3 1354 | 2 1355 | 3 1356 | 2 1357 | 3 1358 | 2 1359 | 3 1360 | 2 1361 | 3 1362 | 2 1363 | 6 1364 | 2 1365 | 6 1366 | 4 1367 | 1 1368 | 1 1369 | 2 1370 | 3 1371 | 2 1372 | 3 1373 | 1 1374 | 2 1375 | 3 1376 | 1 1377 | 2 1378 | 3 1379 | 1 1380 | 1 1381 | 1 1382 | 1 1383 | 1 1384 | 1 1385 | 1 1386 | 2 1387 | 2 1388 | 2 1389 | 2 1390 | 3 1391 | 3 1392 | 3 1393 | 3 1394 | 1 1395 | 1 1396 | 1 1397 | 1 1398 | 2 1399 | 2 1400 | 2 1401 | 2 1402 | 3 1403 | 3 1404 | 3 1405 | 3 1406 | 1 1407 | 1 1408 | 1 1409 | 1 1410 | 2 1411 | 3 1412 | 2 1413 | 3 1414 | 2 1415 | 3 1416 | 2 1417 | 3 1418 | 1 1419 | 2 1420 | 3 1421 | 4 1422 | 4 1423 | 1 1424 | 1 1425 | 1 1426 | 1 1427 | 1 1428 | 1 1429 | 2 1430 | 3 1431 | 2 1432 | 3 1433 | 2 1434 | 3 1435 | 2 1436 | 3 1437 | 1 1438 | 1 1439 | 1 1440 | 1 1441 | 1 1442 | 1 1443 | 1 1444 | 1 1445 | 1 1446 | 2 1447 | 3 1448 | 2 1449 | 3 1450 | 2 1451 | 3 1452 | 2 1453 | 3 1454 | 1 1455 | 1 1456 | 1 1457 | 1 1458 | 1 1459 | 1 1460 | 1 1461 | 1 1462 | 2 1463 | 2 1464 | 3 1465 | 2 1466 | 3 1467 | 1 1468 | 1 1469 | 2 1470 | 3 1471 | 2 1472 | 3 1473 | 1 1474 | 1 1475 | 1 1476 | 1 1477 | 2 1478 | 2 1479 | 3 1480 | 3 1481 | 1 1482 | 1 1483 | 2 1484 | 2 1485 | 3 1486 | 3 1487 | 1 1488 | 2 1489 | 3 1490 | 1 1491 | 2 1492 | 3 1493 | 4 1494 | 1 1495 | 1 1496 | 2 1497 | 2 1498 | 3 1499 | 3 1500 | 1 1501 | 1 1502 | 2 1503 | 2 1504 | 3 1505 | 1 1506 | 1 1507 | 1 1508 | 1 1509 | 1 1510 | 1 1511 | 1 1512 | 1 1513 | 1 1514 | 1 1515 | 2 1516 | 2 1517 | 2 1518 | 2 1519 | 2 1520 | 2 1521 | 2 1522 | 3 1523 | 3 1524 | 3 1525 | 3 1526 | 3 1527 | 3 1528 | 3 1529 | 1 1530 | 1 1531 | 1 1532 | 1 1533 | 1 1534 | 1 1535 | 2 1536 | 3 1537 | 2 1538 | 3 1539 | 2 1540 | 3 1541 | 5 1542 | 2 1543 | 6 1544 | 5 1545 | 2 1546 | 6 1547 | 5 1548 | 2 1549 | 6 1550 | 5 1551 | 2 1552 | 6 1553 | 1 1554 | 1 1555 | 1 1556 | 1 1557 | 1 1558 | 1 1559 | 2 1560 | 3 1561 | 2 1562 | 3 1563 | 2 1564 | 3 1565 | 2 1566 | 3 1567 | 1 1568 | 2 1569 | 3 1570 | 1 1571 | 1 1572 | 2 1573 | 2 1574 | 3 1575 | 3 1576 | 1 1577 | 1 1578 | 2 1579 | 3 1580 | 2 1581 | 3 1582 | 1 1583 | 1 1584 | 2 1585 | 3 1586 | 2 1587 | 3 1588 | 1 1589 | 2 1590 | 3 1591 | 1 1592 | 2 1593 | 3 1594 | 1 1595 | 1 1596 | 1 1597 | 1 1598 | 2 1599 | 3 1600 | 2 1601 | 3 1602 | 2 1603 | 3 1604 | 2 1605 | 3 1606 | 1 1607 | 1 1608 | 1 1609 | 1 1610 | 1 1611 | 1 1612 | 2 1613 | 3 1614 | 2 1615 | 3 1616 | 2 1617 | 3 1618 | 2 1619 | 3 1620 | 1 1621 | 1 1622 | 1 1623 | 1 1624 | 1 1625 | 1 1626 | 1 1627 | 1 1628 | 1 1629 | 1 1630 | 1 1631 | 2 1632 | 3 1633 | 2 1634 | 3 1635 | 2 1636 | 3 1637 | 2 1638 | 3 1639 | 1 1640 | 1 1641 | 1 1642 | 1 1643 | 1 1644 | 1 1645 | 2 1646 | 2 1647 | 3 1648 | 3 1649 | 2 1650 | 2 1651 | 3 1652 | 3 1653 | 1 1654 | 1 1655 | 2 1656 | 2 1657 | 3 1658 | 3 1659 | 1 1660 | 1 1661 | 1 1662 | 1 1663 | 1 1664 | 1 1665 | 1 1666 | 1 1667 | 1 1668 | 1 1669 | 1 1670 | 1 1671 | 2 1672 | 2 1673 | 3 1674 | 3 1675 | 1 1676 | 1 1677 | 1 1678 | 2 1679 | 2 
1680 | 2 1681 | 3 1682 | 3 1683 | 3 1684 | 1 1685 | 1 1686 | 1 1687 | 1 1688 | 1 1689 | 1 1690 | 1 1691 | 1 1692 | 2 1693 | 2 1694 | 2 1695 | 3 1696 | 3 1697 | 3 1698 | 1 1699 | 1 1700 | 2 1701 | 2 1702 | 3 1703 | 3 1704 | 1 1705 | 1 1706 | 1 1707 | 2 1708 | 2 1709 | 2 1710 | 3 1711 | 3 1712 | 3 1713 | 1 1714 | 1 1715 | 1 1716 | 2 1717 | 1 1718 | 1 1719 | 1 1720 | 1 1721 | 2 1722 | 2 1723 | 2 1724 | 2 1725 | 3 1726 | 3 1727 | 3 1728 | 3 1729 | 1 1730 | 1 1731 | 1 1732 | 1 1733 | 1 1734 | 2 1735 | 2 1736 | 3 1737 | 3 1738 | 1 1739 | 1 1740 | 1 1741 | 1 1742 | 1 1743 | 1 1744 | 1 1745 | 2 1746 | 2 1747 | 2 1748 | 2 1749 | 3 1750 | 3 1751 | 3 1752 | 3 1753 | 1 1754 | 1 1755 | 1 1756 | 1 1757 | 1 1758 | 1 1759 | 1 1760 | 1 1761 | 1 1762 | 1 1763 | 1 1764 | 1 1765 | 2 1766 | 3 1767 | 2 1768 | 3 1769 | 2 1770 | 3 1771 | 1 1772 | 4 1773 | 2 1774 | 3 1775 | 1 1776 | 1 1777 | 1 1778 | 2 1779 | 3 1780 | 2 1781 | 3 1782 | 2 1783 | 3 1784 | 1 1785 | 1 1786 | 1 1787 | 1 1788 | 1 1789 | 2 1790 | 3 1791 | 2 1792 | 3 1793 | 2 1794 | 3 1795 | 1 1796 | 2 1797 | 3 1798 | 1 1799 | 1 1800 | 1 1801 | 1 1802 | 1 1803 | 2 1804 | 3 1805 | 2 1806 | 3 1807 | 2 1808 | 3 1809 | 1 1810 | 2 1811 | 3 1812 | 1 1813 | 1 1814 | 1 1815 | 1 1816 | 2 1817 | 2 1818 | 2 1819 | 3 1820 | 3 1821 | 3 1822 | 1 1823 | 1 1824 | 1 1825 | 1 1826 | 1 1827 | 2 1828 | 3 1829 | 2 1830 | 3 1831 | 2 1832 | 3 1833 | 1 1834 | 1 1835 | 2 1836 | 3 1837 | 2 1838 | 3 1839 | 1 1840 | 1 1841 | 1 1842 | 2 1843 | 3 1844 | 2 1845 | 3 1846 | 2 1847 | 3 1848 | 1 1849 | 1 1850 | 1 1851 | 2 1852 | 3 1853 | 2 1854 | 3 1855 | 2 1856 | 3 1857 | 1 1858 | 1 1859 | 1 1860 | 2 1861 | 3 1862 | 2 1863 | 3 1864 | 2 1865 | 3 1866 | 1 1867 | 1 1868 | 1 1869 | 2 1870 | 2 1871 | 2 1872 | 3 1873 | 3 1874 | 3 1875 | 1 1876 | 1 1877 | 1 1878 | 1 1879 | 1 1880 | 1 1881 | 1 1882 | 1 1883 | 1 1884 | 2 1885 | 2 1886 | 2 1887 | 3 1888 | 3 1889 | 3 1890 | 4 1891 | 1 1892 | 4 1893 | 2 1894 | 3 1895 | 1 1896 | 1 1897 | 2 1898 | 3 1899 | 2 1900 | 3 1901 | 1 1902 | 1 1903 | 2 1904 | 3 1905 | 2 1906 | 3 1907 | 1 1908 | 1 1909 | 2 1910 | 2 1911 | 3 1912 | 3 1913 | 1 1914 | 1 1915 | 2 1916 | 3 1917 | 2 1918 | 3 1919 | 1 1920 | 1 1921 | 2 1922 | 2 1923 | 3 1924 | 3 1925 | 1 1926 | 2 1927 | 3 1928 | 1 1929 | 1 1930 | 2 1931 | 3 1932 | 2 1933 | 3 1934 | 1 1935 | 1 1936 | 2 1937 | 2 1938 | 3 1939 | 3 1940 | 1 1941 | 1 1942 | 2 1943 | 3 1944 | 2 1945 | 3 1946 | 1 1947 | 1 1948 | 2 1949 | 3 1950 | 1 1951 | 2 1952 | 3 1953 | 1 1954 | 1 1955 | 2 1956 | 3 1957 | 2 1958 | 3 1959 | 1 1960 | 1 1961 | 2 1962 | 3 1963 | 2 1964 | 3 1965 | 1 1966 | 1 1967 | 2 1968 | 3 1969 | 2 1970 | 3 1971 | 1 1972 | 2 1973 | 3 1974 | 1 1975 | 1 1976 | 2 1977 | 3 1978 | 2 1979 | 3 1980 | 1 1981 | 1 1982 | 2 1983 | 2 1984 | 3 1985 | 3 1986 | 1 1987 | 2 1988 | 3 1989 | 4 1990 | 1 1991 | 2 1992 | 3 1993 | 1 1994 | 4 1995 | 2 1996 | 3 1997 | 1 1998 | 1 1999 | 2 2000 | 3 2001 | -------------------------------------------------------------------------------- /data/open_source_logs/proxifier/groundtruth.seq: -------------------------------------------------------------------------------- 1 | 1 2 | 1 3 | 3 4 | 3 5 | 2 6 | 3 7 | 3 8 | 1 9 | 3 10 | 3 11 | 3 12 | 1 13 | 3 14 | 1 15 | 2 16 | 3 17 | 2 18 | 1 19 | 3 20 | 1 21 | 2 22 | 3 23 | 3 24 | 1 25 | 1 26 | 1 27 | 3 28 | 3 29 | 1 30 | 1 31 | 2 32 | 3 33 | 3 34 | 2 35 | 2 36 | 2 37 | 1 38 | 1 39 | 3 40 | 3 41 | 3 42 | 3 43 | 1 44 | 2 45 | 2 46 | 2 47 | 3 48 | 3 49 | 3 50 | 1 51 | 2 52 | 3 53 | 3 54 | 3 55 | 3 56 | 1 57 | 2 58 | 2 59 | 2 60 | 3 61 | 3 62 | 3 63 | 1 64 | 3 65 | 1 
66 | 3 67 | 1 68 | 1 69 | 1 70 | 1 71 | 1 72 | 3 73 | 3 74 | 1 75 | 3 76 | 2 77 | 2 78 | 1 79 | 2 80 | 2 81 | 2 82 | 3 83 | 3 84 | 3 85 | 1 86 | 3 87 | 3 88 | 1 89 | 1 90 | 1 91 | 1 92 | 1 93 | 3 94 | 3 95 | 1 96 | 1 97 | 3 98 | 3 99 | 1 100 | 1 101 | 1 102 | 2 103 | 1 104 | 3 105 | 3 106 | 3 107 | 2 108 | 2 109 | 1 110 | 2 111 | 1 112 | 3 113 | 1 114 | 1 115 | 3 116 | 1 117 | 1 118 | 3 119 | 3 120 | 3 121 | 2 122 | 1 123 | 3 124 | 1 125 | 1 126 | 3 127 | 3 128 | 1 129 | 3 130 | 3 131 | 1 132 | 2 133 | 2 134 | 2 135 | 3 136 | 3 137 | 3 138 | 3 139 | 3 140 | 3 141 | 1 142 | 3 143 | 3 144 | 1 145 | 3 146 | 1 147 | 1 148 | 2 149 | 2 150 | 2 151 | 2 152 | 1 153 | 3 154 | 3 155 | 3 156 | 3 157 | 3 158 | 3 159 | 3 160 | 1 161 | 1 162 | 1 163 | 3 164 | 1 165 | 1 166 | 3 167 | 1 168 | 1 169 | 1 170 | 3 171 | 3 172 | 1 173 | 1 174 | 3 175 | 1 176 | 3 177 | 3 178 | 3 179 | 1 180 | 1 181 | 1 182 | 2 183 | 2 184 | 2 185 | 1 186 | 3 187 | 3 188 | 3 189 | 3 190 | 3 191 | 3 192 | 1 193 | 1 194 | 1 195 | 3 196 | 3 197 | 1 198 | 3 199 | 3 200 | 1 201 | 1 202 | 1 203 | 1 204 | 3 205 | 3 206 | 3 207 | 1 208 | 3 209 | 1 210 | 3 211 | 3 212 | 3 213 | 3 214 | 3 215 | 3 216 | 1 217 | 3 218 | 3 219 | 3 220 | 1 221 | 1 222 | 1 223 | 2 224 | 2 225 | 2 226 | 3 227 | 3 228 | 3 229 | 3 230 | 3 231 | 3 232 | 3 233 | 2 234 | 1 235 | 3 236 | 2 237 | 2 238 | 2 239 | 2 240 | 3 241 | 3 242 | 3 243 | 3 244 | 2 245 | 2 246 | 2 247 | 1 248 | 3 249 | 3 250 | 3 251 | 1 252 | 1 253 | 2 254 | 1 255 | 2 256 | 3 257 | 3 258 | 1 259 | 3 260 | 3 261 | 1 262 | 1 263 | 3 264 | 1 265 | 3 266 | 1 267 | 3 268 | 3 269 | 1 270 | 1 271 | 2 272 | 3 273 | 2 274 | 1 275 | 3 276 | 3 277 | 1 278 | 3 279 | 2 280 | 2 281 | 2 282 | 2 283 | 2 284 | 2 285 | 2 286 | 2 287 | 3 288 | 3 289 | 3 290 | 3 291 | 3 292 | 2 293 | 2 294 | 2 295 | 2 296 | 1 297 | 2 298 | 1 299 | 1 300 | 3 301 | 3 302 | 2 303 | 2 304 | 2 305 | 2 306 | 2 307 | 2 308 | 3 309 | 2 310 | 3 311 | 3 312 | 3 313 | 3 314 | 3 315 | 1 316 | 1 317 | 1 318 | 2 319 | 1 320 | 3 321 | 1 322 | 3 323 | 3 324 | 3 325 | 3 326 | 2 327 | 2 328 | 2 329 | 3 330 | 1 331 | 2 332 | 1 333 | 1 334 | 1 335 | 1 336 | 3 337 | 3 338 | 3 339 | 1 340 | 1 341 | 2 342 | 3 343 | 3 344 | 1 345 | 1 346 | 3 347 | 3 348 | 3 349 | 3 350 | 3 351 | 2 352 | 3 353 | 2 354 | 2 355 | 3 356 | 3 357 | 3 358 | 2 359 | 2 360 | 3 361 | 2 362 | 2 363 | 2 364 | 3 365 | 2 366 | 3 367 | 2 368 | 3 369 | 3 370 | 3 371 | 1 372 | 1 373 | 3 374 | 3 375 | 3 376 | 3 377 | 1 378 | 1 379 | 2 380 | 1 381 | 2 382 | 2 383 | 3 384 | 3 385 | 1 386 | 3 387 | 3 388 | 3 389 | 3 390 | 3 391 | 3 392 | 1 393 | 3 394 | 3 395 | 3 396 | 3 397 | 1 398 | 3 399 | 1 400 | 3 401 | 3 402 | 1 403 | 3 404 | 3 405 | 3 406 | 3 407 | 1 408 | 3 409 | 1 410 | 3 411 | 3 412 | 1 413 | 2 414 | 2 415 | 3 416 | 3 417 | 3 418 | 3 419 | 3 420 | 3 421 | 3 422 | 1 423 | 2 424 | 3 425 | 3 426 | 1 427 | 1 428 | 3 429 | 3 430 | 3 431 | 2 432 | 2 433 | 3 434 | 3 435 | 3 436 | 3 437 | 2 438 | 3 439 | 2 440 | 2 441 | 3 442 | 3 443 | 2 444 | 2 445 | 2 446 | 1 447 | 3 448 | 2 449 | 2 450 | 2 451 | 1 452 | 2 453 | 3 454 | 3 455 | 2 456 | 3 457 | 2 458 | 2 459 | 1 460 | 3 461 | 3 462 | 1 463 | 1 464 | 2 465 | 3 466 | 3 467 | 3 468 | 2 469 | 2 470 | 1 471 | 3 472 | 3 473 | 3 474 | 2 475 | 2 476 | 3 477 | 3 478 | 2 479 | 2 480 | 2 481 | 2 482 | 2 483 | 3 484 | 3 485 | 3 486 | 3 487 | 3 488 | 3 489 | 2 490 | 3 491 | 2 492 | 2 493 | 3 494 | 2 495 | 2 496 | 2 497 | 2 498 | 3 499 | 2 500 | 2 501 | 2 502 | 3 503 | 3 504 | 2 505 | 2 506 | 2 507 | 3 508 | 2 509 | 2 510 | 2 511 | 2 512 | 3 513 | 1 514 
| 2 515 | 2 516 | 3 517 | 1 518 | 2 519 | 2 520 | 3 521 | 2 522 | 1 523 | 1 524 | 3 525 | 3 526 | 3 527 | 1 528 | 1 529 | 2 530 | 2 531 | 1 532 | 2 533 | 2 534 | 3 535 | 3 536 | 3 537 | 1 538 | 1 539 | 1 540 | 1 541 | 2 542 | 2 543 | 3 544 | 3 545 | 3 546 | 3 547 | 1 548 | 2 549 | 3 550 | 2 551 | 3 552 | 2 553 | 3 554 | 3 555 | 2 556 | 2 557 | 3 558 | 2 559 | 3 560 | 1 561 | 1 562 | 2 563 | 3 564 | 3 565 | 2 566 | 2 567 | 3 568 | 2 569 | 2 570 | 3 571 | 2 572 | 3 573 | 2 574 | 2 575 | 2 576 | 2 577 | 2 578 | 3 579 | 3 580 | 3 581 | 3 582 | 3 583 | 3 584 | 3 585 | 3 586 | 1 587 | 1 588 | 1 589 | 3 590 | 2 591 | 3 592 | 3 593 | 3 594 | 2 595 | 3 596 | 3 597 | 3 598 | 2 599 | 2 600 | 2 601 | 2 602 | 3 603 | 3 604 | 2 605 | 3 606 | 2 607 | 2 608 | 2 609 | 2 610 | 2 611 | 2 612 | 3 613 | 2 614 | 1 615 | 3 616 | 2 617 | 1 618 | 3 619 | 3 620 | 1 621 | 3 622 | 1 623 | 3 624 | 2 625 | 3 626 | 3 627 | 3 628 | 3 629 | 3 630 | 3 631 | 1 632 | 3 633 | 3 634 | 2 635 | 2 636 | 2 637 | 3 638 | 3 639 | 2 640 | 2 641 | 2 642 | 3 643 | 2 644 | 3 645 | 3 646 | 3 647 | 2 648 | 3 649 | 2 650 | 2 651 | 2 652 | 2 653 | 3 654 | 3 655 | 2 656 | 3 657 | 3 658 | 3 659 | 3 660 | 3 661 | 3 662 | 2 663 | 3 664 | 1 665 | 3 666 | 1 667 | 3 668 | 3 669 | 3 670 | 2 671 | 3 672 | 3 673 | 1 674 | 2 675 | 3 676 | 2 677 | 3 678 | 1 679 | 1 680 | 2 681 | 2 682 | 2 683 | 3 684 | 2 685 | 3 686 | 1 687 | 1 688 | 1 689 | 3 690 | 3 691 | 3 692 | 2 693 | 3 694 | 1 695 | 2 696 | 2 697 | 3 698 | 1 699 | 3 700 | 3 701 | 3 702 | 2 703 | 3 704 | 3 705 | 2 706 | 2 707 | 3 708 | 2 709 | 1 710 | 3 711 | 3 712 | 2 713 | 2 714 | 2 715 | 3 716 | 3 717 | 3 718 | 3 719 | 3 720 | 3 721 | 1 722 | 2 723 | 3 724 | 3 725 | 3 726 | 3 727 | 2 728 | 2 729 | 2 730 | 2 731 | 2 732 | 2 733 | 2 734 | 2 735 | 3 736 | 3 737 | 3 738 | 2 739 | 2 740 | 2 741 | 2 742 | 2 743 | 3 744 | 3 745 | 3 746 | 3 747 | 3 748 | 3 749 | 3 750 | 3 751 | 3 752 | 3 753 | 3 754 | 1 755 | 3 756 | 2 757 | 3 758 | 3 759 | 3 760 | 3 761 | 1 762 | 3 763 | 3 764 | 1 765 | 2 766 | 3 767 | 2 768 | 3 769 | 2 770 | 2 771 | 3 772 | 2 773 | 2 774 | 3 775 | 1 776 | 2 777 | 2 778 | 2 779 | 3 780 | 3 781 | 1 782 | 2 783 | 3 784 | 3 785 | 1 786 | 1 787 | 3 788 | 3 789 | 3 790 | 2 791 | 2 792 | 2 793 | 3 794 | 2 795 | 3 796 | 2 797 | 2 798 | 3 799 | 3 800 | 2 801 | 3 802 | 1 803 | 1 804 | 2 805 | 2 806 | 2 807 | 2 808 | 3 809 | 3 810 | 3 811 | 3 812 | 3 813 | 3 814 | 3 815 | 3 816 | 1 817 | 2 818 | 1 819 | 2 820 | 3 821 | 3 822 | 2 823 | 2 824 | 2 825 | 3 826 | 3 827 | 3 828 | 3 829 | 1 830 | 3 831 | 3 832 | 1 833 | 3 834 | 2 835 | 3 836 | 3 837 | 2 838 | 2 839 | 1 840 | 2 841 | 2 842 | 2 843 | 3 844 | 3 845 | 3 846 | 3 847 | 2 848 | 3 849 | 1 850 | 3 851 | 1 852 | 1 853 | 2 854 | 2 855 | 2 856 | 2 857 | 2 858 | 2 859 | 1 860 | 3 861 | 3 862 | 1 863 | 2 864 | 2 865 | 2 866 | 2 867 | 2 868 | 2 869 | 3 870 | 1 871 | 3 872 | 3 873 | 3 874 | 2 875 | 1 876 | 3 877 | 3 878 | 1 879 | 3 880 | 3 881 | 1 882 | 1 883 | 1 884 | 3 885 | 3 886 | 1 887 | 3 888 | 1 889 | 3 890 | 1 891 | 3 892 | 1 893 | 3 894 | 3 895 | 5 896 | 3 897 | 5 898 | 3 899 | 3 900 | 2 901 | 3 902 | 3 903 | 2 904 | 3 905 | 1 906 | 3 907 | 2 908 | 3 909 | 1 910 | 3 911 | 3 912 | 1 913 | 3 914 | 3 915 | 4 916 | 3 917 | 2 918 | 2 919 | 1 920 | 1 921 | 6 922 | 3 923 | 3 924 | 3 925 | 2 926 | 3 927 | 3 928 | 3 929 | 2 930 | 1 931 | 1 932 | 1 933 | 2 934 | 2 935 | 2 936 | 2 937 | 3 938 | 1 939 | 3 940 | 3 941 | 1 942 | 3 943 | 2 944 | 3 945 | 1 946 | 3 947 | 3 948 | 3 949 | 4 950 | 4 951 | 3 952 | 3 953 | 3 954 | 1 955 | 7 956 | 3 957 | 3 958 | 
3 959 | 3 960 | 3 961 | 2 962 | 3 963 | 3 964 | 3 965 | 2 966 | 2 967 | 2 968 | 3 969 | 2 970 | 2 971 | 2 972 | 2 973 | 3 974 | 3 975 | 3 976 | 7 977 | 3 978 | 3 979 | 3 980 | 2 981 | 3 982 | 3 983 | 3 984 | 3 985 | 2 986 | 3 987 | 2 988 | 2 989 | 2 990 | 2 991 | 3 992 | 4 993 | 4 994 | 2 995 | 2 996 | 3 997 | 1 998 | 1 999 | 3 1000 | 3 1001 | 4 1002 | 4 1003 | 4 1004 | 3 1005 | 1 1006 | 2 1007 | 2 1008 | 3 1009 | 3 1010 | 3 1011 | 2 1012 | 3 1013 | 2 1014 | 2 1015 | 2 1016 | 2 1017 | 2 1018 | 2 1019 | 3 1020 | 3 1021 | 3 1022 | 3 1023 | 3 1024 | 3 1025 | 3 1026 | 7 1027 | 2 1028 | 3 1029 | 3 1030 | 3 1031 | 2 1032 | 3 1033 | 2 1034 | 3 1035 | 3 1036 | 2 1037 | 2 1038 | 7 1039 | 2 1040 | 3 1041 | 3 1042 | 3 1043 | 2 1044 | 2 1045 | 3 1046 | 1 1047 | 3 1048 | 1 1049 | 3 1050 | 2 1051 | 2 1052 | 3 1053 | 3 1054 | 2 1055 | 2 1056 | 2 1057 | 2 1058 | 2 1059 | 3 1060 | 3 1061 | 3 1062 | 3 1063 | 3 1064 | 3 1065 | 3 1066 | 3 1067 | 3 1068 | 3 1069 | 4 1070 | 2 1071 | 1 1072 | 2 1073 | 2 1074 | 2 1075 | 2 1076 | 2 1077 | 2 1078 | 3 1079 | 3 1080 | 3 1081 | 3 1082 | 3 1083 | 2 1084 | 2 1085 | 3 1086 | 3 1087 | 3 1088 | 3 1089 | 3 1090 | 3 1091 | 3 1092 | 2 1093 | 2 1094 | 2 1095 | 2 1096 | 3 1097 | 3 1098 | 3 1099 | 3 1100 | 2 1101 | 1 1102 | 2 1103 | 2 1104 | 2 1105 | 3 1106 | 3 1107 | 2 1108 | 2 1109 | 3 1110 | 3 1111 | 2 1112 | 3 1113 | 1 1114 | 3 1115 | 2 1116 | 2 1117 | 2 1118 | 2 1119 | 2 1120 | 2 1121 | 3 1122 | 3 1123 | 3 1124 | 3 1125 | 2 1126 | 4 1127 | 3 1128 | 3 1129 | 3 1130 | 3 1131 | 3 1132 | 3 1133 | 3 1134 | 3 1135 | 2 1136 | 2 1137 | 2 1138 | 3 1139 | 3 1140 | 1 1141 | 2 1142 | 2 1143 | 2 1144 | 3 1145 | 2 1146 | 3 1147 | 2 1148 | 2 1149 | 2 1150 | 2 1151 | 3 1152 | 3 1153 | 3 1154 | 3 1155 | 1 1156 | 3 1157 | 1 1158 | 3 1159 | 2 1160 | 2 1161 | 2 1162 | 3 1163 | 3 1164 | 3 1165 | 3 1166 | 2 1167 | 3 1168 | 3 1169 | 1 1170 | 1 1171 | 1 1172 | 2 1173 | 3 1174 | 3 1175 | 2 1176 | 3 1177 | 3 1178 | 2 1179 | 1 1180 | 3 1181 | 2 1182 | 3 1183 | 2 1184 | 2 1185 | 3 1186 | 2 1187 | 2 1188 | 3 1189 | 3 1190 | 2 1191 | 2 1192 | 3 1193 | 2 1194 | 1 1195 | 3 1196 | 1 1197 | 1 1198 | 3 1199 | 3 1200 | 2 1201 | 3 1202 | 2 1203 | 3 1204 | 3 1205 | 2 1206 | 3 1207 | 1 1208 | 3 1209 | 2 1210 | 1 1211 | 3 1212 | 1 1213 | 2 1214 | 2 1215 | 3 1216 | 1 1217 | 3 1218 | 3 1219 | 2 1220 | 2 1221 | 3 1222 | 3 1223 | 1 1224 | 2 1225 | 3 1226 | 3 1227 | 1 1228 | 2 1229 | 2 1230 | 3 1231 | 2 1232 | 3 1233 | 4 1234 | 2 1235 | 3 1236 | 2 1237 | 2 1238 | 2 1239 | 2 1240 | 2 1241 | 3 1242 | 2 1243 | 4 1244 | 3 1245 | 3 1246 | 4 1247 | 4 1248 | 3 1249 | 3 1250 | 3 1251 | 1 1252 | 3 1253 | 2 1254 | 2 1255 | 3 1256 | 3 1257 | 3 1258 | 1 1259 | 2 1260 | 2 1261 | 3 1262 | 3 1263 | 2 1264 | 2 1265 | 2 1266 | 3 1267 | 3 1268 | 3 1269 | 2 1270 | 3 1271 | 2 1272 | 3 1273 | 3 1274 | 3 1275 | 3 1276 | 1 1277 | 2 1278 | 2 1279 | 2 1280 | 2 1281 | 3 1282 | 3 1283 | 3 1284 | 3 1285 | 2 1286 | 3 1287 | 2 1288 | 2 1289 | 2 1290 | 1 1291 | 2 1292 | 1 1293 | 1 1294 | 3 1295 | 2 1296 | 2 1297 | 3 1298 | 3 1299 | 3 1300 | 2 1301 | 2 1302 | 1 1303 | 3 1304 | 3 1305 | 3 1306 | 3 1307 | 3 1308 | 3 1309 | 2 1310 | 3 1311 | 2 1312 | 2 1313 | 2 1314 | 2 1315 | 2 1316 | 3 1317 | 3 1318 | 3 1319 | 3 1320 | 3 1321 | 2 1322 | 2 1323 | 2 1324 | 3 1325 | 4 1326 | 2 1327 | 2 1328 | 3 1329 | 3 1330 | 1 1331 | 2 1332 | 3 1333 | 3 1334 | 2 1335 | 2 1336 | 2 1337 | 3 1338 | 2 1339 | 2 1340 | 3 1341 | 3 1342 | 3 1343 | 3 1344 | 1 1345 | 1 1346 | 3 1347 | 3 1348 | 3 1349 | 3 1350 | 3 1351 | 3 1352 | 1 1353 | 3 1354 | 1 1355 | 3 1356 | 2 1357 | 2 
1358 | 3 1359 | 3 1360 | 2 1361 | 2 1362 | 3 1363 | 2 1364 | 3 1365 | 1 1366 | 1 1367 | 2 1368 | 2 1369 | 2 1370 | 2 1371 | 2 1372 | 1 1373 | 1 1374 | 3 1375 | 3 1376 | 3 1377 | 3 1378 | 3 1379 | 3 1380 | 3 1381 | 2 1382 | 3 1383 | 2 1384 | 3 1385 | 2 1386 | 2 1387 | 2 1388 | 3 1389 | 3 1390 | 1 1391 | 3 1392 | 2 1393 | 3 1394 | 3 1395 | 2 1396 | 2 1397 | 2 1398 | 3 1399 | 3 1400 | 3 1401 | 3 1402 | 3 1403 | 1 1404 | 2 1405 | 2 1406 | 3 1407 | 3 1408 | 3 1409 | 1 1410 | 2 1411 | 1 1412 | 3 1413 | 3 1414 | 3 1415 | 3 1416 | 3 1417 | 2 1418 | 3 1419 | 2 1420 | 2 1421 | 2 1422 | 3 1423 | 3 1424 | 2 1425 | 2 1426 | 2 1427 | 3 1428 | 3 1429 | 2 1430 | 3 1431 | 3 1432 | 3 1433 | 3 1434 | 1 1435 | 1 1436 | 3 1437 | 3 1438 | 2 1439 | 2 1440 | 1 1441 | 2 1442 | 2 1443 | 3 1444 | 2 1445 | 3 1446 | 3 1447 | 3 1448 | 1 1449 | 1 1450 | 3 1451 | 3 1452 | 2 1453 | 2 1454 | 3 1455 | 3 1456 | 1 1457 | 2 1458 | 3 1459 | 2 1460 | 2 1461 | 2 1462 | 1 1463 | 3 1464 | 2 1465 | 3 1466 | 2 1467 | 2 1468 | 3 1469 | 3 1470 | 3 1471 | 3 1472 | 2 1473 | 2 1474 | 3 1475 | 2 1476 | 2 1477 | 2 1478 | 2 1479 | 2 1480 | 3 1481 | 3 1482 | 3 1483 | 1 1484 | 3 1485 | 2 1486 | 1 1487 | 2 1488 | 2 1489 | 2 1490 | 2 1491 | 2 1492 | 3 1493 | 3 1494 | 3 1495 | 2 1496 | 2 1497 | 3 1498 | 2 1499 | 1 1500 | 3 1501 | 3 1502 | 3 1503 | 2 1504 | 3 1505 | 3 1506 | 2 1507 | 3 1508 | 2 1509 | 3 1510 | 3 1511 | 1 1512 | 3 1513 | 2 1514 | 2 1515 | 3 1516 | 3 1517 | 3 1518 | 2 1519 | 3 1520 | 3 1521 | 3 1522 | 2 1523 | 2 1524 | 2 1525 | 3 1526 | 3 1527 | 2 1528 | 2 1529 | 1 1530 | 1 1531 | 3 1532 | 3 1533 | 3 1534 | 1 1535 | 2 1536 | 2 1537 | 3 1538 | 3 1539 | 3 1540 | 3 1541 | 3 1542 | 3 1543 | 3 1544 | 3 1545 | 3 1546 | 3 1547 | 2 1548 | 3 1549 | 2 1550 | 2 1551 | 3 1552 | 1 1553 | 2 1554 | 2 1555 | 2 1556 | 2 1557 | 2 1558 | 2 1559 | 2 1560 | 3 1561 | 3 1562 | 3 1563 | 3 1564 | 2 1565 | 3 1566 | 2 1567 | 2 1568 | 3 1569 | 3 1570 | 3 1571 | 3 1572 | 3 1573 | 1 1574 | 2 1575 | 2 1576 | 2 1577 | 4 1578 | 4 1579 | 2 1580 | 2 1581 | 3 1582 | 3 1583 | 3 1584 | 1 1585 | 2 1586 | 2 1587 | 1 1588 | 1 1589 | 2 1590 | 2 1591 | 2 1592 | 2 1593 | 3 1594 | 3 1595 | 3 1596 | 3 1597 | 3 1598 | 3 1599 | 2 1600 | 2 1601 | 2 1602 | 3 1603 | 4 1604 | 2 1605 | 2 1606 | 2 1607 | 2 1608 | 2 1609 | 3 1610 | 1 1611 | 2 1612 | 3 1613 | 3 1614 | 3 1615 | 3 1616 | 3 1617 | 3 1618 | 3 1619 | 1 1620 | 3 1621 | 1 1622 | 1 1623 | 1 1624 | 2 1625 | 2 1626 | 3 1627 | 1 1628 | 3 1629 | 3 1630 | 1 1631 | 3 1632 | 2 1633 | 3 1634 | 1 1635 | 2 1636 | 2 1637 | 2 1638 | 3 1639 | 3 1640 | 1 1641 | 2 1642 | 3 1643 | 3 1644 | 3 1645 | 3 1646 | 1 1647 | 2 1648 | 2 1649 | 2 1650 | 3 1651 | 3 1652 | 3 1653 | 3 1654 | 1 1655 | 2 1656 | 2 1657 | 2 1658 | 3 1659 | 2 1660 | 2 1661 | 2 1662 | 3 1663 | 3 1664 | 1 1665 | 2 1666 | 3 1667 | 3 1668 | 3 1669 | 1 1670 | 3 1671 | 2 1672 | 3 1673 | 3 1674 | 3 1675 | 3 1676 | 3 1677 | 3 1678 | 1 1679 | 3 1680 | 1 1681 | 3 1682 | 3 1683 | 3 1684 | 3 1685 | 2 1686 | 2 1687 | 2 1688 | 2 1689 | 2 1690 | 2 1691 | 3 1692 | 3 1693 | 3 1694 | 3 1695 | 2 1696 | 3 1697 | 3 1698 | 2 1699 | 2 1700 | 2 1701 | 3 1702 | 3 1703 | 3 1704 | 1 1705 | 3 1706 | 3 1707 | 3 1708 | 3 1709 | 2 1710 | 3 1711 | 2 1712 | 2 1713 | 2 1714 | 2 1715 | 2 1716 | 2 1717 | 2 1718 | 3 1719 | 3 1720 | 3 1721 | 1 1722 | 2 1723 | 2 1724 | 3 1725 | 2 1726 | 2 1727 | 2 1728 | 2 1729 | 3 1730 | 3 1731 | 3 1732 | 3 1733 | 2 1734 | 3 1735 | 3 1736 | 3 1737 | 2 1738 | 2 1739 | 3 1740 | 2 1741 | 2 1742 | 3 1743 | 3 1744 | 3 1745 | 3 1746 | 3 1747 | 3 1748 | 1 1749 | 1 1750 | 3 1751 | 3 1752 | 2 
1753 | 3 1754 | 1 1755 | 1 1756 | 2 1757 | 2 1758 | 2 1759 | 3 1760 | 3 1761 | 3 1762 | 3 1763 | 1 1764 | 1 1765 | 3 1766 | 2 1767 | 3 1768 | 3 1769 | 1 1770 | 3 1771 | 3 1772 | 2 1773 | 1 1774 | 3 1775 | 2 1776 | 2 1777 | 2 1778 | 2 1779 | 3 1780 | 3 1781 | 3 1782 | 3 1783 | 3 1784 | 3 1785 | 1 1786 | 3 1787 | 3 1788 | 2 1789 | 2 1790 | 2 1791 | 3 1792 | 3 1793 | 2 1794 | 3 1795 | 3 1796 | 3 1797 | 3 1798 | 2 1799 | 1 1800 | 3 1801 | 2 1802 | 2 1803 | 2 1804 | 2 1805 | 3 1806 | 3 1807 | 3 1808 | 2 1809 | 2 1810 | 3 1811 | 3 1812 | 2 1813 | 3 1814 | 3 1815 | 2 1816 | 1 1817 | 3 1818 | 2 1819 | 2 1820 | 3 1821 | 2 1822 | 2 1823 | 2 1824 | 3 1825 | 4 1826 | 2 1827 | 2 1828 | 2 1829 | 2 1830 | 2 1831 | 3 1832 | 3 1833 | 3 1834 | 3 1835 | 3 1836 | 3 1837 | 1 1838 | 1 1839 | 1 1840 | 3 1841 | 3 1842 | 4 1843 | 4 1844 | 4 1845 | 2 1846 | 2 1847 | 1 1848 | 3 1849 | 1 1850 | 2 1851 | 3 1852 | 4 1853 | 4 1854 | 4 1855 | 1 1856 | 1 1857 | 4 1858 | 4 1859 | 3 1860 | 3 1861 | 3 1862 | 3 1863 | 3 1864 | 3 1865 | 3 1866 | 3 1867 | 2 1868 | 3 1869 | 3 1870 | 2 1871 | 3 1872 | 2 1873 | 3 1874 | 2 1875 | 2 1876 | 3 1877 | 3 1878 | 1 1879 | 2 1880 | 3 1881 | 2 1882 | 3 1883 | 2 1884 | 2 1885 | 2 1886 | 2 1887 | 3 1888 | 3 1889 | 1 1890 | 1 1891 | 1 1892 | 1 1893 | 1 1894 | 3 1895 | 1 1896 | 3 1897 | 3 1898 | 1 1899 | 1 1900 | 1 1901 | 3 1902 | 3 1903 | 3 1904 | 2 1905 | 1 1906 | 1 1907 | 3 1908 | 3 1909 | 3 1910 | 2 1911 | 2 1912 | 3 1913 | 3 1914 | 3 1915 | 3 1916 | 3 1917 | 3 1918 | 2 1919 | 3 1920 | 3 1921 | 3 1922 | 3 1923 | 1 1924 | 3 1925 | 3 1926 | 3 1927 | 2 1928 | 2 1929 | 3 1930 | 2 1931 | 3 1932 | 3 1933 | 2 1934 | 3 1935 | 2 1936 | 2 1937 | 2 1938 | 2 1939 | 2 1940 | 3 1941 | 3 1942 | 2 1943 | 3 1944 | 3 1945 | 3 1946 | 2 1947 | 3 1948 | 2 1949 | 2 1950 | 2 1951 | 3 1952 | 2 1953 | 3 1954 | 3 1955 | 3 1956 | 3 1957 | 3 1958 | 3 1959 | 2 1960 | 3 1961 | 3 1962 | 2 1963 | 3 1964 | 3 1965 | 2 1966 | 2 1967 | 3 1968 | 3 1969 | 3 1970 | 3 1971 | 3 1972 | 3 1973 | 3 1974 | 1 1975 | 3 1976 | 3 1977 | 7 1978 | 4 1979 | 2 1980 | 1 1981 | 3 1982 | 1 1983 | 3 1984 | 1 1985 | 2 1986 | 2 1987 | 2 1988 | 4 1989 | 4 1990 | 2 1991 | 2 1992 | 2 1993 | 4 1994 | 4 1995 | 4 1996 | 4 1997 | 3 1998 | 3 1999 | 1 2000 | 3 2001 | -------------------------------------------------------------------------------- /data/open_source_logs/hadoop/groundtruth.seq: -------------------------------------------------------------------------------- 1 | 53 2 | 63 3 | 70 4 | 93 5 | 78 6 | 77 7 | 27 8 | 27 9 | 27 10 | 27 11 | 27 12 | 27 13 | 27 14 | 27 15 | 30 16 | 30 17 | 30 18 | 59 19 | 27 20 | 109 21 | 83 22 | 73 23 | 47 24 | 74 25 | 66 26 | 76 27 | 107 28 | 72 29 | 39 30 | 36 31 | 48 32 | 67 33 | 32 34 | 33 35 | 71 36 | 65 37 | 100 38 | 98 39 | 99 40 | 42 41 | 42 42 | 69 43 | 105 44 | 64 45 | 87 46 | 95 47 | 82 48 | 39 49 | 68 50 | 112 51 | 111 52 | 104 53 | 36 54 | 33 55 | 32 56 | 51 57 | 110 58 | 113 59 | 92 60 | 116 61 | 106 62 | 79 63 | 108 64 | 8 65 | 8 66 | 26 67 | 8 68 | 8 69 | 26 70 | 8 71 | 8 72 | 26 73 | 8 74 | 8 75 | 26 76 | 8 77 | 8 78 | 26 79 | 8 80 | 8 81 | 26 82 | 8 83 | 8 84 | 26 85 | 8 86 | 8 87 | 26 88 | 8 89 | 8 90 | 26 91 | 8 92 | 8 93 | 26 94 | 115 95 | 11 96 | 11 97 | 11 98 | 11 99 | 11 100 | 11 101 | 11 102 | 11 103 | 11 104 | 11 105 | 103 106 | 97 107 | 62 108 | 96 109 | 28 110 | 13 111 | 6 112 | 7 113 | 6 114 | 7 115 | 17 116 | 8 117 | 15 118 | 6 119 | 7 120 | 12 121 | 8 122 | 90 123 | 89 124 | 46 125 | 85 126 | 80 127 | 24 128 | 10 129 | 20 130 | 9 131 | 21 132 | 22 133 | 23 134 | 14 135 | 25 136 | 13 137 | 
6 138 | 7 139 | 17 140 | 8 141 | 15 142 | 6 143 | 7 144 | 12 145 | 8 146 | 24 147 | 10 148 | 20 149 | 9 150 | 21 151 | 22 152 | 23 153 | 14 154 | 25 155 | 13 156 | 6 157 | 7 158 | 17 159 | 8 160 | 15 161 | 6 162 | 7 163 | 12 164 | 8 165 | 24 166 | 10 167 | 20 168 | 9 169 | 21 170 | 22 171 | 23 172 | 14 173 | 25 174 | 13 175 | 6 176 | 7 177 | 16 178 | 18 179 | 19 180 | 6 181 | 7 182 | 6 183 | 7 184 | 6 185 | 7 186 | 16 187 | 18 188 | 19 189 | 6 190 | 7 191 | 6 192 | 7 193 | 6 194 | 7 195 | 6 196 | 7 197 | 6 198 | 7 199 | 6 200 | 7 201 | 6 202 | 7 203 | 16 204 | 18 205 | 19 206 | 6 207 | 7 208 | 6 209 | 7 210 | 6 211 | 7 212 | 3 213 | 6 214 | 7 215 | 6 216 | 7 217 | 6 218 | 7 219 | 6 220 | 7 221 | 3 222 | 6 223 | 7 224 | 3 225 | 6 226 | 7 227 | 6 228 | 7 229 | 6 230 | 7 231 | 3 232 | 3 233 | 3 234 | 6 235 | 7 236 | 6 237 | 7 238 | 6 239 | 7 240 | 3 241 | 6 242 | 7 243 | 3 244 | 3 245 | 6 246 | 7 247 | 6 248 | 7 249 | 3 250 | 6 251 | 7 252 | 3 253 | 3 254 | 6 255 | 7 256 | 6 257 | 7 258 | 6 259 | 7 260 | 3 261 | 3 262 | 6 263 | 7 264 | 3 265 | 6 266 | 7 267 | 6 268 | 7 269 | 3 270 | 3 271 | 6 272 | 7 273 | 3 274 | 6 275 | 7 276 | 6 277 | 7 278 | 3 279 | 6 280 | 7 281 | 3 282 | 3 283 | 6 284 | 7 285 | 6 286 | 7 287 | 6 288 | 7 289 | 3 290 | 6 291 | 7 292 | 3 293 | 3 294 | 17 295 | 15 296 | 6 297 | 7 298 | 12 299 | 8 300 | 24 301 | 10 302 | 20 303 | 9 304 | 21 305 | 22 306 | 23 307 | 14 308 | 25 309 | 13 310 | 6 311 | 7 312 | 3 313 | 6 314 | 7 315 | 3 316 | 3 317 | 16 318 | 18 319 | 19 320 | 6 321 | 7 322 | 6 323 | 7 324 | 6 325 | 7 326 | 3 327 | 3 328 | 3 329 | 6 330 | 7 331 | 6 332 | 7 333 | 6 334 | 7 335 | 3 336 | 6 337 | 7 338 | 3 339 | 3 340 | 3 341 | 6 342 | 7 343 | 6 344 | 7 345 | 3 346 | 6 347 | 7 348 | 3 349 | 3 350 | 3 351 | 6 352 | 7 353 | 6 354 | 7 355 | 3 356 | 6 357 | 7 358 | 3 359 | 3 360 | 3 361 | 6 362 | 7 363 | 6 364 | 7 365 | 6 366 | 7 367 | 3 368 | 3 369 | 3 370 | 3 371 | 6 372 | 7 373 | 6 374 | 7 375 | 6 376 | 7 377 | 3 378 | 3 379 | 3 380 | 6 381 | 7 382 | 3 383 | 6 384 | 7 385 | 6 386 | 7 387 | 3 388 | 3 389 | 6 390 | 7 391 | 3 392 | 3 393 | 6 394 | 7 395 | 6 396 | 7 397 | 3 398 | 6 399 | 7 400 | 3 401 | 3 402 | 3 403 | 6 404 | 7 405 | 3 406 | 6 407 | 7 408 | 6 409 | 7 410 | 3 411 | 3 412 | 6 413 | 7 414 | 3 415 | 3 416 | 6 417 | 7 418 | 6 419 | 7 420 | 3 421 | 6 422 | 7 423 | 3 424 | 3 425 | 3 426 | 6 427 | 7 428 | 6 429 | 7 430 | 3 431 | 3 432 | 6 433 | 7 434 | 3 435 | 3 436 | 6 437 | 7 438 | 6 439 | 7 440 | 3 441 | 3 442 | 6 443 | 7 444 | 3 445 | 3 446 | 6 447 | 7 448 | 6 449 | 7 450 | 3 451 | 3 452 | 6 453 | 7 454 | 3 455 | 3 456 | 6 457 | 7 458 | 6 459 | 7 460 | 3 461 | 3 462 | 6 463 | 7 464 | 3 465 | 3 466 | 6 467 | 7 468 | 3 469 | 6 470 | 7 471 | 6 472 | 7 473 | 3 474 | 3 475 | 3 476 | 6 477 | 7 478 | 3 479 | 6 480 | 7 481 | 6 482 | 7 483 | 3 484 | 3 485 | 6 486 | 7 487 | 3 488 | 3 489 | 6 490 | 7 491 | 6 492 | 7 493 | 3 494 | 6 495 | 7 496 | 3 497 | 3 498 | 3 499 | 6 500 | 7 501 | 6 502 | 7 503 | 6 504 | 7 505 | 3 506 | 3 507 | 3 508 | 3 509 | 6 510 | 7 511 | 6 512 | 7 513 | 17 514 | 15 515 | 15 516 | 6 517 | 7 518 | 12 519 | 8 520 | 24 521 | 8 522 | 24 523 | 10 524 | 20 525 | 9 526 | 10 527 | 20 528 | 9 529 | 21 530 | 22 531 | 23 532 | 14 533 | 25 534 | 21 535 | 3 536 | 22 537 | 23 538 | 14 539 | 25 540 | 3 541 | 3 542 | 13 543 | 17 544 | 15 545 | 6 546 | 7 547 | 12 548 | 8 549 | 24 550 | 3 551 | 10 552 | 20 553 | 9 554 | 21 555 | 22 556 | 23 557 | 14 558 | 25 559 | 16 560 | 16 561 | 18 562 | 18 563 | 19 564 | 19 565 | 13 566 | 6 567 | 7 568 | 16 569 | 18 570 | 
19 571 | 6 572 | 7 573 | 3 574 | 3 575 | 6 576 | 7 577 | 3 578 | 3 579 | 6 580 | 7 581 | 6 582 | 7 583 | 3 584 | 6 585 | 7 586 | 3 587 | 3 588 | 3 589 | 6 590 | 7 591 | 3 592 | 17 593 | 15 594 | 6 595 | 7 596 | 12 597 | 8 598 | 24 599 | 3 600 | 10 601 | 20 602 | 9 603 | 3 604 | 21 605 | 22 606 | 23 607 | 14 608 | 25 609 | 3 610 | 13 611 | 6 612 | 7 613 | 3 614 | 3 615 | 6 616 | 7 617 | 16 618 | 18 619 | 19 620 | 3 621 | 3 622 | 17 623 | 15 624 | 6 625 | 7 626 | 12 627 | 8 628 | 24 629 | 3 630 | 10 631 | 20 632 | 9 633 | 3 634 | 21 635 | 22 636 | 23 637 | 14 638 | 25 639 | 3 640 | 13 641 | 17 642 | 15 643 | 6 644 | 7 645 | 12 646 | 8 647 | 24 648 | 10 649 | 20 650 | 9 651 | 21 652 | 22 653 | 23 654 | 14 655 | 25 656 | 3 657 | 13 658 | 17 659 | 50 660 | 6 661 | 7 662 | 12 663 | 3 664 | 3 665 | 3 666 | 13 667 | 35 668 | 52 669 | 6 670 | 7 671 | 3 672 | 3 673 | 16 674 | 18 675 | 19 676 | 3 677 | 6 678 | 7 679 | 16 680 | 18 681 | 19 682 | 3 683 | 3 684 | 3 685 | 3 686 | 3 687 | 3 688 | 3 689 | 3 690 | 3 691 | 3 692 | 3 693 | 3 694 | 3 695 | 3 696 | 3 697 | 3 698 | 3 699 | 3 700 | 3 701 | 3 702 | 3 703 | 3 704 | 3 705 | 3 706 | 3 707 | 3 708 | 3 709 | 3 710 | 3 711 | 3 712 | 3 713 | 3 714 | 3 715 | 3 716 | 3 717 | 3 718 | 3 719 | 3 720 | 3 721 | 3 722 | 3 723 | 3 724 | 3 725 | 3 726 | 3 727 | 3 728 | 3 729 | 3 730 | 3 731 | 3 732 | 3 733 | 3 734 | 3 735 | 3 736 | 3 737 | 3 738 | 3 739 | 3 740 | 3 741 | 3 742 | 3 743 | 3 744 | 3 745 | 3 746 | 3 747 | 3 748 | 3 749 | 3 750 | 3 751 | 3 752 | 3 753 | 3 754 | 3 755 | 3 756 | 3 757 | 3 758 | 3 759 | 3 760 | 3 761 | 3 762 | 3 763 | 3 764 | 3 765 | 3 766 | 3 767 | 3 768 | 3 769 | 3 770 | 3 771 | 3 772 | 3 773 | 3 774 | 3 775 | 3 776 | 3 777 | 3 778 | 3 779 | 3 780 | 3 781 | 3 782 | 3 783 | 3 784 | 3 785 | 3 786 | 3 787 | 3 788 | 3 789 | 3 790 | 3 791 | 3 792 | 3 793 | 3 794 | 3 795 | 3 796 | 3 797 | 3 798 | 58 799 | 102 800 | 10 801 | 31 802 | 9 803 | 3 804 | 3 805 | 101 806 | 88 807 | 114 808 | 75 809 | 28 810 | 6 811 | 81 812 | 49 813 | 12 814 | 3 815 | 3 816 | 3 817 | 56 818 | 94 819 | 84 820 | 8 821 | 8 822 | 11 823 | 28 824 | 3 825 | 13 826 | 35 827 | 12 828 | 57 829 | 3 830 | 3 831 | 3 832 | 3 833 | 3 834 | 3 835 | 3 836 | 3 837 | 3 838 | 3 839 | 3 840 | 3 841 | 3 842 | 3 843 | 3 844 | 3 845 | 3 846 | 3 847 | 3 848 | 1 849 | 2 850 | 1 851 | 2 852 | 1 853 | 2 854 | 1 855 | 2 856 | 1 857 | 2 858 | 1 859 | 2 860 | 1 861 | 2 862 | 1 863 | 2 864 | 1 865 | 2 866 | 1 867 | 2 868 | 1 869 | 2 870 | 1 871 | 2 872 | 1 873 | 2 874 | 1 875 | 2 876 | 1 877 | 2 878 | 1 879 | 2 880 | 1 881 | 2 882 | 1 883 | 2 884 | 1 885 | 2 886 | 1 887 | 2 888 | 1 889 | 2 890 | 1 891 | 2 892 | 1 893 | 2 894 | 1 895 | 2 896 | 1 897 | 2 898 | 1 899 | 2 900 | 1 901 | 2 902 | 1 903 | 2 904 | 1 905 | 2 906 | 1 907 | 2 908 | 86 909 | 54 910 | 60 911 | 1 912 | 55 913 | 1 914 | 2 915 | 1 916 | 2 917 | 1 918 | 2 919 | 1 920 | 2 921 | 1 922 | 2 923 | 4 924 | 1 925 | 2 926 | 1 927 | 3 928 | 1 929 | 2 930 | 5 931 | 4 932 | 1 933 | 2 934 | 1 935 | 1 936 | 2 937 | 5 938 | 4 939 | 3 940 | 3 941 | 1 942 | 2 943 | 1 944 | 1 945 | 2 946 | 5 947 | 4 948 | 1 949 | 2 950 | 1 951 | 3 952 | 3 953 | 1 954 | 2 955 | 5 956 | 4 957 | 1 958 | 2 959 | 1 960 | 1 961 | 2 962 | 5 963 | 4 964 | 3 965 | 3 966 | 1 967 | 2 968 | 1 969 | 1 970 | 2 971 | 5 972 | 4 973 | 1 974 | 2 975 | 1 976 | 3 977 | 3 978 | 1 979 | 2 980 | 5 981 | 4 982 | 1 983 | 2 984 | 1 985 | 1 986 | 2 987 | 5 988 | 4 989 | 3 990 | 3 991 | 1 992 | 2 993 | 1 994 | 1 995 | 2 996 | 5 997 | 4 998 | 1 999 | 2 1000 | 1 1001 | 3 1002 | 1 1003 | 2 1004 
| 3 1005 | 5 1006 | 4 1007 | 1 1008 | 2 1009 | 1 1010 | 1 1011 | 2 1012 | 5 1013 | 4 1014 | 1 1015 | 2 1016 | 3 1017 | 1 1018 | 3 1019 | 3 1020 | 38 1021 | 29 1022 | 29 1023 | 45 1024 | 10 1025 | 31 1026 | 9 1027 | 1 1028 | 2 1029 | 5 1030 | 4 1031 | 44 1032 | 34 1033 | 1 1034 | 37 1035 | 43 1036 | 8 1037 | 8 1038 | 11 1039 | 61 1040 | 91 1041 | 41 1042 | 40 1043 | 1 1044 | 2 1045 | 28 1046 | 1 1047 | 1 1048 | 2 1049 | 5 1050 | 4 1051 | 3 1052 | 3 1053 | 38 1054 | 29 1055 | 29 1056 | 45 1057 | 10 1058 | 31 1059 | 9 1060 | 44 1061 | 34 1062 | 1 1063 | 37 1064 | 43 1065 | 8 1066 | 8 1067 | 11 1068 | 41 1069 | 40 1070 | 1 1071 | 2 1072 | 28 1073 | 1 1074 | 1 1075 | 2 1076 | 5 1077 | 4 1078 | 1 1079 | 2 1080 | 1 1081 | 1 1082 | 2 1083 | 5 1084 | 4 1085 | 1 1086 | 2 1087 | 1 1088 | 1 1089 | 2 1090 | 5 1091 | 4 1092 | 1 1093 | 2 1094 | 1 1095 | 1 1096 | 2 1097 | 5 1098 | 4 1099 | 1 1100 | 1 1101 | 2 1102 | 5 1103 | 4 1104 | 1 1105 | 2 1106 | 1 1107 | 1 1108 | 2 1109 | 5 1110 | 4 1111 | 1 1112 | 2 1113 | 1 1114 | 1 1115 | 2 1116 | 5 1117 | 4 1118 | 1 1119 | 2 1120 | 1 1121 | 1 1122 | 2 1123 | 5 1124 | 4 1125 | 1 1126 | 2 1127 | 1 1128 | 1 1129 | 2 1130 | 5 1131 | 4 1132 | 1 1133 | 2 1134 | 1 1135 | 1 1136 | 2 1137 | 5 1138 | 4 1139 | 1 1140 | 2 1141 | 1 1142 | 1 1143 | 2 1144 | 5 1145 | 4 1146 | 1 1147 | 2 1148 | 1 1149 | 1 1150 | 2 1151 | 5 1152 | 4 1153 | 1 1154 | 2 1155 | 1 1156 | 1 1157 | 2 1158 | 5 1159 | 4 1160 | 1 1161 | 2 1162 | 1 1163 | 1 1164 | 2 1165 | 5 1166 | 4 1167 | 1 1168 | 2 1169 | 1 1170 | 1 1171 | 2 1172 | 5 1173 | 4 1174 | 1 1175 | 2 1176 | 1 1177 | 1 1178 | 2 1179 | 5 1180 | 4 1181 | 1 1182 | 2 1183 | 1 1184 | 1 1185 | 2 1186 | 5 1187 | 4 1188 | 1 1189 | 2 1190 | 1 1191 | 1 1192 | 2 1193 | 5 1194 | 4 1195 | 1 1196 | 2 1197 | 1 1198 | 1 1199 | 2 1200 | 5 1201 | 4 1202 | 1 1203 | 2 1204 | 1 1205 | 1 1206 | 2 1207 | 5 1208 | 4 1209 | 1 1210 | 2 1211 | 1 1212 | 1 1213 | 2 1214 | 5 1215 | 4 1216 | 1 1217 | 2 1218 | 1 1219 | 1 1220 | 2 1221 | 5 1222 | 4 1223 | 1 1224 | 2 1225 | 1 1226 | 1 1227 | 2 1228 | 5 1229 | 4 1230 | 1 1231 | 2 1232 | 1 1233 | 1 1234 | 2 1235 | 5 1236 | 4 1237 | 1 1238 | 2 1239 | 1 1240 | 1 1241 | 2 1242 | 5 1243 | 4 1244 | 1 1245 | 2 1246 | 1 1247 | 1 1248 | 2 1249 | 5 1250 | 4 1251 | 1 1252 | 2 1253 | 1 1254 | 1 1255 | 2 1256 | 5 1257 | 4 1258 | 1 1259 | 2 1260 | 1 1261 | 1 1262 | 2 1263 | 5 1264 | 4 1265 | 1 1266 | 2 1267 | 1 1268 | 1 1269 | 2 1270 | 5 1271 | 4 1272 | 1 1273 | 2 1274 | 1 1275 | 1 1276 | 2 1277 | 5 1278 | 4 1279 | 1 1280 | 2 1281 | 1 1282 | 1 1283 | 2 1284 | 5 1285 | 4 1286 | 1 1287 | 2 1288 | 1 1289 | 1 1290 | 2 1291 | 5 1292 | 4 1293 | 1 1294 | 2 1295 | 1 1296 | 1 1297 | 2 1298 | 5 1299 | 4 1300 | 1 1301 | 2 1302 | 1 1303 | 1 1304 | 2 1305 | 5 1306 | 4 1307 | 1 1308 | 2 1309 | 1 1310 | 1 1311 | 2 1312 | 5 1313 | 4 1314 | 1 1315 | 2 1316 | 1 1317 | 1 1318 | 2 1319 | 5 1320 | 4 1321 | 1 1322 | 2 1323 | 1 1324 | 1 1325 | 2 1326 | 5 1327 | 4 1328 | 1 1329 | 2 1330 | 1 1331 | 1 1332 | 2 1333 | 5 1334 | 4 1335 | 1 1336 | 2 1337 | 1 1338 | 1 1339 | 2 1340 | 5 1341 | 4 1342 | 1 1343 | 2 1344 | 1 1345 | 1 1346 | 2 1347 | 5 1348 | 4 1349 | 1 1350 | 2 1351 | 1 1352 | 1 1353 | 2 1354 | 5 1355 | 4 1356 | 1 1357 | 2 1358 | 1 1359 | 1 1360 | 2 1361 | 5 1362 | 4 1363 | 1 1364 | 2 1365 | 1 1366 | 1 1367 | 2 1368 | 5 1369 | 4 1370 | 1 1371 | 2 1372 | 1 1373 | 1 1374 | 2 1375 | 5 1376 | 4 1377 | 1 1378 | 2 1379 | 1 1380 | 1 1381 | 2 1382 | 5 1383 | 4 1384 | 1 1385 | 2 1386 | 1 1387 | 1 1388 | 2 1389 | 5 1390 | 4 1391 | 1 1392 | 2 1393 | 1 1394 | 1 1395 | 2 
1396 | 5 1397 | 4 1398 | 1 1399 | 2 1400 | 1 1401 | 1 1402 | 2 1403 | 5 1404 | 4 1405 | 1 1406 | 2 1407 | 1 1408 | 1 1409 | 2 1410 | 5 1411 | 4 1412 | 1 1413 | 2 1414 | 1 1415 | 1 1416 | 2 1417 | 5 1418 | 4 1419 | 1 1420 | 2 1421 | 1 1422 | 1 1423 | 2 1424 | 5 1425 | 4 1426 | 1 1427 | 2 1428 | 1 1429 | 1 1430 | 2 1431 | 5 1432 | 4 1433 | 1 1434 | 2 1435 | 1 1436 | 1 1437 | 2 1438 | 5 1439 | 4 1440 | 1 1441 | 2 1442 | 1 1443 | 1 1444 | 2 1445 | 5 1446 | 4 1447 | 1 1448 | 2 1449 | 1 1450 | 1 1451 | 2 1452 | 5 1453 | 4 1454 | 1 1455 | 2 1456 | 1 1457 | 1 1458 | 2 1459 | 5 1460 | 4 1461 | 1 1462 | 2 1463 | 1 1464 | 1 1465 | 2 1466 | 5 1467 | 4 1468 | 1 1469 | 2 1470 | 1 1471 | 1 1472 | 2 1473 | 5 1474 | 4 1475 | 1 1476 | 2 1477 | 1 1478 | 1 1479 | 2 1480 | 5 1481 | 4 1482 | 1 1483 | 2 1484 | 1 1485 | 1 1486 | 2 1487 | 5 1488 | 4 1489 | 1 1490 | 2 1491 | 1 1492 | 1 1493 | 2 1494 | 5 1495 | 4 1496 | 1 1497 | 2 1498 | 1 1499 | 1 1500 | 2 1501 | 5 1502 | 4 1503 | 1 1504 | 2 1505 | 1 1506 | 1 1507 | 2 1508 | 5 1509 | 4 1510 | 1 1511 | 2 1512 | 1 1513 | 1 1514 | 2 1515 | 5 1516 | 4 1517 | 1 1518 | 2 1519 | 1 1520 | 1 1521 | 2 1522 | 5 1523 | 4 1524 | 1 1525 | 2 1526 | 1 1527 | 1 1528 | 2 1529 | 5 1530 | 4 1531 | 1 1532 | 2 1533 | 1 1534 | 1 1535 | 2 1536 | 5 1537 | 4 1538 | 1 1539 | 2 1540 | 1 1541 | 1 1542 | 2 1543 | 5 1544 | 4 1545 | 1 1546 | 2 1547 | 1 1548 | 1 1549 | 2 1550 | 5 1551 | 4 1552 | 1 1553 | 2 1554 | 1 1555 | 1 1556 | 2 1557 | 5 1558 | 4 1559 | 1 1560 | 2 1561 | 1 1562 | 1 1563 | 2 1564 | 5 1565 | 4 1566 | 1 1567 | 2 1568 | 1 1569 | 1 1570 | 2 1571 | 5 1572 | 4 1573 | 1 1574 | 2 1575 | 1 1576 | 1 1577 | 2 1578 | 5 1579 | 4 1580 | 1 1581 | 2 1582 | 1 1583 | 1 1584 | 2 1585 | 5 1586 | 4 1587 | 1 1588 | 2 1589 | 1 1590 | 1 1591 | 2 1592 | 5 1593 | 4 1594 | 1 1595 | 2 1596 | 1 1597 | 1 1598 | 2 1599 | 5 1600 | 4 1601 | 1 1602 | 2 1603 | 1 1604 | 1 1605 | 2 1606 | 5 1607 | 4 1608 | 1 1609 | 2 1610 | 1 1611 | 1 1612 | 2 1613 | 5 1614 | 4 1615 | 1 1616 | 2 1617 | 1 1618 | 1 1619 | 2 1620 | 5 1621 | 4 1622 | 1 1623 | 2 1624 | 1 1625 | 1 1626 | 2 1627 | 5 1628 | 4 1629 | 1 1630 | 2 1631 | 1 1632 | 1 1633 | 2 1634 | 5 1635 | 4 1636 | 1 1637 | 2 1638 | 1 1639 | 1 1640 | 2 1641 | 5 1642 | 4 1643 | 1 1644 | 2 1645 | 1 1646 | 1 1647 | 2 1648 | 5 1649 | 4 1650 | 1 1651 | 2 1652 | 1 1653 | 1 1654 | 2 1655 | 5 1656 | 4 1657 | 1 1658 | 2 1659 | 1 1660 | 1 1661 | 2 1662 | 5 1663 | 4 1664 | 1 1665 | 2 1666 | 1 1667 | 1 1668 | 2 1669 | 5 1670 | 4 1671 | 1 1672 | 2 1673 | 1 1674 | 1 1675 | 2 1676 | 5 1677 | 4 1678 | 1 1679 | 2 1680 | 1 1681 | 1 1682 | 2 1683 | 5 1684 | 4 1685 | 1 1686 | 2 1687 | 1 1688 | 1 1689 | 2 1690 | 5 1691 | 4 1692 | 1 1693 | 2 1694 | 1 1695 | 1 1696 | 2 1697 | 5 1698 | 4 1699 | 1 1700 | 2 1701 | 1 1702 | 1 1703 | 2 1704 | 5 1705 | 4 1706 | 1 1707 | 2 1708 | 1 1709 | 1 1710 | 2 1711 | 5 1712 | 4 1713 | 1 1714 | 2 1715 | 1 1716 | 1 1717 | 2 1718 | 5 1719 | 4 1720 | 1 1721 | 2 1722 | 1 1723 | 1 1724 | 2 1725 | 5 1726 | 4 1727 | 1 1728 | 2 1729 | 1 1730 | 1 1731 | 2 1732 | 5 1733 | 4 1734 | 1 1735 | 2 1736 | 1 1737 | 1 1738 | 2 1739 | 5 1740 | 4 1741 | 1 1742 | 2 1743 | 1 1744 | 1 1745 | 2 1746 | 5 1747 | 4 1748 | 1 1749 | 2 1750 | 1 1751 | 1 1752 | 2 1753 | 5 1754 | 4 1755 | 1 1756 | 2 1757 | 1 1758 | 1 1759 | 2 1760 | 5 1761 | 4 1762 | 1 1763 | 2 1764 | 1 1765 | 1 1766 | 2 1767 | 5 1768 | 4 1769 | 1 1770 | 2 1771 | 1 1772 | 1 1773 | 2 1774 | 5 1775 | 4 1776 | 1 1777 | 2 1778 | 1 1779 | 1 1780 | 2 1781 | 5 1782 | 4 1783 | 1 1784 | 2 1785 | 1 1786 | 1 1787 | 2 1788 | 5 1789 | 4 1790 | 1 
1791 | 2 1792 | 1 1793 | 1 1794 | 2 1795 | 5 1796 | 4 1797 | 1 1798 | 2 1799 | 1 1800 | 1 1801 | 2 1802 | 5 1803 | 4 1804 | 1 1805 | 2 1806 | 1 1807 | 1 1808 | 2 1809 | 5 1810 | 4 1811 | 1 1812 | 2 1813 | 1 1814 | 1 1815 | 2 1816 | 5 1817 | 4 1818 | 1 1819 | 2 1820 | 1 1821 | 1 1822 | 2 1823 | 5 1824 | 4 1825 | 1 1826 | 2 1827 | 1 1828 | 1 1829 | 2 1830 | 5 1831 | 4 1832 | 1 1833 | 2 1834 | 1 1835 | 1 1836 | 2 1837 | 5 1838 | 4 1839 | 1 1840 | 2 1841 | 1 1842 | 1 1843 | 2 1844 | 5 1845 | 4 1846 | 1 1847 | 2 1848 | 1 1849 | 1 1850 | 2 1851 | 5 1852 | 4 1853 | 1 1854 | 2 1855 | 1 1856 | 1 1857 | 2 1858 | 5 1859 | 4 1860 | 1 1861 | 2 1862 | 1 1863 | 1 1864 | 2 1865 | 5 1866 | 4 1867 | 1 1868 | 2 1869 | 1 1870 | 1 1871 | 2 1872 | 5 1873 | 4 1874 | 1 1875 | 2 1876 | 1 1877 | 1 1878 | 2 1879 | 5 1880 | 4 1881 | 1 1882 | 2 1883 | 1 1884 | 1 1885 | 2 1886 | 5 1887 | 4 1888 | 1 1889 | 2 1890 | 1 1891 | 1 1892 | 2 1893 | 5 1894 | 4 1895 | 1 1896 | 2 1897 | 1 1898 | 1 1899 | 2 1900 | 5 1901 | 4 1902 | 1 1903 | 2 1904 | 1 1905 | 1 1906 | 2 1907 | 5 1908 | 4 1909 | 1 1910 | 2 1911 | 1 1912 | 1 1913 | 2 1914 | 5 1915 | 4 1916 | 1 1917 | 2 1918 | 1 1919 | 1 1920 | 2 1921 | 5 1922 | 4 1923 | 1 1924 | 2 1925 | 1 1926 | 1 1927 | 2 1928 | 5 1929 | 4 1930 | 1 1931 | 2 1932 | 1 1933 | 1 1934 | 2 1935 | 5 1936 | 4 1937 | 1 1938 | 2 1939 | 1 1940 | 1 1941 | 2 1942 | 5 1943 | 4 1944 | 1 1945 | 2 1946 | 1 1947 | 1 1948 | 2 1949 | 5 1950 | 4 1951 | 1 1952 | 2 1953 | 1 1954 | 1 1955 | 2 1956 | 5 1957 | 4 1958 | 1 1959 | 2 1960 | 1 1961 | 1 1962 | 2 1963 | 5 1964 | 4 1965 | 1 1966 | 2 1967 | 1 1968 | 1 1969 | 2 1970 | 5 1971 | 4 1972 | 1 1973 | 2 1974 | 1 1975 | 1 1976 | 2 1977 | 5 1978 | 4 1979 | 1 1980 | 2 1981 | 1 1982 | 1 1983 | 2 1984 | 5 1985 | 4 1986 | 1 1987 | 2 1988 | 1 1989 | 1 1990 | 2 1991 | 5 1992 | 4 1993 | 1 1994 | 2 1995 | 1 1996 | 1 1997 | 2 1998 | 5 1999 | 4 2000 | 1 2001 | -------------------------------------------------------------------------------- /data/open_source_logs/zookeeper/groundtruth.seq: -------------------------------------------------------------------------------- 1 | 3 2 | 4 3 | 7 4 | 6 5 | 7 6 | 8 7 | 4 8 | 8 9 | 6 10 | 7 11 | 7 12 | 8 13 | 4 14 | 8 15 | 6 16 | 5 17 | 7 18 | 5 19 | 6 20 | 4 21 | 5 22 | 7 23 | 6 24 | 5 25 | 7 26 | 6 27 | 4 28 | 6 29 | 6 30 | 5 31 | 6 32 | 8 33 | 6 34 | 7 35 | 8 36 | 4 37 | 6 38 | 6 39 | 6 40 | 8 41 | 6 42 | 6 43 | 4 44 | 5 45 | 6 46 | 5 47 | 5 48 | 4 49 | 6 50 | 6 51 | 7 52 | 5 53 | 5 54 | 6 55 | 5 56 | 4 57 | 4 58 | 6 59 | 8 60 | 5 61 | 5 62 | 8 63 | 7 64 | 4 65 | 4 66 | 7 67 | 5 68 | 6 69 | 6 70 | 5 71 | 7 72 | 5 73 | 7 74 | 4 75 | 5 76 | 7 77 | 8 78 | 6 79 | 6 80 | 5 81 | 4 82 | 7 83 | 6 84 | 7 85 | 4 86 | 6 87 | 6 88 | 5 89 | 5 90 | 4 91 | 5 92 | 8 93 | 6 94 | 7 95 | 4 96 | 6 97 | 4 98 | 7 99 | 8 100 | 6 101 | 6 102 | 6 103 | 8 104 | 5 105 | 8 106 | 8 107 | 5 108 | 4 109 | 5 110 | 5 111 | 7 112 | 5 113 | 5 114 | 8 115 | 7 116 | 4 117 | 5 118 | 4 119 | 6 120 | 7 121 | 5 122 | 6 123 | 8 124 | 8 125 | 4 126 | 6 127 | 5 128 | 8 129 | 5 130 | 8 131 | 6 132 | 4 133 | 5 134 | 7 135 | 8 136 | 4 137 | 5 138 | 6 139 | 8 140 | 6 141 | 5 142 | 7 143 | 6 144 | 4 145 | 5 146 | 4 147 | 7 148 | 7 149 | 5 150 | 7 151 | 7 152 | 4 153 | 8 154 | 6 155 | 6 156 | 8 157 | 5 158 | 8 159 | 8 160 | 6 161 | 7 162 | 8 163 | 8 164 | 7 165 | 7 166 | 4 167 | 5 168 | 7 169 | 5 170 | 4 171 | 4 172 | 7 173 | 5 174 | 7 175 | 7 176 | 6 177 | 6 178 | 8 179 | 4 180 | 8 181 | 5 182 | 4 183 | 5 184 | 8 185 | 8 186 | 4 187 | 8 188 | 4 189 | 5 190 | 4 191 | 7 192 | 6 193 | 5 194 | 6 
195 | 6 196 | 7 197 | 6 198 | 4 199 | 6 200 | 4 201 | 4 202 | 8 203 | 4 204 | 4 205 | 5 206 | 5 207 | 6 208 | 5 209 | 6 210 | 8 211 | 5 212 | 8 213 | 4 214 | 8 215 | 4 216 | 7 217 | 4 218 | 4 219 | 6 220 | 5 221 | 4 222 | 8 223 | 4 224 | 6 225 | 4 226 | 4 227 | 7 228 | 4 229 | 6 230 | 7 231 | 5 232 | 6 233 | 7 234 | 5 235 | 6 236 | 5 237 | 7 238 | 6 239 | 7 240 | 5 241 | 7 242 | 5 243 | 7 244 | 7 245 | 5 246 | 6 247 | 8 248 | 7 249 | 4 250 | 6 251 | 6 252 | 7 253 | 6 254 | 8 255 | 5 256 | 5 257 | 8 258 | 6 259 | 7 260 | 6 261 | 8 262 | 7 263 | 6 264 | 4 265 | 4 266 | 7 267 | 8 268 | 6 269 | 6 270 | 4 271 | 8 272 | 7 273 | 5 274 | 8 275 | 5 276 | 5 277 | 6 278 | 5 279 | 4 280 | 6 281 | 7 282 | 5 283 | 7 284 | 8 285 | 7 286 | 8 287 | 8 288 | 5 289 | 6 290 | 7 291 | 5 292 | 8 293 | 6 294 | 7 295 | 4 296 | 5 297 | 5 298 | 6 299 | 7 300 | 8 301 | 6 302 | 8 303 | 4 304 | 6 305 | 7 306 | 7 307 | 6 308 | 8 309 | 8 310 | 7 311 | 5 312 | 4 313 | 8 314 | 4 315 | 6 316 | 8 317 | 6 318 | 5 319 | 5 320 | 7 321 | 7 322 | 7 323 | 8 324 | 6 325 | 4 326 | 6 327 | 7 328 | 8 329 | 8 330 | 6 331 | 8 332 | 4 333 | 5 334 | 6 335 | 6 336 | 8 337 | 4 338 | 8 339 | 6 340 | 5 341 | 5 342 | 7 343 | 6 344 | 4 345 | 8 346 | 8 347 | 4 348 | 6 349 | 4 350 | 6 351 | 8 352 | 8 353 | 7 354 | 4 355 | 6 356 | 5 357 | 4 358 | 7 359 | 8 360 | 7 361 | 8 362 | 4 363 | 7 364 | 6 365 | 4 366 | 7 367 | 4 368 | 8 369 | 5 370 | 6 371 | 4 372 | 8 373 | 5 374 | 4 375 | 7 376 | 5 377 | 8 378 | 6 379 | 8 380 | 5 381 | 5 382 | 5 383 | 4 384 | 8 385 | 6 386 | 6 387 | 7 388 | 5 389 | 5 390 | 4 391 | 6 392 | 4 393 | 5 394 | 7 395 | 5 396 | 5 397 | 6 398 | 8 399 | 4 400 | 8 401 | 4 402 | 8 403 | 7 404 | 6 405 | 8 406 | 8 407 | 5 408 | 7 409 | 4 410 | 6 411 | 7 412 | 7 413 | 7 414 | 7 415 | 7 416 | 8 417 | 4 418 | 5 419 | 4 420 | 8 421 | 5 422 | 5 423 | 6 424 | 6 425 | 4 426 | 4 427 | 8 428 | 5 429 | 6 430 | 7 431 | 8 432 | 4 433 | 7 434 | 6 435 | 8 436 | 4 437 | 8 438 | 7 439 | 6 440 | 5 441 | 8 442 | 4 443 | 4 444 | 6 445 | 7 446 | 4 447 | 8 448 | 6 449 | 5 450 | 7 451 | 4 452 | 6 453 | 7 454 | 5 455 | 5 456 | 6 457 | 6 458 | 6 459 | 7 460 | 8 461 | 7 462 | 8 463 | 5 464 | 5 465 | 7 466 | 4 467 | 8 468 | 7 469 | 8 470 | 4 471 | 6 472 | 6 473 | 7 474 | 8 475 | 8 476 | 4 477 | 6 478 | 7 479 | 6 480 | 4 481 | 4 482 | 8 483 | 4 484 | 6 485 | 4 486 | 8 487 | 4 488 | 6 489 | 8 490 | 6 491 | 4 492 | 5 493 | 5 494 | 4 495 | 27 496 | 26 497 | 27 498 | 28 499 | 27 500 | 20 501 | 25 502 | 25 503 | 21 504 | 21 505 | 27 506 | 31 507 | 25 508 | 21 509 | 21 510 | 25 511 | 21 512 | 21 513 | 25 514 | 27 515 | 25 516 | 20 517 | 20 518 | 25 519 | 20 520 | 20 521 | 27 522 | 27 523 | 22 524 | 21 525 | 20 526 | 20 527 | 22 528 | 22 529 | 20 530 | 22 531 | 27 532 | 22 533 | 27 534 | 20 535 | 25 536 | 26 537 | 20 538 | 26 539 | 27 540 | 23 541 | 22 542 | 2 543 | 20 544 | 35 545 | 35 546 | 26 547 | 35 548 | 22 549 | 35 550 | 34 551 | 35 552 | 34 553 | 35 554 | 26 555 | 35 556 | 22 557 | 26 558 | 27 559 | 22 560 | 25 561 | 20 562 | 27 563 | 34 564 | 34 565 | 44 566 | 15 567 | 2 568 | 2 569 | 2 570 | 5 571 | 3 572 | 22 573 | 25 574 | 26 575 | 26 576 | 22 577 | 27 578 | 21 579 | 22 580 | 22 581 | 14 582 | 33 583 | 30 584 | 2 585 | 15 586 | 16 587 | 23 588 | 18 589 | 25 590 | 21 591 | 20 592 | 22 593 | 22 594 | 26 595 | 25 596 | 20 597 | 27 598 | 17 599 | 18 600 | 25 601 | 26 602 | 20 603 | 22 604 | 27 605 | 26 606 | 26 607 | 25 608 | 12 609 | 23 610 | 29 611 | 25 612 | 25 613 | 28 614 | 19 615 | 23 616 | 29 617 | 26 618 | 27 619 | 4 620 | 23 621 | 21 622 | 20 623 | 40 
624 | 39 625 | 17 626 | 17 627 | 25 628 | 20 629 | 21 630 | 25 631 | 25 632 | 27 633 | 26 634 | 27 635 | 22 636 | 21 637 | 21 638 | 9 639 | 2 640 | 2 641 | 3 642 | 2 643 | 3 644 | 2 645 | 2 646 | 3 647 | 2 648 | 3 649 | 2 650 | 2 651 | 2 652 | 2 653 | 2 654 | 3 655 | 3 656 | 2 657 | 2 658 | 3 659 | 3 660 | 2 661 | 2 662 | 2 663 | 2 664 | 2 665 | 2 666 | 3 667 | 3 668 | 2 669 | 2 670 | 2 671 | 2 672 | 2 673 | 3 674 | 2 675 | 3 676 | 2 677 | 3 678 | 2 679 | 2 680 | 2 681 | 3 682 | 2 683 | 2 684 | 2 685 | 2 686 | 2 687 | 3 688 | 2 689 | 3 690 | 2 691 | 3 692 | 3 693 | 2 694 | 2 695 | 2 696 | 2 697 | 3 698 | 2 699 | 2 700 | 3 701 | 2 702 | 3 703 | 3 704 | 2 705 | 2 706 | 3 707 | 2 708 | 2 709 | 2 710 | 2 711 | 2 712 | 3 713 | 2 714 | 2 715 | 2 716 | 3 717 | 2 718 | 3 719 | 2 720 | 2 721 | 2 722 | 3 723 | 3 724 | 2 725 | 2 726 | 2 727 | 3 728 | 2 729 | 2 730 | 2 731 | 2 732 | 3 733 | 2 734 | 2 735 | 2 736 | 2 737 | 2 738 | 2 739 | 3 740 | 3 741 | 2 742 | 2 743 | 2 744 | 3 745 | 2 746 | 2 747 | 2 748 | 2 749 | 3 750 | 3 751 | 2 752 | 8 753 | 2 754 | 23 755 | 43 756 | 43 757 | 44 758 | 43 759 | 43 760 | 44 761 | 44 762 | 44 763 | 44 764 | 43 765 | 44 766 | 44 767 | 44 768 | 44 769 | 44 770 | 43 771 | 43 772 | 44 773 | 44 774 | 44 775 | 44 776 | 43 777 | 44 778 | 43 779 | 43 780 | 43 781 | 44 782 | 44 783 | 44 784 | 43 785 | 5 786 | 4 787 | 8 788 | 4 789 | 5 790 | 8 791 | 4 792 | 8 793 | 5 794 | 5 795 | 6 796 | 7 797 | 7 798 | 4 799 | 5 800 | 6 801 | 4 802 | 7 803 | 8 804 | 5 805 | 7 806 | 5 807 | 8 808 | 7 809 | 8 810 | 4 811 | 8 812 | 6 813 | 8 814 | 5 815 | 4 816 | 5 817 | 4 818 | 8 819 | 7 820 | 5 821 | 4 822 | 8 823 | 4 824 | 6 825 | 8 826 | 8 827 | 4 828 | 7 829 | 8 830 | 8 831 | 6 832 | 5 833 | 8 834 | 6 835 | 6 836 | 5 837 | 6 838 | 6 839 | 5 840 | 6 841 | 6 842 | 7 843 | 6 844 | 5 845 | 6 846 | 8 847 | 5 848 | 5 849 | 6 850 | 4 851 | 7 852 | 6 853 | 5 854 | 6 855 | 4 856 | 4 857 | 8 858 | 5 859 | 4 860 | 6 861 | 6 862 | 8 863 | 7 864 | 7 865 | 6 866 | 4 867 | 7 868 | 5 869 | 7 870 | 8 871 | 5 872 | 4 873 | 5 874 | 4 875 | 7 876 | 8 877 | 8 878 | 6 879 | 7 880 | 4 881 | 6 882 | 7 883 | 5 884 | 4 885 | 5 886 | 6 887 | 4 888 | 4 889 | 8 890 | 8 891 | 8 892 | 6 893 | 8 894 | 5 895 | 5 896 | 8 897 | 5 898 | 5 899 | 5 900 | 6 901 | 5 902 | 4 903 | 7 904 | 8 905 | 7 906 | 8 907 | 7 908 | 7 909 | 6 910 | 4 911 | 7 912 | 5 913 | 7 914 | 4 915 | 7 916 | 8 917 | 4 918 | 7 919 | 5 920 | 7 921 | 8 922 | 4 923 | 7 924 | 7 925 | 4 926 | 4 927 | 4 928 | 8 929 | 8 930 | 5 931 | 8 932 | 7 933 | 7 934 | 5 935 | 8 936 | 6 937 | 6 938 | 7 939 | 5 940 | 7 941 | 5 942 | 8 943 | 4 944 | 6 945 | 8 946 | 7 947 | 6 948 | 4 949 | 6 950 | 4 951 | 4 952 | 4 953 | 4 954 | 7 955 | 6 956 | 4 957 | 6 958 | 5 959 | 6 960 | 4 961 | 7 962 | 6 963 | 7 964 | 8 965 | 8 966 | 5 967 | 6 968 | 7 969 | 6 970 | 4 971 | 7 972 | 8 973 | 4 974 | 7 975 | 4 976 | 8 977 | 4 978 | 8 979 | 8 980 | 8 981 | 4 982 | 6 983 | 5 984 | 4 985 | 4 986 | 6 987 | 5 988 | 7 989 | 7 990 | 7 991 | 6 992 | 4 993 | 5 994 | 5 995 | 6 996 | 7 997 | 5 998 | 8 999 | 4 1000 | 4 1001 | 6 1002 | 8 1003 | 5 1004 | 6 1005 | 6 1006 | 6 1007 | 8 1008 | 4 1009 | 4 1010 | 8 1011 | 6 1012 | 7 1013 | 7 1014 | 7 1015 | 5 1016 | 5 1017 | 5 1018 | 8 1019 | 5 1020 | 6 1021 | 7 1022 | 4 1023 | 8 1024 | 7 1025 | 4 1026 | 7 1027 | 7 1028 | 5 1029 | 6 1030 | 6 1031 | 5 1032 | 8 1033 | 5 1034 | 4 1035 | 8 1036 | 7 1037 | 5 1038 | 8 1039 | 7 1040 | 5 1041 | 4 1042 | 6 1043 | 4 1044 | 5 1045 | 8 1046 | 8 1047 | 7 1048 | 5 1049 | 7 1050 | 8 1051 | 4 1052 | 5 1053 | 7 1054 | 6 1055 | 
8 1056 | 4 1057 | 5 1058 | 4 1059 | 6 1060 | 7 1061 | 8 1062 | 5 1063 | 8 1064 | 7 1065 | 6 1066 | 5 1067 | 6 1068 | 4 1069 | 7 1070 | 5 1071 | 7 1072 | 8 1073 | 4 1074 | 6 1075 | 4 1076 | 5 1077 | 6 1078 | 5 1079 | 4 1080 | 8 1081 | 5 1082 | 5 1083 | 5 1084 | 4 1085 | 5 1086 | 8 1087 | 4 1088 | 7 1089 | 8 1090 | 6 1091 | 4 1092 | 5 1093 | 6 1094 | 8 1095 | 5 1096 | 8 1097 | 6 1098 | 7 1099 | 5 1100 | 8 1101 | 7 1102 | 6 1103 | 6 1104 | 6 1105 | 6 1106 | 8 1107 | 5 1108 | 4 1109 | 7 1110 | 6 1111 | 7 1112 | 6 1113 | 4 1114 | 8 1115 | 8 1116 | 7 1117 | 6 1118 | 8 1119 | 6 1120 | 7 1121 | 6 1122 | 6 1123 | 6 1124 | 5 1125 | 7 1126 | 6 1127 | 6 1128 | 7 1129 | 4 1130 | 7 1131 | 8 1132 | 6 1133 | 7 1134 | 5 1135 | 5 1136 | 6 1137 | 8 1138 | 7 1139 | 7 1140 | 4 1141 | 6 1142 | 6 1143 | 5 1144 | 5 1145 | 6 1146 | 7 1147 | 4 1148 | 4 1149 | 4 1150 | 6 1151 | 8 1152 | 8 1153 | 7 1154 | 8 1155 | 4 1156 | 5 1157 | 8 1158 | 7 1159 | 5 1160 | 5 1161 | 4 1162 | 7 1163 | 4 1164 | 6 1165 | 4 1166 | 8 1167 | 6 1168 | 8 1169 | 8 1170 | 7 1171 | 6 1172 | 8 1173 | 5 1174 | 8 1175 | 4 1176 | 6 1177 | 5 1178 | 6 1179 | 4 1180 | 8 1181 | 7 1182 | 8 1183 | 8 1184 | 6 1185 | 8 1186 | 7 1187 | 8 1188 | 7 1189 | 7 1190 | 4 1191 | 8 1192 | 5 1193 | 5 1194 | 8 1195 | 6 1196 | 5 1197 | 6 1198 | 7 1199 | 6 1200 | 5 1201 | 8 1202 | 8 1203 | 7 1204 | 6 1205 | 6 1206 | 5 1207 | 7 1208 | 7 1209 | 5 1210 | 4 1211 | 6 1212 | 6 1213 | 8 1214 | 4 1215 | 6 1216 | 4 1217 | 5 1218 | 5 1219 | 4 1220 | 4 1221 | 4 1222 | 6 1223 | 4 1224 | 4 1225 | 7 1226 | 8 1227 | 6 1228 | 5 1229 | 5 1230 | 8 1231 | 8 1232 | 4 1233 | 6 1234 | 4 1235 | 4 1236 | 5 1237 | 8 1238 | 4 1239 | 6 1240 | 6 1241 | 7 1242 | 5 1243 | 6 1244 | 7 1245 | 7 1246 | 5 1247 | 6 1248 | 8 1249 | 6 1250 | 6 1251 | 6 1252 | 7 1253 | 4 1254 | 6 1255 | 7 1256 | 6 1257 | 8 1258 | 46 1259 | 26 1260 | 21 1261 | 35 1262 | 35 1263 | 35 1264 | 21 1265 | 7 1266 | 22 1267 | 20 1268 | 17 1269 | 21 1270 | 21 1271 | 35 1272 | 21 1273 | 22 1274 | 20 1275 | 34 1276 | 35 1277 | 34 1278 | 34 1279 | 20 1280 | 35 1281 | 27 1282 | 35 1283 | 34 1284 | 35 1285 | 35 1286 | 21 1287 | 35 1288 | 20 1289 | 34 1290 | 35 1291 | 22 1292 | 21 1293 | 21 1294 | 34 1295 | 35 1296 | 27 1297 | 27 1298 | 26 1299 | 35 1300 | 25 1301 | 34 1302 | 27 1303 | 26 1304 | 21 1305 | 21 1306 | 25 1307 | 25 1308 | 21 1309 | 26 1310 | 35 1311 | 26 1312 | 35 1313 | 35 1314 | 26 1315 | 34 1316 | 34 1317 | 35 1318 | 35 1319 | 25 1320 | 25 1321 | 35 1322 | 27 1323 | 26 1324 | 35 1325 | 34 1326 | 35 1327 | 25 1328 | 22 1329 | 34 1330 | 34 1331 | 34 1332 | 35 1333 | 20 1334 | 22 1335 | 20 1336 | 26 1337 | 4 1338 | 26 1339 | 26 1340 | 25 1341 | 27 1342 | 20 1343 | 25 1344 | 25 1345 | 21 1346 | 20 1347 | 26 1348 | 25 1349 | 1 1350 | 24 1351 | 20 1352 | 27 1353 | 34 1354 | 34 1355 | 22 1356 | 20 1357 | 25 1358 | 34 1359 | 27 1360 | 35 1361 | 21 1362 | 21 1363 | 34 1364 | 35 1365 | 35 1366 | 34 1367 | 27 1368 | 20 1369 | 26 1370 | 27 1371 | 34 1372 | 22 1373 | 20 1374 | 26 1375 | 35 1376 | 35 1377 | 13 1378 | 4 1379 | 24 1380 | 37 1381 | 6 1382 | 27 1383 | 35 1384 | 25 1385 | 34 1386 | 34 1387 | 35 1388 | 35 1389 | 35 1390 | 25 1391 | 25 1392 | 21 1393 | 20 1394 | 34 1395 | 34 1396 | 34 1397 | 34 1398 | 2 1399 | 22 1400 | 22 1401 | 26 1402 | 20 1403 | 25 1404 | 22 1405 | 22 1406 | 29 1407 | 13 1408 | 11 1409 | 4 1410 | 30 1411 | 22 1412 | 21 1413 | 28 1414 | 26 1415 | 27 1416 | 25 1417 | 21 1418 | 23 1419 | 42 1420 | 17 1421 | 34 1422 | 35 1423 | 35 1424 | 20 1425 | 34 1426 | 35 1427 | 34 1428 | 36 1429 | 21 1430 | 39 1431 | 40 
1432 | 39 1433 | 45 1434 | 17 1435 | 17 1436 | 34 1437 | 34 1438 | 34 1439 | 27 1440 | 34 1441 | 25 1442 | 35 1443 | 35 1444 | 21 1445 | 25 1446 | 26 1447 | 27 1448 | 27 1449 | 27 1450 | 35 1451 | 20 1452 | 22 1453 | 34 1454 | 13 1455 | 32 1456 | 38 1457 | 34 1458 | 35 1459 | 35 1460 | 15 1461 | 41 1462 | 2 1463 | 17 1464 | 17 1465 | 6 1466 | 8 1467 | 8 1468 | 7 1469 | 7 1470 | 7 1471 | 6 1472 | 5 1473 | 7 1474 | 7 1475 | 4 1476 | 8 1477 | 5 1478 | 4 1479 | 6 1480 | 8 1481 | 4 1482 | 7 1483 | 4 1484 | 7 1485 | 5 1486 | 8 1487 | 7 1488 | 8 1489 | 7 1490 | 8 1491 | 5 1492 | 7 1493 | 4 1494 | 4 1495 | 5 1496 | 5 1497 | 4 1498 | 8 1499 | 7 1500 | 4 1501 | 4 1502 | 8 1503 | 7 1504 | 6 1505 | 8 1506 | 8 1507 | 6 1508 | 6 1509 | 7 1510 | 8 1511 | 5 1512 | 4 1513 | 4 1514 | 6 1515 | 8 1516 | 7 1517 | 4 1518 | 7 1519 | 7 1520 | 4 1521 | 4 1522 | 7 1523 | 7 1524 | 7 1525 | 6 1526 | 6 1527 | 7 1528 | 4 1529 | 7 1530 | 7 1531 | 6 1532 | 8 1533 | 8 1534 | 8 1535 | 6 1536 | 7 1537 | 7 1538 | 5 1539 | 7 1540 | 4 1541 | 8 1542 | 8 1543 | 6 1544 | 5 1545 | 6 1546 | 7 1547 | 5 1548 | 7 1549 | 7 1550 | 5 1551 | 5 1552 | 7 1553 | 4 1554 | 6 1555 | 4 1556 | 7 1557 | 8 1558 | 5 1559 | 6 1560 | 7 1561 | 5 1562 | 5 1563 | 6 1564 | 4 1565 | 8 1566 | 7 1567 | 5 1568 | 4 1569 | 7 1570 | 8 1571 | 8 1572 | 4 1573 | 7 1574 | 6 1575 | 4 1576 | 8 1577 | 4 1578 | 5 1579 | 4 1580 | 6 1581 | 6 1582 | 8 1583 | 4 1584 | 6 1585 | 8 1586 | 4 1587 | 5 1588 | 7 1589 | 8 1590 | 5 1591 | 6 1592 | 6 1593 | 7 1594 | 5 1595 | 5 1596 | 7 1597 | 6 1598 | 7 1599 | 4 1600 | 4 1601 | 4 1602 | 6 1603 | 5 1604 | 6 1605 | 4 1606 | 8 1607 | 7 1608 | 6 1609 | 6 1610 | 5 1611 | 4 1612 | 8 1613 | 7 1614 | 6 1615 | 5 1616 | 8 1617 | 4 1618 | 6 1619 | 5 1620 | 4 1621 | 4 1622 | 8 1623 | 5 1624 | 4 1625 | 5 1626 | 7 1627 | 8 1628 | 6 1629 | 6 1630 | 6 1631 | 4 1632 | 6 1633 | 5 1634 | 8 1635 | 5 1636 | 8 1637 | 4 1638 | 5 1639 | 6 1640 | 7 1641 | 6 1642 | 6 1643 | 7 1644 | 6 1645 | 4 1646 | 8 1647 | 5 1648 | 6 1649 | 4 1650 | 4 1651 | 5 1652 | 5 1653 | 8 1654 | 4 1655 | 5 1656 | 4 1657 | 6 1658 | 6 1659 | 4 1660 | 8 1661 | 5 1662 | 5 1663 | 5 1664 | 4 1665 | 7 1666 | 4 1667 | 6 1668 | 6 1669 | 8 1670 | 6 1671 | 4 1672 | 6 1673 | 4 1674 | 7 1675 | 6 1676 | 7 1677 | 4 1678 | 7 1679 | 4 1680 | 4 1681 | 7 1682 | 5 1683 | 8 1684 | 6 1685 | 7 1686 | 7 1687 | 6 1688 | 4 1689 | 6 1690 | 6 1691 | 8 1692 | 6 1693 | 6 1694 | 5 1695 | 5 1696 | 8 1697 | 6 1698 | 8 1699 | 5 1700 | 5 1701 | 5 1702 | 6 1703 | 4 1704 | 6 1705 | 7 1706 | 4 1707 | 4 1708 | 5 1709 | 5 1710 | 4 1711 | 8 1712 | 8 1713 | 5 1714 | 8 1715 | 5 1716 | 4 1717 | 6 1718 | 4 1719 | 4 1720 | 8 1721 | 7 1722 | 8 1723 | 6 1724 | 4 1725 | 4 1726 | 7 1727 | 5 1728 | 6 1729 | 7 1730 | 6 1731 | 4 1732 | 4 1733 | 6 1734 | 7 1735 | 4 1736 | 7 1737 | 8 1738 | 8 1739 | 6 1740 | 4 1741 | 8 1742 | 4 1743 | 7 1744 | 6 1745 | 8 1746 | 5 1747 | 8 1748 | 8 1749 | 8 1750 | 7 1751 | 6 1752 | 6 1753 | 6 1754 | 5 1755 | 8 1756 | 5 1757 | 8 1758 | 6 1759 | 7 1760 | 4 1761 | 8 1762 | 8 1763 | 5 1764 | 8 1765 | 5 1766 | 7 1767 | 4 1768 | 7 1769 | 6 1770 | 6 1771 | 4 1772 | 6 1773 | 4 1774 | 8 1775 | 4 1776 | 6 1777 | 6 1778 | 5 1779 | 7 1780 | 8 1781 | 7 1782 | 5 1783 | 6 1784 | 6 1785 | 4 1786 | 5 1787 | 4 1788 | 4 1789 | 6 1790 | 4 1791 | 7 1792 | 6 1793 | 8 1794 | 8 1795 | 5 1796 | 5 1797 | 7 1798 | 8 1799 | 8 1800 | 5 1801 | 4 1802 | 6 1803 | 6 1804 | 5 1805 | 4 1806 | 7 1807 | 8 1808 | 8 1809 | 6 1810 | 4 1811 | 7 1812 | 5 1813 | 8 1814 | 8 1815 | 5 1816 | 4 1817 | 7 1818 | 4 1819 | 7 1820 | 8 1821 | 5 1822 | 6 
1823 | 6 1824 | 7 1825 | 6 1826 | 4 1827 | 8 1828 | 8 1829 | 5 1830 | 4 1831 | 6 1832 | 8 1833 | 4 1834 | 6 1835 | 8 1836 | 8 1837 | 8 1838 | 7 1839 | 5 1840 | 4 1841 | 8 1842 | 7 1843 | 4 1844 | 6 1845 | 8 1846 | 4 1847 | 8 1848 | 8 1849 | 5 1850 | 5 1851 | 8 1852 | 5 1853 | 6 1854 | 6 1855 | 8 1856 | 8 1857 | 8 1858 | 4 1859 | 7 1860 | 5 1861 | 5 1862 | 8 1863 | 4 1864 | 6 1865 | 8 1866 | 8 1867 | 4 1868 | 6 1869 | 4 1870 | 6 1871 | 8 1872 | 5 1873 | 4 1874 | 8 1875 | 6 1876 | 4 1877 | 8 1878 | 4 1879 | 5 1880 | 6 1881 | 8 1882 | 8 1883 | 4 1884 | 6 1885 | 6 1886 | 4 1887 | 8 1888 | 8 1889 | 5 1890 | 8 1891 | 6 1892 | 4 1893 | 6 1894 | 7 1895 | 4 1896 | 5 1897 | 7 1898 | 4 1899 | 7 1900 | 7 1901 | 4 1902 | 4 1903 | 6 1904 | 4 1905 | 4 1906 | 4 1907 | 8 1908 | 8 1909 | 7 1910 | 8 1911 | 6 1912 | 4 1913 | 4 1914 | 8 1915 | 7 1916 | 5 1917 | 6 1918 | 4 1919 | 8 1920 | 5 1921 | 40 1922 | 17 1923 | 40 1924 | 21 1925 | 21 1926 | 25 1927 | 22 1928 | 21 1929 | 20 1930 | 20 1931 | 26 1932 | 25 1933 | 25 1934 | 20 1935 | 21 1936 | 21 1937 | 26 1938 | 21 1939 | 27 1940 | 26 1941 | 20 1942 | 25 1943 | 22 1944 | 21 1945 | 27 1946 | 27 1947 | 26 1948 | 21 1949 | 22 1950 | 22 1951 | 27 1952 | 21 1953 | 21 1954 | 21 1955 | 26 1956 | 8 1957 | 25 1958 | 15 1959 | 17 1960 | 21 1961 | 22 1962 | 25 1963 | 25 1964 | 25 1965 | 22 1966 | 20 1967 | 21 1968 | 27 1969 | 27 1970 | 20 1971 | 22 1972 | 20 1973 | 25 1974 | 26 1975 | 10 1976 | 23 1977 | 23 1978 | 25 1979 | 21 1980 | 21 1981 | 27 1982 | 26 1983 | 25 1984 | 27 1985 | 25 1986 | 22 1987 | 22 1988 | 4 1989 | 10 1990 | 23 1991 | 41 1992 | 27 1993 | 21 1994 | 25 1995 | 17 1996 | 20 1997 | 27 1998 | 34 1999 | 34 2000 | 35 2001 | -------------------------------------------------------------------------------- /data/open_source_logs/hdfs/groundtruth.seq: -------------------------------------------------------------------------------- 1 | 7 2 | 7 3 | 13 4 | 7 5 | 7 6 | 13 7 | 13 8 | 13 9 | 7 10 | 6 11 | 6 12 | 4 13 | 4 14 | 13 15 | 4 16 | 10 17 | 7 18 | 6 19 | 10 20 | 13 21 | 6 22 | 13 23 | 13 24 | 6 25 | 13 26 | 4 27 | 6 28 | 10 29 | 1 30 | 6 31 | 4 32 | 6 33 | 13 34 | 4 35 | 13 36 | 6 37 | 6 38 | 6 39 | 13 40 | 4 41 | 6 42 | 6 43 | 4 44 | 6 45 | 13 46 | 7 47 | 6 48 | 13 49 | 7 50 | 13 51 | 6 52 | 13 53 | 7 54 | 7 55 | 6 56 | 7 57 | 4 58 | 10 59 | 10 60 | 6 61 | 13 62 | 13 63 | 6 64 | 6 65 | 6 66 | 6 67 | 6 68 | 7 69 | 6 70 | 1 71 | 4 72 | 7 73 | 9 74 | 2 75 | 2 76 | 2 77 | 2 78 | 3 79 | 3 80 | 2 81 | 3 82 | 3 83 | 2 84 | 3 85 | 3 86 | 3 87 | 2 88 | 3 89 | 3 90 | 2 91 | 3 92 | 3 93 | 3 94 | 3 95 | 3 96 | 3 97 | 2 98 | 3 99 | 3 100 | 3 101 | 3 102 | 3 103 | 3 104 | 6 105 | 13 106 | 4 107 | 7 108 | 6 109 | 13 110 | 13 111 | 6 112 | 4 113 | 7 114 | 4 115 | 4 116 | 4 117 | 4 118 | 6 119 | 13 120 | 4 121 | 10 122 | 7 123 | 4 124 | 7 125 | 7 126 | 4 127 | 7 128 | 13 129 | 6 130 | 13 131 | 7 132 | 13 133 | 7 134 | 4 135 | 6 136 | 13 137 | 7 138 | 10 139 | 7 140 | 10 141 | 4 142 | 7 143 | 6 144 | 7 145 | 4 146 | 13 147 | 6 148 | 4 149 | 13 150 | 4 151 | 10 152 | 13 153 | 7 154 | 10 155 | 7 156 | 4 157 | 7 158 | 4 159 | 6 160 | 6 161 | 13 162 | 4 163 | 7 164 | 7 165 | 6 166 | 13 167 | 13 168 | 13 169 | 4 170 | 7 171 | 7 172 | 6 173 | 7 174 | 7 175 | 10 176 | 1 177 | 4 178 | 4 179 | 6 180 | 13 181 | 6 182 | 4 183 | 6 184 | 6 185 | 10 186 | 7 187 | 10 188 | 7 189 | 7 190 | 13 191 | 6 192 | 13 193 | 7 194 | 13 195 | 6 196 | 6 197 | 1 198 | 13 199 | 7 200 | 4 201 | 10 202 | 13 203 | 13 204 | 10 205 | 7 206 | 4 207 | 6 208 | 6 209 | 7 210 | 7 211 | 7 212 | 6 213 | 6 214 | 4 
215 | 7 216 | 7 217 | 7 218 | 6 219 | 6 220 | 10 221 | 7 222 | 4 223 | 13 224 | 7 225 | 13 226 | 6 227 | 7 228 | 4 229 | 7 230 | 6 231 | 6 232 | 6 233 | 13 234 | 6 235 | 4 236 | 4 237 | 7 238 | 4 239 | 13 240 | 13 241 | 7 242 | 13 243 | 10 244 | 7 245 | 6 246 | 7 247 | 13 248 | 6 249 | 7 250 | 10 251 | 13 252 | 7 253 | 10 254 | 10 255 | 13 256 | 7 257 | 10 258 | 7 259 | 7 260 | 4 261 | 7 262 | 13 263 | 7 264 | 10 265 | 13 266 | 10 267 | 10 268 | 6 269 | 7 270 | 6 271 | 6 272 | 13 273 | 6 274 | 7 275 | 10 276 | 13 277 | 10 278 | 13 279 | 7 280 | 7 281 | 4 282 | 6 283 | 7 284 | 13 285 | 6 286 | 7 287 | 13 288 | 13 289 | 13 290 | 4 291 | 13 292 | 11 293 | 2 294 | 3 295 | 2 296 | 3 297 | 3 298 | 2 299 | 3 300 | 2 301 | 2 302 | 2 303 | 2 304 | 2 305 | 2 306 | 2 307 | 2 308 | 2 309 | 2 310 | 3 311 | 2 312 | 2 313 | 2 314 | 2 315 | 2 316 | 2 317 | 3 318 | 2 319 | 3 320 | 3 321 | 2 322 | 2 323 | 2 324 | 2 325 | 3 326 | 2 327 | 2 328 | 3 329 | 3 330 | 3 331 | 3 332 | 2 333 | 3 334 | 3 335 | 2 336 | 2 337 | 2 338 | 3 339 | 3 340 | 3 341 | 3 342 | 3 343 | 2 344 | 2 345 | 2 346 | 1 347 | 1 348 | 1 349 | 3 350 | 3 351 | 3 352 | 2 353 | 2 354 | 3 355 | 2 356 | 2 357 | 3 358 | 1 359 | 2 360 | 3 361 | 2 362 | 11 363 | 11 364 | 11 365 | 11 366 | 11 367 | 11 368 | 11 369 | 11 370 | 11 371 | 11 372 | 11 373 | 11 374 | 11 375 | 11 376 | 11 377 | 11 378 | 11 379 | 11 380 | 11 381 | 11 382 | 11 383 | 11 384 | 11 385 | 11 386 | 11 387 | 11 388 | 11 389 | 11 390 | 11 391 | 11 392 | 11 393 | 11 394 | 11 395 | 11 396 | 11 397 | 11 398 | 11 399 | 11 400 | 11 401 | 11 402 | 11 403 | 11 404 | 11 405 | 11 406 | 11 407 | 11 408 | 11 409 | 11 410 | 11 411 | 11 412 | 11 413 | 11 414 | 11 415 | 11 416 | 11 417 | 11 418 | 11 419 | 11 420 | 11 421 | 11 422 | 11 423 | 11 424 | 11 425 | 11 426 | 9 427 | 6 428 | 9 429 | 9 430 | 9 431 | 9 432 | 9 433 | 9 434 | 6 435 | 9 436 | 7 437 | 10 438 | 9 439 | 9 440 | 9 441 | 9 442 | 9 443 | 9 444 | 6 445 | 9 446 | 9 447 | 9 448 | 9 449 | 9 450 | 9 451 | 9 452 | 9 453 | 9 454 | 9 455 | 9 456 | 9 457 | 9 458 | 9 459 | 9 460 | 9 461 | 9 462 | 9 463 | 9 464 | 9 465 | 9 466 | 9 467 | 9 468 | 9 469 | 9 470 | 9 471 | 9 472 | 9 473 | 9 474 | 9 475 | 9 476 | 9 477 | 9 478 | 9 479 | 6 480 | 10 481 | 9 482 | 9 483 | 9 484 | 9 485 | 9 486 | 9 487 | 9 488 | 9 489 | 9 490 | 9 491 | 9 492 | 9 493 | 9 494 | 6 495 | 13 496 | 9 497 | 9 498 | 13 499 | 9 500 | 7 501 | 10 502 | 9 503 | 7 504 | 6 505 | 10 506 | 4 507 | 4 508 | 13 509 | 7 510 | 13 511 | 4 512 | 7 513 | 10 514 | 6 515 | 7 516 | 6 517 | 13 518 | 13 519 | 9 520 | 4 521 | 7 522 | 13 523 | 4 524 | 9 525 | 13 526 | 10 527 | 6 528 | 4 529 | 13 530 | 9 531 | 6 532 | 6 533 | 6 534 | 6 535 | 13 536 | 6 537 | 13 538 | 6 539 | 6 540 | 4 541 | 7 542 | 13 543 | 7 544 | 10 545 | 4 546 | 6 547 | 4 548 | 13 549 | 10 550 | 6 551 | 7 552 | 4 553 | 10 554 | 6 555 | 13 556 | 13 557 | 10 558 | 4 559 | 7 560 | 13 561 | 7 562 | 4 563 | 13 564 | 6 565 | 13 566 | 13 567 | 4 568 | 7 569 | 1 570 | 4 571 | 4 572 | 7 573 | 13 574 | 6 575 | 4 576 | 4 577 | 6 578 | 6 579 | 4 580 | 6 581 | 13 582 | 13 583 | 4 584 | 6 585 | 9 586 | 6 587 | 6 588 | 4 589 | 7 590 | 7 591 | 4 592 | 13 593 | 6 594 | 4 595 | 10 596 | 6 597 | 13 598 | 13 599 | 4 600 | 4 601 | 4 602 | 10 603 | 4 604 | 7 605 | 7 606 | 4 607 | 4 608 | 13 609 | 4 610 | 4 611 | 4 612 | 4 613 | 4 614 | 13 615 | 7 616 | 6 617 | 13 618 | 7 619 | 13 620 | 6 621 | 7 622 | 7 623 | 13 624 | 7 625 | 4 626 | 6 627 | 10 628 | 13 629 | 13 630 | 13 631 | 10 632 | 13 633 | 6 634 | 13 635 | 4 636 | 4 637 | 4 638 | 6 639 | 4 640 | 7 
641 | 4 642 | 7 643 | 13 644 | 7 645 | 6 646 | 1 647 | 10 648 | 13 649 | 4 650 | 6 651 | 4 652 | 6 653 | 13 654 | 6 655 | 13 656 | 6 657 | 13 658 | 4 659 | 4 660 | 4 661 | 13 662 | 4 663 | 7 664 | 4 665 | 13 666 | 13 667 | 6 668 | 13 669 | 6 670 | 6 671 | 13 672 | 6 673 | 13 674 | 7 675 | 7 676 | 7 677 | 10 678 | 4 679 | 7 680 | 2 681 | 3 682 | 3 683 | 3 684 | 3 685 | 3 686 | 3 687 | 2 688 | 3 689 | 2 690 | 3 691 | 3 692 | 3 693 | 3 694 | 2 695 | 3 696 | 2 697 | 3 698 | 3 699 | 1 700 | 3 701 | 2 702 | 3 703 | 10 704 | 4 705 | 7 706 | 7 707 | 10 708 | 4 709 | 13 710 | 4 711 | 6 712 | 4 713 | 4 714 | 7 715 | 4 716 | 4 717 | 6 718 | 4 719 | 13 720 | 7 721 | 13 722 | 6 723 | 13 724 | 6 725 | 4 726 | 4 727 | 13 728 | 7 729 | 6 730 | 4 731 | 4 732 | 13 733 | 4 734 | 6 735 | 10 736 | 13 737 | 7 738 | 7 739 | 7 740 | 6 741 | 6 742 | 6 743 | 13 744 | 7 745 | 4 746 | 6 747 | 7 748 | 6 749 | 4 750 | 7 751 | 13 752 | 10 753 | 13 754 | 4 755 | 1 756 | 13 757 | 4 758 | 4 759 | 10 760 | 13 761 | 13 762 | 13 763 | 13 764 | 13 765 | 13 766 | 4 767 | 7 768 | 10 769 | 10 770 | 4 771 | 7 772 | 13 773 | 6 774 | 13 775 | 4 776 | 7 777 | 13 778 | 13 779 | 7 780 | 4 781 | 1 782 | 6 783 | 4 784 | 13 785 | 2 786 | 2 787 | 3 788 | 3 789 | 2 790 | 1 791 | 3 792 | 2 793 | 3 794 | 2 795 | 3 796 | 1 797 | 1 798 | 3 799 | 3 800 | 3 801 | 2 802 | 3 803 | 2 804 | 3 805 | 2 806 | 2 807 | 11 808 | 11 809 | 11 810 | 11 811 | 11 812 | 11 813 | 11 814 | 11 815 | 11 816 | 11 817 | 11 818 | 11 819 | 11 820 | 11 821 | 11 822 | 11 823 | 11 824 | 11 825 | 11 826 | 11 827 | 11 828 | 11 829 | 11 830 | 11 831 | 11 832 | 11 833 | 11 834 | 11 835 | 11 836 | 11 837 | 11 838 | 11 839 | 11 840 | 11 841 | 11 842 | 11 843 | 2 844 | 6 845 | 9 846 | 4 847 | 9 848 | 9 849 | 9 850 | 9 851 | 9 852 | 9 853 | 9 854 | 9 855 | 4 856 | 9 857 | 9 858 | 9 859 | 6 860 | 9 861 | 9 862 | 9 863 | 9 864 | 9 865 | 9 866 | 9 867 | 9 868 | 9 869 | 9 870 | 9 871 | 9 872 | 9 873 | 9 874 | 9 875 | 9 876 | 9 877 | 9 878 | 9 879 | 9 880 | 9 881 | 9 882 | 9 883 | 9 884 | 9 885 | 9 886 | 9 887 | 9 888 | 9 889 | 9 890 | 9 891 | 9 892 | 9 893 | 9 894 | 9 895 | 4 896 | 9 897 | 9 898 | 4 899 | 7 900 | 4 901 | 9 902 | 6 903 | 4 904 | 13 905 | 7 906 | 7 907 | 13 908 | 4 909 | 7 910 | 6 911 | 4 912 | 8 913 | 6 914 | 13 915 | 13 916 | 7 917 | 13 918 | 7 919 | 7 920 | 7 921 | 6 922 | 6 923 | 9 924 | 6 925 | 4 926 | 6 927 | 13 928 | 14 929 | 13 930 | 10 931 | 7 932 | 13 933 | 6 934 | 6 935 | 4 936 | 7 937 | 7 938 | 4 939 | 6 940 | 7 941 | 4 942 | 7 943 | 13 944 | 10 945 | 4 946 | 7 947 | 4 948 | 6 949 | 13 950 | 7 951 | 6 952 | 4 953 | 7 954 | 6 955 | 13 956 | 7 957 | 4 958 | 13 959 | 6 960 | 6 961 | 6 962 | 7 963 | 10 964 | 6 965 | 13 966 | 4 967 | 4 968 | 6 969 | 4 970 | 4 971 | 7 972 | 4 973 | 7 974 | 7 975 | 10 976 | 6 977 | 13 978 | 6 979 | 7 980 | 4 981 | 4 982 | 6 983 | 11 984 | 11 985 | 11 986 | 11 987 | 11 988 | 11 989 | 11 990 | 11 991 | 11 992 | 11 993 | 11 994 | 11 995 | 11 996 | 11 997 | 11 998 | 11 999 | 11 1000 | 11 1001 | 11 1002 | 9 1003 | 10 1004 | 9 1005 | 13 1006 | 9 1007 | 9 1008 | 10 1009 | 9 1010 | 9 1011 | 9 1012 | 9 1013 | 4 1014 | 9 1015 | 9 1016 | 9 1017 | 9 1018 | 9 1019 | 6 1020 | 4 1021 | 6 1022 | 7 1023 | 10 1024 | 4 1025 | 6 1026 | 13 1027 | 6 1028 | 6 1029 | 14 1030 | 4 1031 | 4 1032 | 7 1033 | 13 1034 | 7 1035 | 4 1036 | 4 1037 | 6 1038 | 13 1039 | 4 1040 | 7 1041 | 7 1042 | 7 1043 | 13 1044 | 10 1045 | 13 1046 | 13 1047 | 6 1048 | 4 1049 | 6 1050 | 4 1051 | 4 1052 | 13 1053 | 4 1054 | 6 1055 | 13 1056 | 6 1057 | 6 1058 | 4 1059 | 4 1060 | 13 
1061 | 4 1062 | 7 1063 | 13 1064 | 4 1065 | 13 1066 | 13 1067 | 13 1068 | 7 1069 | 13 1070 | 4 1071 | 6 1072 | 7 1073 | 6 1074 | 7 1075 | 6 1076 | 10 1077 | 7 1078 | 10 1079 | 13 1080 | 4 1081 | 4 1082 | 6 1083 | 6 1084 | 6 1085 | 13 1086 | 13 1087 | 6 1088 | 6 1089 | 10 1090 | 4 1091 | 4 1092 | 13 1093 | 1 1094 | 13 1095 | 7 1096 | 7 1097 | 13 1098 | 7 1099 | 13 1100 | 7 1101 | 7 1102 | 6 1103 | 7 1104 | 7 1105 | 7 1106 | 13 1107 | 11 1108 | 11 1109 | 2 1110 | 3 1111 | 3 1112 | 2 1113 | 2 1114 | 3 1115 | 2 1116 | 2 1117 | 2 1118 | 2 1119 | 2 1120 | 3 1121 | 2 1122 | 3 1123 | 3 1124 | 2 1125 | 2 1126 | 2 1127 | 3 1128 | 2 1129 | 2 1130 | 11 1131 | 11 1132 | 11 1133 | 11 1134 | 11 1135 | 11 1136 | 11 1137 | 11 1138 | 11 1139 | 11 1140 | 11 1141 | 11 1142 | 11 1143 | 11 1144 | 11 1145 | 11 1146 | 11 1147 | 11 1148 | 11 1149 | 11 1150 | 11 1151 | 11 1152 | 9 1153 | 9 1154 | 9 1155 | 9 1156 | 9 1157 | 2 1158 | 6 1159 | 9 1160 | 9 1161 | 9 1162 | 9 1163 | 9 1164 | 9 1165 | 9 1166 | 9 1167 | 9 1168 | 9 1169 | 9 1170 | 9 1171 | 9 1172 | 9 1173 | 9 1174 | 9 1175 | 9 1176 | 6 1177 | 2 1178 | 6 1179 | 13 1180 | 13 1181 | 13 1182 | 4 1183 | 6 1184 | 4 1185 | 10 1186 | 10 1187 | 4 1188 | 13 1189 | 4 1190 | 13 1191 | 13 1192 | 6 1193 | 4 1194 | 7 1195 | 4 1196 | 6 1197 | 13 1198 | 7 1199 | 13 1200 | 13 1201 | 7 1202 | 4 1203 | 4 1204 | 7 1205 | 7 1206 | 7 1207 | 4 1208 | 13 1209 | 13 1210 | 13 1211 | 13 1212 | 4 1213 | 7 1214 | 7 1215 | 13 1216 | 4 1217 | 4 1218 | 4 1219 | 10 1220 | 13 1221 | 6 1222 | 7 1223 | 4 1224 | 10 1225 | 4 1226 | 6 1227 | 4 1228 | 4 1229 | 4 1230 | 4 1231 | 13 1232 | 7 1233 | 13 1234 | 10 1235 | 6 1236 | 13 1237 | 6 1238 | 7 1239 | 6 1240 | 7 1241 | 6 1242 | 7 1243 | 13 1244 | 10 1245 | 13 1246 | 10 1247 | 7 1248 | 4 1249 | 13 1250 | 6 1251 | 6 1252 | 13 1253 | 7 1254 | 7 1255 | 4 1256 | 6 1257 | 13 1258 | 7 1259 | 13 1260 | 7 1261 | 4 1262 | 7 1263 | 11 1264 | 11 1265 | 11 1266 | 9 1267 | 7 1268 | 6 1269 | 4 1270 | 13 1271 | 7 1272 | 10 1273 | 4 1274 | 6 1275 | 7 1276 | 13 1277 | 13 1278 | 6 1279 | 6 1280 | 7 1281 | 7 1282 | 4 1283 | 4 1284 | 6 1285 | 13 1286 | 7 1287 | 7 1288 | 13 1289 | 13 1290 | 7 1291 | 13 1292 | 7 1293 | 7 1294 | 6 1295 | 10 1296 | 7 1297 | 7 1298 | 13 1299 | 7 1300 | 4 1301 | 7 1302 | 7 1303 | 4 1304 | 4 1305 | 6 1306 | 7 1307 | 13 1308 | 13 1309 | 13 1310 | 6 1311 | 13 1312 | 7 1313 | 13 1314 | 13 1315 | 13 1316 | 6 1317 | 7 1318 | 4 1319 | 13 1320 | 7 1321 | 7 1322 | 6 1323 | 4 1324 | 4 1325 | 7 1326 | 7 1327 | 4 1328 | 4 1329 | 7 1330 | 13 1331 | 7 1332 | 4 1333 | 13 1334 | 7 1335 | 4 1336 | 13 1337 | 4 1338 | 7 1339 | 4 1340 | 6 1341 | 7 1342 | 7 1343 | 4 1344 | 13 1345 | 10 1346 | 7 1347 | 4 1348 | 4 1349 | 13 1350 | 6 1351 | 6 1352 | 4 1353 | 6 1354 | 7 1355 | 13 1356 | 11 1357 | 11 1358 | 11 1359 | 11 1360 | 11 1361 | 11 1362 | 11 1363 | 11 1364 | 11 1365 | 11 1366 | 11 1367 | 11 1368 | 11 1369 | 11 1370 | 11 1371 | 11 1372 | 11 1373 | 1 1374 | 11 1375 | 11 1376 | 11 1377 | 11 1378 | 11 1379 | 11 1380 | 11 1381 | 11 1382 | 9 1383 | 4 1384 | 9 1385 | 9 1386 | 9 1387 | 6 1388 | 9 1389 | 9 1390 | 9 1391 | 9 1392 | 9 1393 | 6 1394 | 9 1395 | 9 1396 | 9 1397 | 9 1398 | 9 1399 | 9 1400 | 9 1401 | 9 1402 | 9 1403 | 9 1404 | 9 1405 | 9 1406 | 9 1407 | 9 1408 | 4 1409 | 9 1410 | 9 1411 | 9 1412 | 9 1413 | 9 1414 | 9 1415 | 9 1416 | 9 1417 | 4 1418 | 9 1419 | 9 1420 | 13 1421 | 13 1422 | 4 1423 | 6 1424 | 7 1425 | 13 1426 | 4 1427 | 10 1428 | 13 1429 | 13 1430 | 6 1431 | 7 1432 | 4 1433 | 7 1434 | 13 1435 | 10 1436 | 6 1437 | 4 1438 | 13 1439 | 5 1440 | 13 
1441 | 4 1442 | 13 1443 | 6 1444 | 7 1445 | 13 1446 | 4 1447 | 6 1448 | 13 1449 | 6 1450 | 7 1451 | 13 1452 | 4 1453 | 4 1454 | 13 1455 | 6 1456 | 13 1457 | 6 1458 | 7 1459 | 6 1460 | 4 1461 | 13 1462 | 7 1463 | 7 1464 | 4 1465 | 4 1466 | 4 1467 | 7 1468 | 6 1469 | 6 1470 | 6 1471 | 10 1472 | 4 1473 | 4 1474 | 13 1475 | 6 1476 | 10 1477 | 4 1478 | 6 1479 | 4 1480 | 7 1481 | 7 1482 | 4 1483 | 6 1484 | 13 1485 | 6 1486 | 6 1487 | 13 1488 | 7 1489 | 7 1490 | 13 1491 | 7 1492 | 11 1493 | 7 1494 | 6 1495 | 13 1496 | 13 1497 | 7 1498 | 6 1499 | 13 1500 | 13 1501 | 7 1502 | 13 1503 | 7 1504 | 4 1505 | 13 1506 | 7 1507 | 6 1508 | 6 1509 | 6 1510 | 7 1511 | 6 1512 | 10 1513 | 7 1514 | 13 1515 | 10 1516 | 6 1517 | 6 1518 | 7 1519 | 13 1520 | 7 1521 | 6 1522 | 7 1523 | 4 1524 | 13 1525 | 7 1526 | 4 1527 | 13 1528 | 4 1529 | 10 1530 | 7 1531 | 6 1532 | 6 1533 | 6 1534 | 10 1535 | 6 1536 | 4 1537 | 6 1538 | 13 1539 | 13 1540 | 13 1541 | 13 1542 | 10 1543 | 10 1544 | 4 1545 | 10 1546 | 13 1547 | 4 1548 | 13 1549 | 13 1550 | 7 1551 | 10 1552 | 7 1553 | 6 1554 | 10 1555 | 13 1556 | 4 1557 | 7 1558 | 7 1559 | 4 1560 | 4 1561 | 11 1562 | 11 1563 | 11 1564 | 11 1565 | 11 1566 | 11 1567 | 11 1568 | 11 1569 | 11 1570 | 11 1571 | 11 1572 | 11 1573 | 11 1574 | 11 1575 | 11 1576 | 11 1577 | 11 1578 | 9 1579 | 14 1580 | 9 1581 | 14 1582 | 6 1583 | 7 1584 | 9 1585 | 9 1586 | 9 1587 | 9 1588 | 9 1589 | 9 1590 | 9 1591 | 9 1592 | 9 1593 | 9 1594 | 9 1595 | 9 1596 | 9 1597 | 7 1598 | 9 1599 | 9 1600 | 9 1601 | 9 1602 | 9 1603 | 7 1604 | 9 1605 | 4 1606 | 4 1607 | 10 1608 | 13 1609 | 9 1610 | 10 1611 | 6 1612 | 7 1613 | 4 1614 | 13 1615 | 1 1616 | 13 1617 | 7 1618 | 10 1619 | 10 1620 | 6 1621 | 13 1622 | 7 1623 | 10 1624 | 7 1625 | 13 1626 | 6 1627 | 6 1628 | 7 1629 | 6 1630 | 13 1631 | 4 1632 | 7 1633 | 4 1634 | 7 1635 | 13 1636 | 10 1637 | 6 1638 | 6 1639 | 6 1640 | 10 1641 | 6 1642 | 6 1643 | 13 1644 | 4 1645 | 7 1646 | 4 1647 | 7 1648 | 13 1649 | 6 1650 | 7 1651 | 13 1652 | 7 1653 | 7 1654 | 13 1655 | 7 1656 | 4 1657 | 13 1658 | 7 1659 | 4 1660 | 10 1661 | 6 1662 | 13 1663 | 7 1664 | 13 1665 | 13 1666 | 7 1667 | 6 1668 | 7 1669 | 7 1670 | 7 1671 | 7 1672 | 13 1673 | 13 1674 | 6 1675 | 4 1676 | 13 1677 | 7 1678 | 6 1679 | 4 1680 | 6 1681 | 6 1682 | 6 1683 | 6 1684 | 4 1685 | 7 1686 | 6 1687 | 4 1688 | 6 1689 | 6 1690 | 7 1691 | 6 1692 | 4 1693 | 6 1694 | 4 1695 | 10 1696 | 10 1697 | 7 1698 | 7 1699 | 10 1700 | 7 1701 | 7 1702 | 4 1703 | 4 1704 | 10 1705 | 13 1706 | 4 1707 | 13 1708 | 6 1709 | 7 1710 | 11 1711 | 11 1712 | 11 1713 | 11 1714 | 11 1715 | 11 1716 | 11 1717 | 11 1718 | 11 1719 | 11 1720 | 11 1721 | 11 1722 | 11 1723 | 4 1724 | 9 1725 | 7 1726 | 9 1727 | 4 1728 | 9 1729 | 9 1730 | 9 1731 | 7 1732 | 9 1733 | 9 1734 | 9 1735 | 9 1736 | 9 1737 | 9 1738 | 9 1739 | 9 1740 | 9 1741 | 9 1742 | 9 1743 | 9 1744 | 9 1745 | 9 1746 | 9 1747 | 9 1748 | 9 1749 | 13 1750 | 9 1751 | 9 1752 | 4 1753 | 4 1754 | 6 1755 | 9 1756 | 13 1757 | 13 1758 | 13 1759 | 4 1760 | 6 1761 | 4 1762 | 7 1763 | 13 1764 | 6 1765 | 12 1766 | 13 1767 | 6 1768 | 5 1769 | 4 1770 | 9 1771 | 6 1772 | 13 1773 | 4 1774 | 13 1775 | 6 1776 | 6 1777 | 4 1778 | 6 1779 | 7 1780 | 4 1781 | 13 1782 | 4 1783 | 6 1784 | 10 1785 | 7 1786 | 7 1787 | 13 1788 | 6 1789 | 4 1790 | 7 1791 | 6 1792 | 7 1793 | 13 1794 | 6 1795 | 7 1796 | 10 1797 | 10 1798 | 6 1799 | 13 1800 | 7 1801 | 13 1802 | 4 1803 | 13 1804 | 7 1805 | 4 1806 | 4 1807 | 7 1808 | 4 1809 | 7 1810 | 13 1811 | 6 1812 | 7 1813 | 7 1814 | 13 1815 | 4 1816 | 7 1817 | 10 1818 | 7 1819 | 13 1820 | 7 1821 | 
7 1822 | 10 1823 | 13 1824 | 10 1825 | 6 1826 | 13 1827 | 4 1828 | 6 1829 | 10 1830 | 6 1831 | 13 1832 | 10 1833 | 6 1834 | 6 1835 | 4 1836 | 6 1837 | 13 1838 | 7 1839 | 7 1840 | 13 1841 | 4 1842 | 10 1843 | 11 1844 | 11 1845 | 11 1846 | 11 1847 | 11 1848 | 11 1849 | 11 1850 | 11 1851 | 11 1852 | 11 1853 | 11 1854 | 11 1855 | 11 1856 | 11 1857 | 11 1858 | 11 1859 | 11 1860 | 11 1861 | 11 1862 | 11 1863 | 11 1864 | 9 1865 | 9 1866 | 7 1867 | 9 1868 | 9 1869 | 9 1870 | 9 1871 | 9 1872 | 9 1873 | 9 1874 | 9 1875 | 9 1876 | 9 1877 | 9 1878 | 9 1879 | 9 1880 | 9 1881 | 9 1882 | 9 1883 | 13 1884 | 9 1885 | 9 1886 | 9 1887 | 9 1888 | 7 1889 | 13 1890 | 4 1891 | 6 1892 | 6 1893 | 10 1894 | 4 1895 | 13 1896 | 10 1897 | 4 1898 | 4 1899 | 6 1900 | 13 1901 | 14 1902 | 9 1903 | 7 1904 | 6 1905 | 4 1906 | 7 1907 | 7 1908 | 10 1909 | 4 1910 | 13 1911 | 9 1912 | 10 1913 | 13 1914 | 10 1915 | 7 1916 | 7 1917 | 13 1918 | 7 1919 | 4 1920 | 7 1921 | 10 1922 | 4 1923 | 4 1924 | 6 1925 | 4 1926 | 4 1927 | 7 1928 | 1 1929 | 6 1930 | 7 1931 | 7 1932 | 6 1933 | 4 1934 | 4 1935 | 7 1936 | 6 1937 | 6 1938 | 4 1939 | 10 1940 | 6 1941 | 10 1942 | 10 1943 | 6 1944 | 6 1945 | 4 1946 | 6 1947 | 7 1948 | 13 1949 | 4 1950 | 13 1951 | 10 1952 | 13 1953 | 7 1954 | 6 1955 | 13 1956 | 13 1957 | 13 1958 | 7 1959 | 4 1960 | 13 1961 | 13 1962 | 13 1963 | 6 1964 | 6 1965 | 13 1966 | 13 1967 | 9 1968 | 7 1969 | 4 1970 | 7 1971 | 6 1972 | 13 1973 | 13 1974 | 13 1975 | 4 1976 | 6 1977 | 4 1978 | 7 1979 | 4 1980 | 13 1981 | 7 1982 | 7 1983 | 13 1984 | 7 1985 | 6 1986 | 7 1987 | 10 1988 | 13 1989 | 4 1990 | 6 1991 | 13 1992 | 7 1993 | 7 1994 | 6 1995 | 4 1996 | 4 1997 | 6 1998 | 4 1999 | 7 2000 | 4 2001 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## LogClass 2 | This repository provides an open-source toolkit for LogClass framework from W. Meng et al., "[LogClass: Anomalous Log Identification and Classification with Partial Labels](https://ieeexplore.ieee.org/document/9339940)," in IEEE Transactions on Network and Service Management, doi: 10.1109/TNSM.2021.3055425. 3 | 4 | LogClass automatically and accurately detects and classifies anomalous logs based on partial labels. 5 | 6 | ### Table of Contents 7 | 8 | [LogClass](#logclass) 9 | 10 | - [Table of Contents](#table-of-contents) 11 | - [Requirements](#requirements) 12 | - [Quick Start](#quick-start) 13 | - [Run LogClass](#run-logclass) 14 | - [Arguments](#arguments) 15 | - [Directory Structure](#directory-structure) 16 | - [Datasets](#datasets) 17 | - [How to](#how-to) 18 | - [How to add a new dataset](#how-to-add-a-new-dataset) 19 | - [Preprocessed Logs Format](#preprocessed-logs-format) 20 | - [How to run a new experiment](#how-to-run-a-new-experiment) 21 | - [Custom experiment](#custom-experiment) 22 | - [How to add a new model](#how-to-add-a-new-model) 23 | - [How to extract a new feature](#how-to-extract-a-new-feature) 24 | - [Included Experiments](#included-experiments) 25 | - [Testing PULearning](#testing-pulearning) 26 | - [Testing Anomaly Classification](#testing-anomaly-classification) 27 | - [Global LogClass](#global-logclass) 28 | - [Binary training/inference](#binary-traininginference) 29 | - [Citing](#citing) 30 | 31 | ​ 32 | 33 | ### Requirements 34 | 35 | Requirements are listed in `requirements.txt`. 
To install these, run:

```
pip install -r requirements.txt
```

### Quick Start

#### Run LogClass

Several example experiments using LogClass are included in this repository.

Here is an example of running one of them: training of the global experiment, which performs both anomaly detection and classification. Run the following command in the home directory of this project:

```
python -m LogClass.logclass --train --kfold 3 --logs_type "bgl" --raw_logs "./Data/RAS_LOGS" --report macro
```

#### Arguments

```
python -m LogClass.logclass --help
usage: logclass.py [-h] [--raw_logs raw_logs] [--base_dir base_dir]
                   [--logs logs] [--models_dir models_dir]
                   [--features_dir features_dir] [--logs_type logs_type]
                   [--kfold kfold] [--healthy_label healthy_label]
                   [--features features [features ...]]
                   [--report report [report ...]]
                   [--binary_classifier binary_classifier]
                   [--multi_classifier multi_classifier] [--train] [--force]
                   [--id id] [--swap]

Runs binary classification with PULearning to detect anomalous logs.

optional arguments:
  -h, --help            show this help message and exit
  --raw_logs raw_logs   input raw logs file path (default: None)
  --base_dir base_dir   base output directory for pipeline output files
                        (default: ['{your_logclass_dir}\\output'])
  --logs logs           input logs file path and output for raw logs
                        preprocessing (default: None)
  --models_dir models_dir
                        trained models input/output directory path (default:
                        None)
  --features_dir features_dir
                        trained features_dir input/output directory path
                        (default: None)
  --logs_type logs_type
                        Input type of logs. (default: ['open_Apache'])
  --kfold kfold         kfold cross-validation (default: None)
  --healthy_label healthy_label
                        the labels of unlabeled logs (default: ['unlabeled'])
  --features features [features ...]
                        Features to be extracted from the log messages.
                        (default: ['tfilf'])
  --report report [report ...]
                        Reports to be generated from the model and its
                        predictions. (default: None)
  --binary_classifier binary_classifier
                        Binary classifier to be used as anomaly detector.
                        (default: ['pu_learning'])
  --multi_classifier multi_classifier
                        Multi-class classifier to classify anomalies.
                        (default: ['svm'])
  --train               If set, logclass will train on the given data.
                        Otherwise it will run inference on it. (default:
                        False)
  --force               Force training, overwriting previous output with the
                        same id. (default: False)
  --id id               Experiment id. Automatically generated if not
                        specified. (default: None)
  --swap                Swap testing/training data in kfold cross validation.
                        (default: False)
```

#### Directory Structure

```
.
├── data
│   └── open_source_logs                # Included open-source log datasets
│       ├── Apache
│       ├── bgl
│       ├── hadoop
│       ├── hdfs
│       ├── hpc
│       ├── proxifier
│       └── zookeeper
├── output                              # Example output folder
│   ├── preprocessed_logs               # Saved preprocessed logs for reuse
│   │   ├── open_Apache.txt
│   │   └── open_bgl.txt
│   └── train_multi_open_bgl_2283696426 # Example experiment output
│       ├── best_params.json
│       ├── features
│       │   ├── tfidf.pkl
│       │   └── vocab.pkl
│       ├── models
│       │   └── multi.pkl
│       └── results.csv
├── feature_engineering
│   ├── __init__.py
│   ├── length.py
│   ├── tf_idf.py
│   ├── tf_ilf.py
│   ├── tf.py
│   ├── registry.py
│   ├── vectorizer.py                   # Log message vectorizing utilities
│   └── utils.py
├── models
│   ├── __init__.py
│   ├── base_model.py                   # BaseModel class extended by all models
│   ├── pu_learning.py
│   ├── regular.py
│   ├── svm.py
│   ├── binary_registry.py
│   └── multi_registry.py
├── preprocess
│   ├── __init__.py
│   ├── bgl_preprocessor.py
│   ├── open_source_logs.py
│   ├── registry.py
│   └── utils.py
├── reporting
│   ├── __init__.py
│   ├── accuracy.py
│   ├── confusion_matrix.py
│   ├── macrof1.py
│   ├── microf1.py
│   ├── multi_class_acc.py
│   ├── top_k_svm.py
│   ├── bb_registry.py
│   └── wb_registry.py
├── puLearning                          # PULearning third-party implementation
│   ├── __init__.py
│   └── puAdapter.py
├── __init__.py
├── LICENSE
├── README.md
├── requirements.txt
├── init_params.py                      # Parses arguments, initializes global parameters
├── logclass.py                         # Performs training and inference of LogClass
├── test_pu.py                          # Compares robustness of LogClass
├── train_multi.py                      # Trains LogClass for anomalies classification
├── train_binary.py                     # Trains LogClass for log anomaly detection
├── run_binary.py                       # Loads trained LogClass and detects anomalies
├── decorators.py
└── utils.py
```

#### Datasets

In this repository we include various [open-source log datasets](https://github.com/logpai/loghub) in the `data` folder, as well as their corresponding preprocessing modules in the `preprocess` package. Additionally, there is another preprocessor provided for [BGL logs data](https://www.usenix.org/cfdr-data#hpc4), which can be downloaded directly from [here](https://www.usenix.org/sites/default/files/4372-intrepid_ras_0901_0908_scrubbed.zip.tar).

### How to

This section explains how to use and extend this toolkit.

#### How to add a new dataset

Add a new preprocessor module in the `preprocess` package.

The module should implement a function that follows the `preprocess_datset(params)` function template included in all preprocessors. It should be decorated with `@register(f"{dataset_name}")`, e.g. `open_Apache`, and call the `process_logs(input_source, output, process_line)` function. The `process_line` function must be defined in the preprocessor as well; a minimal sketch of such a module follows.
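For orientation, here is a minimal sketch of what such a preprocessor module could look like. It is not taken from the repository: the module name, the dataset name `open_mylogs`, the assumed raw-log layout, and the exact `params` keys are illustrative assumptions; only the `register`/`process_logs`/`process_line` interfaces follow the description above and the format section below.

```python
# preprocess/my_logs.py -- hypothetical sketch, not an actual module of this repo.
from .registry import register  # registry decorator, as in the other packages
from .utils import process_logs, remove_parameters  # assumed to live in preprocess/utils.py


def process_line(line):
    # Assumed raw layout "<label> <raw message ...>"; adapt to your dataset.
    label, msg = line.strip().split(' ', 1)
    # Keep only the label and the parameter-free message template.
    return f"{label} {remove_parameters(msg)}"


@register("open_mylogs")
def preprocess_dataset(params):
    # Reads the raw logs and writes one f"{label} {msg}" line per log.
    # The params keys are assumed to mirror the --raw_logs/--logs arguments.
    process_logs(params['raw_logs'], params['logs'], process_line)
```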
When done, add the module name to the `__init__.py` list of modules of the `preprocess` package, and add the name from the decorator to the argparse parameter options as a logs type. For example, `--logs_type open_Apache`.

##### Preprocessed Logs Format

This format is ensured by the `process_line` function, which is to be defined in each preprocessor.

```python
def process_line(line):
    """
    Processes a given line from the raw logs.

    Parameter
    ---------
    line : str
        One line from the raw logs.

    Returns
    -------
    str
        String with the format f"{label} {msg}" where the `label` indicates whether
        the log is anomalous and if so, which anomaly category, and `msg` is the
        filtered log message without parameters.

    """
    # your code
```

To filter the log message parameters, use the `remove_parameters(msg)` function from the `utils.py` module in the `preprocess` package.

#### How to run a new experiment

Several example experiments are included in the repository. The best way to start creating a new one is to follow the example of the others, especially the main function structure and its experiment function, be it training- or testing-focused.

The key things the experiment should include are the following:

- **Args parsing**: create custom `init_args()` and `parse_args(args)` functions for your experiment that call `init_main_args()` from the `init_params.py` module.

- **Output file handling**: use the `file_handling(params)` function (see `utils.py` in the main directory of the repo).

- **Preprocessing raw logs**: if the `--raw_logs` argument is provided, get the preprocessing function from the `preprocess` module registry using the `--logs_type` argument, calling the `get_preprocessor(f'{logs_type}')` function.

- **Load logs**: call the `load_logs(params, ...)` function to get the preprocessed logs from the directory specified in the `--logs` parameter. It returns a tuple of x, y, and the target label names.


##### Custom experiment

These are the main functions to consider for a custom experiment, usually encapsulated in a function of its own.

**Feature Engineering**

- `extract_features(x, params)` from the `feature_engineering` package's `utils.py` module: Extracts all features specified in the `--features` parameter from the preprocessed logs. See the function definition for further details.
- `build_vocabulary(x)` from the `feature_engineering` package's `vectorizer.py` module: Divides each log into tokens and creates the vocabulary. See the function definition for further details.
- `log_to_vector(x, vocabulary)` from the `feature_engineering` package's `vectorizer.py` module: Vectorizes each log message using a dict of words to indices. See the function definition for further details.
- `get_features_vector(x_vector, vocabulary, params)` from the `feature_engineering` package's `utils.py` module: Extracts all specified features from the vectorized logs. See the function definition for further details. A sketch of how these functions fit together follows this list.
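To see how these functions might fit together, here is a rough sketch of a training-time feature pipeline. The function signatures follow the list above; everything else (the helper name `featurize`, building the vocabulary from the training split only, and the exact shape of the returned features) is an assumption to be checked against the function definitions.

```python
# Hypothetical glue code -- a sketch, not the repository's actual pipeline.
from .feature_engineering.vectorizer import build_vocabulary, log_to_vector
from .feature_engineering.utils import get_features_vector


def featurize(x_train, x_test, params):
    # Tokenize the training logs and build the word -> index vocabulary
    # (assumed here to be built from the training split only).
    vocabulary = build_vocabulary(x_train)
    # Map every log message to a vector over that vocabulary.
    x_train_vec = log_to_vector(x_train, vocabulary)
    x_test_vec = log_to_vector(x_test, vocabulary)
    # Extract the features requested via --features (e.g. TF-ILF) from both splits.
    train_features = get_features_vector(x_train_vec, vocabulary, params)
    test_features = get_features_vector(x_test_vec, vocabulary, params)
    return train_features, test_features
```

For the single-call path, `extract_features(x, params)` from the same `utils.py` module extracts all the specified features directly from the preprocessed logs, as per the first bullet.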
**Model training and inference**

Each model extends the `BaseModel` class from the `base_model.py` module. See the class definition for further details.

There are two registries in the `models` package: one for binary models, meant to be used for anomaly detection, and another for multi-class models used to classify the anomalies. Get the constructor for either one using the specified `--binary_classifier` or `--multi_classifier` argument, e.g. `binary_classifier_registry.get_binary_model(params['binary_classifier'])`.

By extending `BaseModel`, a model is automatically saved whenever it fits the data. Load a model by calling its `load()` method; it uses the `params` attribute of the `BaseModel` class to get the experiment id and load the corresponding model.

To save the params of an experiment, call the `save_params(params)` function from the `utils.py` module in the main directory. Call `load_params(params)` when using the module for inference only.

**Reporting**

There are two kinds of reports, black box and white box, with a registry for each in the `reporting` package.

To use them, call the corresponding registry and obtain the report wrapper, e.g. `black_box_report_registry.get_bb_report('acc')`.

To add new reports, see the analogous explanation for [models](#how-to-add-a-new-model) or [features](#how-to-extract-a-new-feature) below.

**Saving results**

Among the provided experiments, `test_pu.py` and `train_multi.py` save their results by creating a dict of column names to lists of results. The `save_results` function from the `utils.py` module is then used to save them to a CSV file.



#### How to add a new model

To add a new model, implement a class that extends the `BaseModel` class and include its module in the `models` package. See the class definition for further details.

Decorate a function that calls the model's constructor and returns an instance of the model with the `@register(f"{model_name}")` decorator from either the `binary_registry.py` or the `multi_registry.py` module of the `models` package, depending on whether the model is for anomaly detection or anomaly classification.

Finally, make sure you add the module's name to the `__init__.py` of the `models` package, and add the model option in the `init_params.py` module within the list for either the `--binary_classifier` or `--multi_classifier` argument. This way the constructor can be obtained by doing `binary_classifier_registry.get_binary_model(params['binary_classifier'])`, for example. A minimal sketch of such a wrapper follows.
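For instance, registering a hypothetical logistic-regression binary model might look like the sketch below. The `BaseModel` constructor signature and the `fit`/`predict` expectations are assumptions made for illustration; check `base_model.py` for the real interface:

```python
from sklearn.linear_model import LogisticRegression

from .base_model import BaseModel
from .binary_registry import register


class LogisticRegressionModel(BaseModel):
    # Hypothetical wrapper: BaseModel is assumed to handle saving/loading,
    # while this class delegates fitting and prediction to scikit-learn.
    def __init__(self, params):
        super().__init__(params)
        self.model = LogisticRegression()

    def fit(self, x, y):
        self.model.fit(x, y)
        return self

    def predict(self, x):
        return self.model.predict(x)


@register('logistic_regression')
def instantiate_logistic_regression(params):
    # Fetched via binary_classifier_registry.get_binary_model('logistic_regression')
    return LogisticRegressionModel(params)
```

Remember to also list the new module in `models/__init__.py` and add `logistic_regression` as a `--binary_classifier` option in `init_params.py`.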
#### How to extract a new feature

To add a new feature extractor, create a module in the `feature_engineering` package that wraps your feature extractor function and returns the features. See the `length.py` module as an example for further details.

As in the other cases, decorate the wrapper function with `@register(f"{feature_name}")`, and make sure you add the module name to the `__init__.py` of the `feature_engineering` package and the feature as an option of the `--features` argument in the `init_params.py` module.



### Included Experiments

A high-level overview of each of the experiments included in the repository.

#### Testing PULearning

`test_pu.py` is mainly focused on demonstrating the robustness of LogClass for anomaly detection when only a small amount of data is labeled as anomalous.

It compares PULearning+RandomForest with any other given anomaly detection algorithm. Using the given data, it starts with only healthy logs in the unlabeled set and gradually increases the share of anomalous logs up to 10%. To test PULearning, run the following command in the home directory of this project:

```
python -m LogClass.test_pu --logs_type "bgl" --raw_logs "./Data/RAS from Weibin/RAS_raw_label.dat" --binary_classifier regular --ratio 8 --step 1 --top_percentage 11 --kfold 3
```

This first preprocesses the logs. Then, for each k-fold iteration, it performs feature extraction and forces a 1:8 ratio of anomalous to healthy logs. Finally, with a step of 1%, it goes from 0% to 10% anomalous logs in the unlabeled set and compares the accuracy of both anomaly detection algorithms. If no other algorithm is specified, it defaults to a plain RandomForest.

#### Testing Anomaly Classification

`train_multi.py` is focused on showing the robustness of LogClass' TF-ILF feature extraction approach for multi-class anomaly classification. The main detail is that when using `--kfold N`, one can swap the training/testing data slices using the `--swap` flag. This way, for instance, it can train on 10% of the logs and test on the remaining 90% when pairing `--swap` with `--kfold 10`. To run such an experiment, use the following command from the parent directory of the project:

```
python -m LogClass.train_multi --logs_type "open_Apache" --raw_logs "./Data/open_source_logs/" --kfold 10 --swap
```

#### Global LogClass

`logclass.py` is set up to do either training or testing of the learned models depending on the flags. For example, to preprocess the logs and train, run the following command in the home directory of this project:

```
python -m LogClass.logclass --train --kfold 3 --logs_type "bgl" --raw_logs "./Data/RAS_LOGS"
```

This first preprocesses the raw BGL logs and extracts their TF-ILF features, then trains and saves both PULearning with a RandomForest for anomaly detection and an SVM for multi-class anomaly classification.

For running inference, simply run:

```
python -m LogClass.logclass --logs_type
```

In this case it loads the learned feature extraction approach and both trained models, and runs inference on the whole logs.

#### Binary training/inference

`train_binary.py` and `run_binary.py` simply split the binary part of `logclass.py` into two modules: one that trains both the feature extraction and the models, and another that loads these and runs inference.



### Citing

If you find LogClass useful for your research, please consider citing the paper:

```
@ARTICLE{9339940,
  author={Meng, Weibin and Liu, Ying and Zhang, Shenglin and Zaiter, Federico and Zhang, Yuzhe and Huang, Yuheng and Yu, Zhaoyang and Zhang, Yuzhi and Song, Lei and Zhang, Ming and Pei, Dan},
  journal={IEEE Transactions on Network and Service Management},
  title={LogClass: Anomalous Log Identification and Classification with Partial Labels},
  year={2021},
  doi={10.1109/TNSM.2021.3055425}
}
```

This code was completed by [@Weibin Meng](https://github.com/WeibinMeng) and [@Federico Zaiter](https://github.com/federicozaiter).
367 | -------------------------------------------------------------------------------- /data/open_source_logs/hpc/groundtruth.seq: -------------------------------------------------------------------------------- 1 | 8 2 | 8 3 | 8 4 | 8 5 | 8 6 | 8 7 | 8 8 | 8 9 | 8 10 | 8 11 | 8 12 | 8 13 | 22 14 | 22 15 | 25 16 | 21 17 | 20 18 | 20 19 | 20 20 | 20 21 | 29 22 | 20 23 | 23 24 | 20 25 | 20 26 | 29 27 | 29 28 | 20 29 | 29 30 | 20 31 | 20 32 | 20 33 | 23 34 | 29 35 | 29 36 | 29 37 | 29 38 | 20 39 | 29 40 | 29 41 | 29 42 | 29 43 | 20 44 | 20 45 | 20 46 | 29 47 | 29 48 | 29 49 | 20 50 | 20 51 | 20 52 | 20 53 | 20 54 | 29 55 | 29 56 | 20 57 | 29 58 | 20 59 | 29 60 | 20 61 | 29 62 | 29 63 | 20 64 | 29 65 | 20 66 | 29 67 | 20 68 | 20 69 | 20 70 | 20 71 | 20 72 | 29 73 | 20 74 | 29 75 | 20 76 | 20 77 | 20 78 | 29 79 | 20 80 | 29 81 | 29 82 | 29 83 | 20 84 | 29 85 | 29 86 | 29 87 | 29 88 | 29 89 | 29 90 | 20 91 | 29 92 | 20 93 | 23 94 | 29 95 | 29 96 | 29 97 | 29 98 | 20 99 | 20 100 | 20 101 | 20 102 | 20 103 | 20 104 | 20 105 | 29 106 | 29 107 | 20 108 | 29 109 | 20 110 | 20 111 | 29 112 | 29 113 | 29 114 | 20 115 | 20 116 | 20 117 | 20 118 | 20 119 | 29 120 | 20 121 | 29 122 | 20 123 | 29 124 | 20 125 | 29 126 | 20 127 | 29 128 | 29 129 | 29 130 | 29 131 | 20 132 | 20 133 | 20 134 | 20 135 | 29 136 | 29 137 | 29 138 | 20 139 | 29 140 | 29 141 | 20 142 | 29 143 | 29 144 | 20 145 | 29 146 | 20 147 | 20 148 | 29 149 | 29 150 | 21 151 | 20 152 | 29 153 | 29 154 | 7 155 | 7 156 | 7 157 | 7 158 | 7 159 | 7 160 | 7 161 | 7 162 | 7 163 | 7 164 | 16 165 | 16 166 | 17 167 | 16 168 | 16 169 | 17 170 | 17 171 | 16 172 | 17 173 | 30 174 | 30 175 | 30 176 | 30 177 | 30 178 | 18 179 | 18 180 | 5 181 | 5 182 | 5 183 | 5 184 | 5 185 | 5 186 | 5 187 | 5 188 | 5 189 | 5 190 | 5 191 | 5 192 | 5 193 | 5 194 | 5 195 | 5 196 | 5 197 | 5 198 | 5 199 | 5 200 | 5 201 | 5 202 | 5 203 | 5 204 | 5 205 | 5 206 | 5 207 | 5 208 | 5 209 | 5 210 | 5 211 | 5 212 | 12 213 | 12 214 | 14 215 | 10 216 | 10 217 | 10 218 | 10 219 | 10 220 | 10 221 | 10 222 | 10 223 | 10 224 | 10 225 | 10 226 | 10 227 | 10 228 | 10 229 | 10 230 | 10 231 | 10 232 | 10 233 | 10 234 | 10 235 | 10 236 | 10 237 | 10 238 | 10 239 | 10 240 | 10 241 | 10 242 | 10 243 | 10 244 | 10 245 | 10 246 | 10 247 | 10 248 | 10 249 | 10 250 | 10 251 | 10 252 | 10 253 | 10 254 | 10 255 | 10 256 | 10 257 | 10 258 | 10 259 | 10 260 | 10 261 | 10 262 | 10 263 | 10 264 | 10 265 | 10 266 | 10 267 | 10 268 | 10 269 | 10 270 | 10 271 | 10 272 | 10 273 | 10 274 | 10 275 | 10 276 | 10 277 | 10 278 | 10 279 | 10 280 | 10 281 | 10 282 | 10 283 | 10 284 | 10 285 | 10 286 | 10 287 | 10 288 | 10 289 | 10 290 | 10 291 | 10 292 | 10 293 | 10 294 | 10 295 | 10 296 | 10 297 | 10 298 | 10 299 | 10 300 | 10 301 | 10 302 | 10 303 | 10 304 | 10 305 | 6 306 | 31 307 | 35 308 | 35 309 | 35 310 | 35 311 | 32 312 | 35 313 | 35 314 | 35 315 | 35 316 | 35 317 | 32 318 | 35 319 | 27 320 | 27 321 | 27 322 | 27 323 | 15 324 | 15 325 | 15 326 | 15 327 | 15 328 | 15 329 | 15 330 | 15 331 | 15 332 | 15 333 | 15 334 | 15 335 | 4 336 | 4 337 | 4 338 | 4 339 | 4 340 | 4 341 | 4 342 | 4 343 | 4 344 | 4 345 | 4 346 | 4 347 | 4 348 | 4 349 | 4 350 | 4 351 | 4 352 | 4 353 | 4 354 | 4 355 | 4 356 | 4 357 | 4 358 | 4 359 | 4 360 | 4 361 | 4 362 | 4 363 | 4 364 | 4 365 | 4 366 | 4 367 | 4 368 | 4 369 | 4 370 | 4 371 | 36 372 | 36 373 | 36 374 | 36 375 | 36 376 | 36 377 | 36 378 | 42 379 | 42 380 | 36 381 | 36 382 | 28 383 | 28 384 | 33 385 | 36 386 | 36 387 | 36 388 | 28 389 | 28 390 | 36 391 | 28 392 | 28 393 | 33 394 
| 28 395 | 36 396 | 36 397 | 36 398 | 36 399 | 28 400 | 28 401 | 28 402 | 3 403 | 36 404 | 36 405 | 36 406 | 36 407 | 36 408 | 36 409 | 36 410 | 33 411 | 36 412 | 39 413 | 33 414 | 36 415 | 36 416 | 33 417 | 33 418 | 33 419 | 36 420 | 33 421 | 28 422 | 36 423 | 36 424 | 33 425 | 28 426 | 36 427 | 33 428 | 28 429 | 3 430 | 33 431 | 28 432 | 36 433 | 33 434 | 33 435 | 33 436 | 28 437 | 33 438 | 33 439 | 36 440 | 36 441 | 36 442 | 33 443 | 33 444 | 28 445 | 28 446 | 33 447 | 36 448 | 33 449 | 33 450 | 36 451 | 28 452 | 39 453 | 36 454 | 36 455 | 33 456 | 36 457 | 33 458 | 36 459 | 36 460 | 42 461 | 28 462 | 28 463 | 36 464 | 28 465 | 28 466 | 28 467 | 36 468 | 36 469 | 36 470 | 28 471 | 36 472 | 36 473 | 33 474 | 33 475 | 36 476 | 36 477 | 28 478 | 36 479 | 36 480 | 28 481 | 33 482 | 33 483 | 33 484 | 33 485 | 33 486 | 28 487 | 28 488 | 28 489 | 36 490 | 36 491 | 33 492 | 33 493 | 33 494 | 33 495 | 36 496 | 36 497 | 33 498 | 36 499 | 28 500 | 28 501 | 28 502 | 36 503 | 36 504 | 33 505 | 28 506 | 36 507 | 28 508 | 36 509 | 33 510 | 36 511 | 36 512 | 28 513 | 28 514 | 33 515 | 33 516 | 36 517 | 36 518 | 36 519 | 33 520 | 36 521 | 36 522 | 33 523 | 33 524 | 33 525 | 36 526 | 36 527 | 36 528 | 42 529 | 42 530 | 28 531 | 36 532 | 36 533 | 33 534 | 36 535 | 36 536 | 36 537 | 36 538 | 36 539 | 33 540 | 33 541 | 34 542 | 36 543 | 36 544 | 39 545 | 36 546 | 36 547 | 36 548 | 36 549 | 36 550 | 36 551 | 28 552 | 33 553 | 33 554 | 28 555 | 36 556 | 36 557 | 33 558 | 33 559 | 36 560 | 36 561 | 36 562 | 33 563 | 1 564 | 33 565 | 42 566 | 33 567 | 36 568 | 36 569 | 36 570 | 33 571 | 28 572 | 33 573 | 28 574 | 36 575 | 39 576 | 28 577 | 33 578 | 33 579 | 36 580 | 36 581 | 33 582 | 28 583 | 36 584 | 33 585 | 42 586 | 36 587 | 33 588 | 28 589 | 33 590 | 33 591 | 33 592 | 33 593 | 36 594 | 33 595 | 33 596 | 36 597 | 36 598 | 36 599 | 36 600 | 36 601 | 33 602 | 33 603 | 36 604 | 42 605 | 33 606 | 36 607 | 36 608 | 36 609 | 36 610 | 42 611 | 33 612 | 36 613 | 28 614 | 36 615 | 36 616 | 36 617 | 36 618 | 33 619 | 33 620 | 28 621 | 36 622 | 36 623 | 36 624 | 28 625 | 36 626 | 36 627 | 28 628 | 37 629 | 33 630 | 33 631 | 33 632 | 33 633 | 36 634 | 33 635 | 28 636 | 28 637 | 33 638 | 36 639 | 36 640 | 33 641 | 28 642 | 36 643 | 33 644 | 36 645 | 33 646 | 42 647 | 36 648 | 36 649 | 36 650 | 36 651 | 42 652 | 36 653 | 36 654 | 36 655 | 33 656 | 40 657 | 36 658 | 36 659 | 36 660 | 36 661 | 36 662 | 36 663 | 36 664 | 36 665 | 36 666 | 36 667 | 36 668 | 33 669 | 28 670 | 28 671 | 42 672 | 36 673 | 28 674 | 36 675 | 39 676 | 28 677 | 33 678 | 28 679 | 36 680 | 36 681 | 42 682 | 36 683 | 28 684 | 28 685 | 36 686 | 36 687 | 42 688 | 28 689 | 33 690 | 33 691 | 36 692 | 36 693 | 36 694 | 33 695 | 33 696 | 33 697 | 36 698 | 36 699 | 36 700 | 2 701 | 33 702 | 36 703 | 42 704 | 36 705 | 28 706 | 33 707 | 36 708 | 36 709 | 36 710 | 36 711 | 41 712 | 41 713 | 38 714 | 41 715 | 41 716 | 38 717 | 44 718 | 41 719 | 44 720 | 41 721 | 41 722 | 43 723 | 41 724 | 41 725 | 43 726 | 41 727 | 44 728 | 41 729 | 41 730 | 44 731 | 44 732 | 41 733 | 44 734 | 44 735 | 38 736 | 43 737 | 41 738 | 44 739 | 44 740 | 41 741 | 44 742 | 44 743 | 44 744 | 44 745 | 41 746 | 38 747 | 38 748 | 38 749 | 44 750 | 38 751 | 41 752 | 44 753 | 44 754 | 41 755 | 44 756 | 38 757 | 38 758 | 41 759 | 38 760 | 41 761 | 44 762 | 38 763 | 44 764 | 44 765 | 41 766 | 38 767 | 41 768 | 44 769 | 38 770 | 38 771 | 38 772 | 44 773 | 44 774 | 44 775 | 38 776 | 38 777 | 41 778 | 38 779 | 41 780 | 41 781 | 41 782 | 41 783 | 38 784 | 38 785 | 41 786 | 38 787 | 41 788 | 41 789 | 
41 790 | 44 791 | 38 792 | 38 793 | 41 794 | 38 795 | 41 796 | 38 797 | 44 798 | 41 799 | 38 800 | 38 801 | 38 802 | 44 803 | 41 804 | 44 805 | 38 806 | 38 807 | 38 808 | 38 809 | 44 810 | 44 811 | 44 812 | 44 813 | 44 814 | 44 815 | 44 816 | 38 817 | 41 818 | 44 819 | 41 820 | 38 821 | 41 822 | 44 823 | 41 824 | 44 825 | 38 826 | 43 827 | 44 828 | 44 829 | 44 830 | 38 831 | 44 832 | 44 833 | 44 834 | 41 835 | 44 836 | 44 837 | 44 838 | 38 839 | 38 840 | 41 841 | 41 842 | 38 843 | 44 844 | 44 845 | 44 846 | 44 847 | 44 848 | 38 849 | 44 850 | 41 851 | 38 852 | 41 853 | 41 854 | 41 855 | 41 856 | 44 857 | 41 858 | 44 859 | 38 860 | 41 861 | 38 862 | 38 863 | 38 864 | 38 865 | 44 866 | 38 867 | 41 868 | 38 869 | 38 870 | 41 871 | 41 872 | 38 873 | 41 874 | 38 875 | 41 876 | 41 877 | 41 878 | 38 879 | 38 880 | 41 881 | 41 882 | 38 883 | 38 884 | 38 885 | 38 886 | 44 887 | 41 888 | 41 889 | 44 890 | 44 891 | 44 892 | 44 893 | 41 894 | 44 895 | 38 896 | 41 897 | 41 898 | 44 899 | 41 900 | 38 901 | 44 902 | 38 903 | 41 904 | 41 905 | 41 906 | 38 907 | 44 908 | 38 909 | 38 910 | 41 911 | 38 912 | 44 913 | 44 914 | 41 915 | 44 916 | 44 917 | 44 918 | 38 919 | 44 920 | 41 921 | 44 922 | 44 923 | 44 924 | 44 925 | 44 926 | 44 927 | 41 928 | 44 929 | 41 930 | 44 931 | 44 932 | 44 933 | 43 934 | 41 935 | 44 936 | 41 937 | 44 938 | 41 939 | 44 940 | 41 941 | 38 942 | 41 943 | 38 944 | 41 945 | 44 946 | 44 947 | 44 948 | 44 949 | 41 950 | 38 951 | 38 952 | 44 953 | 44 954 | 38 955 | 41 956 | 44 957 | 41 958 | 44 959 | 44 960 | 38 961 | 38 962 | 38 963 | 38 964 | 38 965 | 41 966 | 38 967 | 44 968 | 44 969 | 44 970 | 41 971 | 38 972 | 41 973 | 38 974 | 38 975 | 41 976 | 38 977 | 41 978 | 38 979 | 38 980 | 41 981 | 38 982 | 41 983 | 44 984 | 41 985 | 44 986 | 44 987 | 38 988 | 44 989 | 44 990 | 44 991 | 41 992 | 44 993 | 41 994 | 41 995 | 38 996 | 38 997 | 41 998 | 41 999 | 41 1000 | 38 1001 | 38 1002 | 38 1003 | 38 1004 | 41 1005 | 38 1006 | 41 1007 | 38 1008 | 44 1009 | 41 1010 | 41 1011 | 38 1012 | 41 1013 | 41 1014 | 38 1015 | 38 1016 | 38 1017 | 38 1018 | 41 1019 | 41 1020 | 41 1021 | 41 1022 | 44 1023 | 41 1024 | 41 1025 | 41 1026 | 41 1027 | 41 1028 | 38 1029 | 43 1030 | 38 1031 | 38 1032 | 38 1033 | 38 1034 | 44 1035 | 38 1036 | 41 1037 | 44 1038 | 41 1039 | 41 1040 | 41 1041 | 38 1042 | 41 1043 | 44 1044 | 38 1045 | 41 1046 | 38 1047 | 41 1048 | 41 1049 | 44 1050 | 41 1051 | 41 1052 | 38 1053 | 38 1054 | 38 1055 | 38 1056 | 41 1057 | 38 1058 | 44 1059 | 44 1060 | 44 1061 | 44 1062 | 44 1063 | 43 1064 | 38 1065 | 38 1066 | 41 1067 | 38 1068 | 38 1069 | 38 1070 | 44 1071 | 41 1072 | 44 1073 | 41 1074 | 41 1075 | 41 1076 | 38 1077 | 44 1078 | 38 1079 | 44 1080 | 41 1081 | 38 1082 | 38 1083 | 38 1084 | 41 1085 | 43 1086 | 38 1087 | 38 1088 | 38 1089 | 43 1090 | 44 1091 | 38 1092 | 44 1093 | 41 1094 | 38 1095 | 44 1096 | 41 1097 | 38 1098 | 41 1099 | 44 1100 | 41 1101 | 44 1102 | 44 1103 | 41 1104 | 44 1105 | 44 1106 | 44 1107 | 38 1108 | 44 1109 | 44 1110 | 43 1111 | 41 1112 | 44 1113 | 44 1114 | 44 1115 | 44 1116 | 44 1117 | 44 1118 | 44 1119 | 44 1120 | 44 1121 | 38 1122 | 43 1123 | 41 1124 | 38 1125 | 41 1126 | 38 1127 | 41 1128 | 41 1129 | 41 1130 | 41 1131 | 38 1132 | 41 1133 | 41 1134 | 41 1135 | 41 1136 | 38 1137 | 41 1138 | 41 1139 | 41 1140 | 38 1141 | 41 1142 | 38 1143 | 43 1144 | 41 1145 | 38 1146 | 38 1147 | 38 1148 | 43 1149 | 38 1150 | 41 1151 | 38 1152 | 44 1153 | 38 1154 | 38 1155 | 38 1156 | 44 1157 | 44 1158 | 44 1159 | 44 1160 | 44 1161 | 44 1162 | 38 1163 | 43 1164 | 44 1165 | 41 
1166 | 38 1167 | 44 1168 | 38 1169 | 38 1170 | 38 1171 | 38 1172 | 41 1173 | 44 1174 | 41 1175 | 41 1176 | 44 1177 | 44 1178 | 44 1179 | 44 1180 | 38 1181 | 38 1182 | 38 1183 | 38 1184 | 41 1185 | 38 1186 | 44 1187 | 38 1188 | 44 1189 | 44 1190 | 44 1191 | 44 1192 | 38 1193 | 41 1194 | 38 1195 | 41 1196 | 41 1197 | 38 1198 | 38 1199 | 38 1200 | 38 1201 | 41 1202 | 38 1203 | 38 1204 | 41 1205 | 41 1206 | 44 1207 | 41 1208 | 44 1209 | 44 1210 | 44 1211 | 44 1212 | 41 1213 | 38 1214 | 41 1215 | 38 1216 | 38 1217 | 38 1218 | 44 1219 | 41 1220 | 44 1221 | 44 1222 | 41 1223 | 38 1224 | 38 1225 | 38 1226 | 44 1227 | 44 1228 | 38 1229 | 44 1230 | 41 1231 | 44 1232 | 44 1233 | 38 1234 | 44 1235 | 44 1236 | 44 1237 | 44 1238 | 44 1239 | 44 1240 | 44 1241 | 44 1242 | 44 1243 | 38 1244 | 44 1245 | 44 1246 | 38 1247 | 38 1248 | 44 1249 | 44 1250 | 41 1251 | 41 1252 | 44 1253 | 44 1254 | 44 1255 | 44 1256 | 38 1257 | 44 1258 | 38 1259 | 44 1260 | 41 1261 | 44 1262 | 44 1263 | 38 1264 | 44 1265 | 41 1266 | 41 1267 | 38 1268 | 44 1269 | 38 1270 | 41 1271 | 44 1272 | 44 1273 | 44 1274 | 41 1275 | 44 1276 | 44 1277 | 44 1278 | 41 1279 | 44 1280 | 38 1281 | 41 1282 | 44 1283 | 41 1284 | 41 1285 | 44 1286 | 44 1287 | 38 1288 | 44 1289 | 44 1290 | 44 1291 | 41 1292 | 44 1293 | 44 1294 | 38 1295 | 38 1296 | 44 1297 | 44 1298 | 38 1299 | 44 1300 | 41 1301 | 44 1302 | 44 1303 | 44 1304 | 44 1305 | 41 1306 | 44 1307 | 38 1308 | 44 1309 | 44 1310 | 44 1311 | 44 1312 | 38 1313 | 44 1314 | 44 1315 | 38 1316 | 41 1317 | 41 1318 | 38 1319 | 44 1320 | 44 1321 | 44 1322 | 44 1323 | 44 1324 | 38 1325 | 44 1326 | 44 1327 | 44 1328 | 44 1329 | 44 1330 | 44 1331 | 41 1332 | 41 1333 | 41 1334 | 38 1335 | 44 1336 | 41 1337 | 41 1338 | 44 1339 | 44 1340 | 38 1341 | 44 1342 | 44 1343 | 44 1344 | 44 1345 | 44 1346 | 38 1347 | 41 1348 | 44 1349 | 41 1350 | 44 1351 | 44 1352 | 44 1353 | 44 1354 | 44 1355 | 44 1356 | 44 1357 | 44 1358 | 44 1359 | 43 1360 | 44 1361 | 44 1362 | 44 1363 | 44 1364 | 44 1365 | 44 1366 | 44 1367 | 44 1368 | 44 1369 | 43 1370 | 44 1371 | 41 1372 | 44 1373 | 41 1374 | 44 1375 | 44 1376 | 44 1377 | 44 1378 | 44 1379 | 44 1380 | 44 1381 | 41 1382 | 44 1383 | 41 1384 | 44 1385 | 44 1386 | 38 1387 | 43 1388 | 41 1389 | 41 1390 | 44 1391 | 38 1392 | 41 1393 | 44 1394 | 44 1395 | 41 1396 | 44 1397 | 44 1398 | 44 1399 | 44 1400 | 44 1401 | 44 1402 | 38 1403 | 41 1404 | 44 1405 | 44 1406 | 44 1407 | 44 1408 | 43 1409 | 41 1410 | 44 1411 | 44 1412 | 44 1413 | 41 1414 | 41 1415 | 41 1416 | 38 1417 | 38 1418 | 44 1419 | 41 1420 | 43 1421 | 38 1422 | 38 1423 | 41 1424 | 38 1425 | 41 1426 | 38 1427 | 44 1428 | 44 1429 | 44 1430 | 38 1431 | 38 1432 | 43 1433 | 38 1434 | 35 1435 | 35 1436 | 35 1437 | 35 1438 | 35 1439 | 35 1440 | 32 1441 | 35 1442 | 35 1443 | 32 1444 | 35 1445 | 35 1446 | 35 1447 | 35 1448 | 35 1449 | 35 1450 | 32 1451 | 35 1452 | 35 1453 | 32 1454 | 32 1455 | 32 1456 | 32 1457 | 11 1458 | 11 1459 | 26 1460 | 11 1461 | 11 1462 | 11 1463 | 11 1464 | 11 1465 | 11 1466 | 11 1467 | 11 1468 | 11 1469 | 11 1470 | 11 1471 | 11 1472 | 11 1473 | 11 1474 | 11 1475 | 11 1476 | 11 1477 | 11 1478 | 11 1479 | 11 1480 | 11 1481 | 11 1482 | 11 1483 | 11 1484 | 11 1485 | 11 1486 | 11 1487 | 11 1488 | 11 1489 | 11 1490 | 11 1491 | 11 1492 | 11 1493 | 11 1494 | 11 1495 | 11 1496 | 11 1497 | 11 1498 | 11 1499 | 11 1500 | 11 1501 | 11 1502 | 11 1503 | 11 1504 | 11 1505 | 11 1506 | 11 1507 | 11 1508 | 11 1509 | 11 1510 | 11 1511 | 11 1512 | 11 1513 | 11 1514 | 11 1515 | 11 1516 | 11 1517 | 11 1518 | 11 1519 | 11 1520 | 11 1521 
| 11 1522 | 11 1523 | 11 1524 | 11 1525 | 11 1526 | 11 1527 | 11 1528 | 11 1529 | 11 1530 | 11 1531 | 11 1532 | 11 1533 | 11 1534 | 11 1535 | 11 1536 | 11 1537 | 11 1538 | 11 1539 | 11 1540 | 11 1541 | 11 1542 | 19 1543 | 11 1544 | 11 1545 | 11 1546 | 11 1547 | 11 1548 | 11 1549 | 11 1550 | 11 1551 | 11 1552 | 11 1553 | 11 1554 | 11 1555 | 11 1556 | 11 1557 | 11 1558 | 11 1559 | 11 1560 | 11 1561 | 11 1562 | 11 1563 | 11 1564 | 11 1565 | 11 1566 | 11 1567 | 11 1568 | 11 1569 | 11 1570 | 11 1571 | 11 1572 | 11 1573 | 11 1574 | 11 1575 | 11 1576 | 11 1577 | 11 1578 | 11 1579 | 11 1580 | 11 1581 | 11 1582 | 11 1583 | 11 1584 | 11 1585 | 11 1586 | 11 1587 | 11 1588 | 11 1589 | 11 1590 | 11 1591 | 11 1592 | 11 1593 | 11 1594 | 11 1595 | 11 1596 | 11 1597 | 11 1598 | 11 1599 | 11 1600 | 11 1601 | 11 1602 | 11 1603 | 11 1604 | 11 1605 | 11 1606 | 11 1607 | 11 1608 | 11 1609 | 11 1610 | 11 1611 | 11 1612 | 11 1613 | 11 1614 | 11 1615 | 11 1616 | 11 1617 | 11 1618 | 11 1619 | 11 1620 | 11 1621 | 11 1622 | 11 1623 | 11 1624 | 11 1625 | 11 1626 | 11 1627 | 11 1628 | 11 1629 | 11 1630 | 11 1631 | 11 1632 | 11 1633 | 11 1634 | 11 1635 | 11 1636 | 11 1637 | 11 1638 | 11 1639 | 11 1640 | 11 1641 | 11 1642 | 11 1643 | 11 1644 | 11 1645 | 11 1646 | 11 1647 | 11 1648 | 11 1649 | 11 1650 | 11 1651 | 11 1652 | 11 1653 | 11 1654 | 11 1655 | 11 1656 | 11 1657 | 11 1658 | 11 1659 | 11 1660 | 11 1661 | 11 1662 | 11 1663 | 11 1664 | 11 1665 | 11 1666 | 11 1667 | 11 1668 | 11 1669 | 11 1670 | 11 1671 | 11 1672 | 11 1673 | 11 1674 | 11 1675 | 11 1676 | 11 1677 | 11 1678 | 11 1679 | 11 1680 | 11 1681 | 11 1682 | 11 1683 | 11 1684 | 11 1685 | 11 1686 | 11 1687 | 11 1688 | 11 1689 | 11 1690 | 11 1691 | 11 1692 | 11 1693 | 11 1694 | 11 1695 | 11 1696 | 11 1697 | 11 1698 | 11 1699 | 11 1700 | 11 1701 | 11 1702 | 11 1703 | 11 1704 | 11 1705 | 11 1706 | 11 1707 | 11 1708 | 11 1709 | 11 1710 | 11 1711 | 11 1712 | 11 1713 | 11 1714 | 11 1715 | 11 1716 | 11 1717 | 11 1718 | 11 1719 | 11 1720 | 11 1721 | 11 1722 | 11 1723 | 11 1724 | 11 1725 | 11 1726 | 11 1727 | 11 1728 | 11 1729 | 11 1730 | 11 1731 | 11 1732 | 11 1733 | 11 1734 | 11 1735 | 11 1736 | 11 1737 | 11 1738 | 11 1739 | 11 1740 | 11 1741 | 11 1742 | 11 1743 | 11 1744 | 11 1745 | 11 1746 | 11 1747 | 11 1748 | 11 1749 | 11 1750 | 11 1751 | 11 1752 | 11 1753 | 11 1754 | 11 1755 | 11 1756 | 11 1757 | 11 1758 | 11 1759 | 11 1760 | 11 1761 | 11 1762 | 11 1763 | 11 1764 | 11 1765 | 11 1766 | 11 1767 | 11 1768 | 11 1769 | 11 1770 | 11 1771 | 11 1772 | 11 1773 | 11 1774 | 11 1775 | 11 1776 | 11 1777 | 11 1778 | 11 1779 | 11 1780 | 11 1781 | 11 1782 | 11 1783 | 11 1784 | 11 1785 | 11 1786 | 11 1787 | 11 1788 | 11 1789 | 11 1790 | 11 1791 | 11 1792 | 11 1793 | 11 1794 | 11 1795 | 11 1796 | 11 1797 | 11 1798 | 11 1799 | 11 1800 | 11 1801 | 11 1802 | 11 1803 | 11 1804 | 11 1805 | 11 1806 | 11 1807 | 11 1808 | 11 1809 | 11 1810 | 11 1811 | 11 1812 | 11 1813 | 11 1814 | 11 1815 | 11 1816 | 11 1817 | 11 1818 | 11 1819 | 11 1820 | 11 1821 | 11 1822 | 11 1823 | 11 1824 | 11 1825 | 11 1826 | 11 1827 | 11 1828 | 11 1829 | 11 1830 | 11 1831 | 11 1832 | 11 1833 | 11 1834 | 11 1835 | 11 1836 | 11 1837 | 11 1838 | 11 1839 | 11 1840 | 11 1841 | 9 1842 | 11 1843 | 11 1844 | 11 1845 | 11 1846 | 11 1847 | 11 1848 | 11 1849 | 11 1850 | 11 1851 | 11 1852 | 11 1853 | 11 1854 | 24 1855 | 24 1856 | 24 1857 | 24 1858 | 24 1859 | 24 1860 | 24 1861 | 24 1862 | 24 1863 | 24 1864 | 24 1865 | 24 1866 | 24 1867 | 24 1868 | 24 1869 | 24 1870 | 24 1871 | 24 1872 | 24 1873 | 24 1874 | 24 1875 | 24 1876 | 24 
1877 | 24 1878 | 24 1879 | 24 1880 | 24 1881 | 24 1882 | 24 1883 | 24 1884 | 24 1885 | 24 1886 | 24 1887 | 24 1888 | 24 1889 | 24 1890 | 24 1891 | 24 1892 | 24 1893 | 24 1894 | 24 1895 | 24 1896 | 24 1897 | 24 1898 | 24 1899 | 24 1900 | 24 1901 | 24 1902 | 24 1903 | 24 1904 | 24 1905 | 24 1906 | 24 1907 | 24 1908 | 24 1909 | 24 1910 | 13 1911 | 13 1912 | 13 1913 | 13 1914 | 13 1915 | 13 1916 | 13 1917 | 13 1918 | 13 1919 | 13 1920 | 13 1921 | 13 1922 | 13 1923 | 13 1924 | 13 1925 | 13 1926 | 13 1927 | 13 1928 | 13 1929 | 13 1930 | 13 1931 | 13 1932 | 13 1933 | 13 1934 | 13 1935 | 13 1936 | 13 1937 | 13 1938 | 13 1939 | 13 1940 | 13 1941 | 13 1942 | 13 1943 | 13 1944 | 13 1945 | 13 1946 | 13 1947 | 13 1948 | 13 1949 | 13 1950 | 13 1951 | 13 1952 | 13 1953 | 13 1954 | 13 1955 | 13 1956 | 13 1957 | 13 1958 | 13 1959 | 13 1960 | 13 1961 | 13 1962 | 13 1963 | 13 1964 | 13 1965 | 13 1966 | 13 1967 | 13 1968 | 13 1969 | 13 1970 | 13 1971 | 13 1972 | 13 1973 | 13 1974 | 13 1975 | 13 1976 | 13 1977 | 13 1978 | 13 1979 | 13 1980 | 13 1981 | 13 1982 | 13 1983 | 13 1984 | 13 1985 | 13 1986 | 13 1987 | 13 1988 | 13 1989 | 13 1990 | 13 1991 | 13 1992 | 13 1993 | 13 1994 | 13 1995 | 13 1996 | 13 1997 | 13 1998 | 13 1999 | 13 2000 | 13 2001 | -------------------------------------------------------------------------------- /data/open_source_logs/bgl/groundtruth.seq: -------------------------------------------------------------------------------- 1 | 64 2 | 64 3 | 64 4 | 64 5 | 68 6 | 68 7 | 68 8 | 52 9 | 19 10 | 19 11 | 52 12 | 112 13 | 112 14 | 112 15 | 112 16 | 112 17 | 112 18 | 112 19 | 112 20 | 112 21 | 112 22 | 112 23 | 112 24 | 112 25 | 112 26 | 112 27 | 112 28 | 112 29 | 112 30 | 112 31 | 112 32 | 92 33 | 112 34 | 112 35 | 112 36 | 112 37 | 112 38 | 112 39 | 112 40 | 112 41 | 112 42 | 112 43 | 112 44 | 112 45 | 112 46 | 112 47 | 112 48 | 112 49 | 112 50 | 112 51 | 112 52 | 112 53 | 112 54 | 112 55 | 112 56 | 112 57 | 52 58 | 52 59 | 64 60 | 64 61 | 64 62 | 64 63 | 64 64 | 64 65 | 64 66 | 64 67 | 112 68 | 112 69 | 34 70 | 112 71 | 112 72 | 112 73 | 112 74 | 112 75 | 112 76 | 112 77 | 112 78 | 112 79 | 112 80 | 112 81 | 112 82 | 112 83 | 112 84 | 112 85 | 40 86 | 112 87 | 112 88 | 52 89 | 17 90 | 52 91 | 112 92 | 112 93 | 112 94 | 112 95 | 112 96 | 112 97 | 112 98 | 52 99 | 112 100 | 112 101 | 52 102 | 112 103 | 112 104 | 69 105 | 69 106 | 69 107 | 69 108 | 69 109 | 69 110 | 69 111 | 69 112 | 69 113 | 69 114 | 69 115 | 69 116 | 69 117 | 69 118 | 69 119 | 69 120 | 69 121 | 69 122 | 69 123 | 69 124 | 69 125 | 69 126 | 69 127 | 69 128 | 69 129 | 69 130 | 69 131 | 69 132 | 69 133 | 69 134 | 69 135 | 69 136 | 69 137 | 69 138 | 69 139 | 69 140 | 69 141 | 69 142 | 69 143 | 69 144 | 69 145 | 69 146 | 69 147 | 69 148 | 69 149 | 69 150 | 69 151 | 69 152 | 69 153 | 69 154 | 69 155 | 69 156 | 69 157 | 69 158 | 69 159 | 69 160 | 69 161 | 69 162 | 69 163 | 69 164 | 52 165 | 41 166 | 87 167 | 87 168 | 87 169 | 87 170 | 87 171 | 87 172 | 87 173 | 87 174 | 106 175 | 106 176 | 106 177 | 106 178 | 106 179 | 106 180 | 106 181 | 112 182 | 112 183 | 112 184 | 112 185 | 112 186 | 87 187 | 87 188 | 87 189 | 87 190 | 87 191 | 87 192 | 87 193 | 87 194 | 106 195 | 106 196 | 106 197 | 106 198 | 106 199 | 106 200 | 106 201 | 106 202 | 87 203 | 106 204 | 106 205 | 86 206 | 86 207 | 95 208 | 95 209 | 100 210 | 87 211 | 87 212 | 87 213 | 87 214 | 87 215 | 106 216 | 87 217 | 106 218 | 87 219 | 86 220 | 87 221 | 86 222 | 86 223 | 87 224 | 86 225 | 95 226 | 87 227 | 87 228 | 87 229 | 87 230 | 86 231 | 106 232 | 73 233 | 73 234 | 73 235 
| 73 236 | 100 237 | 100 238 | 86 239 | 73 240 | 95 241 | 57 242 | 95 243 | 57 244 | 100 245 | 100 246 | 80 247 | 80 248 | 81 249 | 79 250 | 109 251 | 109 252 | 96 253 | 57 254 | 99 255 | 57 256 | 57 257 | 74 258 | 74 259 | 74 260 | 105 261 | 104 262 | 104 263 | 58 264 | 82 265 | 89 266 | 58 267 | 84 268 | 84 269 | 85 270 | 85 271 | 79 272 | 71 273 | 79 274 | 78 275 | 63 276 | 63 277 | 61 278 | 61 279 | 61 280 | 63 281 | 61 282 | 92 283 | 93 284 | 96 285 | 96 286 | 93 287 | 93 288 | 96 289 | 103 290 | 72 291 | 99 292 | 99 293 | 74 294 | 102 295 | 77 296 | 101 297 | 58 298 | 58 299 | 90 300 | 58 301 | 94 302 | 85 303 | 85 304 | 85 305 | 71 306 | 71 307 | 71 308 | 71 309 | 71 310 | 71 311 | 91 312 | 91 313 | 91 314 | 112 315 | 51 316 | 112 317 | 64 318 | 64 319 | 64 320 | 64 321 | 64 322 | 64 323 | 112 324 | 112 325 | 112 326 | 64 327 | 64 328 | 64 329 | 64 330 | 64 331 | 64 332 | 64 333 | 112 334 | 64 335 | 64 336 | 112 337 | 112 338 | 112 339 | 112 340 | 112 341 | 112 342 | 112 343 | 112 344 | 112 345 | 112 346 | 19 347 | 64 348 | 112 349 | 52 350 | 112 351 | 112 352 | 112 353 | 112 354 | 112 355 | 112 356 | 112 357 | 112 358 | 112 359 | 112 360 | 112 361 | 112 362 | 14 363 | 30 364 | 52 365 | 112 366 | 112 367 | 112 368 | 112 369 | 112 370 | 17 371 | 64 372 | 14 373 | 64 374 | 68 375 | 68 376 | 68 377 | 68 378 | 68 379 | 68 380 | 68 381 | 68 382 | 68 383 | 68 384 | 52 385 | 52 386 | 52 387 | 68 388 | 68 389 | 68 390 | 68 391 | 112 392 | 112 393 | 112 394 | 112 395 | 112 396 | 112 397 | 112 398 | 112 399 | 112 400 | 112 401 | 112 402 | 112 403 | 112 404 | 112 405 | 112 406 | 112 407 | 112 408 | 112 409 | 112 410 | 112 411 | 112 412 | 112 413 | 112 414 | 112 415 | 112 416 | 112 417 | 112 418 | 112 419 | 112 420 | 112 421 | 112 422 | 112 423 | 112 424 | 112 425 | 112 426 | 112 427 | 112 428 | 112 429 | 14 430 | 52 431 | 64 432 | 112 433 | 107 434 | 112 435 | 100 436 | 100 437 | 80 438 | 57 439 | 57 440 | 57 441 | 105 442 | 105 443 | 105 444 | 82 445 | 79 446 | 79 447 | 112 448 | 107 449 | 107 450 | 106 451 | 73 452 | 100 453 | 80 454 | 57 455 | 57 456 | 105 457 | 52 458 | 59 459 | 52 460 | 112 461 | 112 462 | 112 463 | 112 464 | 112 465 | 112 466 | 112 467 | 112 468 | 112 469 | 68 470 | 68 471 | 68 472 | 112 473 | 112 474 | 112 475 | 112 476 | 112 477 | 112 478 | 112 479 | 112 480 | 112 481 | 112 482 | 112 483 | 112 484 | 112 485 | 112 486 | 112 487 | 112 488 | 112 489 | 112 490 | 112 491 | 112 492 | 112 493 | 112 494 | 112 495 | 112 496 | 52 497 | 28 498 | 52 499 | 52 500 | 112 501 | 112 502 | 112 503 | 112 504 | 112 505 | 112 506 | 112 507 | 112 508 | 112 509 | 112 510 | 112 511 | 112 512 | 112 513 | 112 514 | 112 515 | 112 516 | 112 517 | 112 518 | 112 519 | 112 520 | 112 521 | 112 522 | 76 523 | 47 524 | 68 525 | 68 526 | 68 527 | 68 528 | 68 529 | 68 530 | 68 531 | 68 532 | 68 533 | 68 534 | 68 535 | 68 536 | 68 537 | 68 538 | 68 539 | 68 540 | 68 541 | 68 542 | 68 543 | 68 544 | 68 545 | 68 546 | 68 547 | 68 548 | 68 549 | 68 550 | 68 551 | 68 552 | 68 553 | 68 554 | 68 555 | 68 556 | 68 557 | 68 558 | 68 559 | 112 560 | 112 561 | 112 562 | 112 563 | 112 564 | 112 565 | 112 566 | 112 567 | 112 568 | 112 569 | 112 570 | 112 571 | 112 572 | 112 573 | 112 574 | 112 575 | 112 576 | 112 577 | 112 578 | 112 579 | 112 580 | 112 581 | 112 582 | 112 583 | 112 584 | 112 585 | 112 586 | 112 587 | 112 588 | 112 589 | 112 590 | 112 591 | 112 592 | 112 593 | 112 594 | 112 595 | 112 596 | 112 597 | 20 598 | 24 599 | 112 600 | 112 601 | 112 602 | 112 603 | 112 604 | 112 605 | 112 606 | 112 607 | 112 
608 | 112 609 | 68 610 | 68 611 | 68 612 | 68 613 | 112 614 | 112 615 | 112 616 | 112 617 | 112 618 | 112 619 | 112 620 | 112 621 | 59 622 | 8 623 | 29 624 | 112 625 | 112 626 | 112 627 | 112 628 | 112 629 | 112 630 | 112 631 | 112 632 | 112 633 | 112 634 | 112 635 | 112 636 | 112 637 | 112 638 | 112 639 | 112 640 | 112 641 | 112 642 | 112 643 | 112 644 | 112 645 | 112 646 | 112 647 | 112 648 | 112 649 | 112 650 | 112 651 | 112 652 | 112 653 | 112 654 | 112 655 | 112 656 | 112 657 | 112 658 | 112 659 | 112 660 | 112 661 | 112 662 | 112 663 | 112 664 | 112 665 | 112 666 | 112 667 | 112 668 | 112 669 | 112 670 | 112 671 | 112 672 | 112 673 | 112 674 | 112 675 | 112 676 | 112 677 | 112 678 | 112 679 | 112 680 | 112 681 | 112 682 | 112 683 | 112 684 | 112 685 | 112 686 | 112 687 | 112 688 | 112 689 | 112 690 | 112 691 | 112 692 | 112 693 | 112 694 | 112 695 | 112 696 | 112 697 | 112 698 | 112 699 | 112 700 | 112 701 | 112 702 | 112 703 | 112 704 | 112 705 | 112 706 | 112 707 | 112 708 | 112 709 | 112 710 | 112 711 | 112 712 | 112 713 | 112 714 | 112 715 | 112 716 | 112 717 | 112 718 | 112 719 | 112 720 | 112 721 | 112 722 | 112 723 | 112 724 | 112 725 | 112 726 | 112 727 | 112 728 | 112 729 | 112 730 | 112 731 | 112 732 | 112 733 | 112 734 | 112 735 | 112 736 | 112 737 | 112 738 | 112 739 | 112 740 | 112 741 | 112 742 | 112 743 | 112 744 | 112 745 | 112 746 | 112 747 | 112 748 | 112 749 | 112 750 | 112 751 | 112 752 | 68 753 | 68 754 | 68 755 | 112 756 | 112 757 | 112 758 | 112 759 | 112 760 | 112 761 | 112 762 | 112 763 | 112 764 | 112 765 | 112 766 | 112 767 | 112 768 | 112 769 | 112 770 | 112 771 | 112 772 | 112 773 | 112 774 | 112 775 | 112 776 | 112 777 | 112 778 | 112 779 | 112 780 | 112 781 | 112 782 | 112 783 | 112 784 | 112 785 | 112 786 | 112 787 | 112 788 | 112 789 | 112 790 | 112 791 | 112 792 | 112 793 | 112 794 | 112 795 | 112 796 | 112 797 | 112 798 | 112 799 | 112 800 | 112 801 | 112 802 | 112 803 | 112 804 | 112 805 | 112 806 | 112 807 | 112 808 | 112 809 | 112 810 | 112 811 | 112 812 | 112 813 | 112 814 | 112 815 | 112 816 | 112 817 | 112 818 | 112 819 | 112 820 | 112 821 | 52 822 | 14 823 | 67 824 | 67 825 | 112 826 | 112 827 | 112 828 | 112 829 | 112 830 | 112 831 | 112 832 | 112 833 | 112 834 | 112 835 | 112 836 | 112 837 | 112 838 | 51 839 | 68 840 | 68 841 | 68 842 | 68 843 | 68 844 | 68 845 | 68 846 | 68 847 | 68 848 | 68 849 | 68 850 | 68 851 | 68 852 | 68 853 | 68 854 | 68 855 | 68 856 | 68 857 | 68 858 | 112 859 | 112 860 | 112 861 | 112 862 | 112 863 | 112 864 | 112 865 | 112 866 | 112 867 | 112 868 | 112 869 | 112 870 | 112 871 | 52 872 | 112 873 | 112 874 | 112 875 | 112 876 | 112 877 | 112 878 | 112 879 | 112 880 | 112 881 | 112 882 | 112 883 | 112 884 | 112 885 | 112 886 | 112 887 | 112 888 | 112 889 | 112 890 | 112 891 | 112 892 | 112 893 | 112 894 | 112 895 | 112 896 | 112 897 | 112 898 | 112 899 | 112 900 | 112 901 | 112 902 | 112 903 | 112 904 | 112 905 | 112 906 | 112 907 | 112 908 | 112 909 | 112 910 | 112 911 | 112 912 | 112 913 | 112 914 | 112 915 | 112 916 | 112 917 | 112 918 | 112 919 | 112 920 | 112 921 | 112 922 | 112 923 | 112 924 | 112 925 | 112 926 | 112 927 | 112 928 | 112 929 | 112 930 | 112 931 | 112 932 | 112 933 | 112 934 | 112 935 | 112 936 | 112 937 | 112 938 | 112 939 | 112 940 | 112 941 | 112 942 | 112 943 | 112 944 | 112 945 | 112 946 | 35 947 | 7 948 | 112 949 | 112 950 | 112 951 | 112 952 | 112 953 | 112 954 | 17 955 | 112 956 | 112 957 | 112 958 | 112 959 | 112 960 | 112 961 | 112 962 | 112 963 | 112 964 | 112 965 | 112 966 | 112 967 
| 112 968 | 112 969 | 112 970 | 112 971 | 112 972 | 112 973 | 112 974 | 112 975 | 112 976 | 112 977 | 112 978 | 112 979 | 112 980 | 112 981 | 112 982 | 112 983 | 112 984 | 112 985 | 112 986 | 112 987 | 112 988 | 112 989 | 112 990 | 112 991 | 112 992 | 112 993 | 112 994 | 112 995 | 112 996 | 112 997 | 112 998 | 112 999 | 112 1000 | 112 1001 | 112 1002 | 112 1003 | 112 1004 | 112 1005 | 112 1006 | 112 1007 | 112 1008 | 112 1009 | 112 1010 | 112 1011 | 112 1012 | 112 1013 | 112 1014 | 112 1015 | 112 1016 | 112 1017 | 112 1018 | 112 1019 | 112 1020 | 112 1021 | 112 1022 | 112 1023 | 112 1024 | 112 1025 | 112 1026 | 112 1027 | 112 1028 | 112 1029 | 112 1030 | 32 1031 | 90 1032 | 112 1033 | 62 1034 | 62 1035 | 62 1036 | 62 1037 | 62 1038 | 62 1039 | 62 1040 | 62 1041 | 62 1042 | 62 1043 | 62 1044 | 62 1045 | 62 1046 | 62 1047 | 62 1048 | 62 1049 | 62 1050 | 62 1051 | 62 1052 | 62 1053 | 62 1054 | 62 1055 | 62 1056 | 62 1057 | 62 1058 | 62 1059 | 62 1060 | 62 1061 | 62 1062 | 62 1063 | 62 1064 | 62 1065 | 62 1066 | 62 1067 | 62 1068 | 62 1069 | 62 1070 | 51 1071 | 112 1072 | 112 1073 | 112 1074 | 112 1075 | 112 1076 | 112 1077 | 112 1078 | 112 1079 | 112 1080 | 112 1081 | 62 1082 | 62 1083 | 62 1084 | 62 1085 | 112 1086 | 112 1087 | 112 1088 | 112 1089 | 112 1090 | 112 1091 | 112 1092 | 112 1093 | 112 1094 | 112 1095 | 112 1096 | 112 1097 | 112 1098 | 112 1099 | 112 1100 | 62 1101 | 17 1102 | 51 1103 | 62 1104 | 62 1105 | 62 1106 | 62 1107 | 62 1108 | 62 1109 | 62 1110 | 62 1111 | 62 1112 | 62 1113 | 62 1114 | 62 1115 | 112 1116 | 112 1117 | 62 1118 | 62 1119 | 62 1120 | 62 1121 | 62 1122 | 62 1123 | 62 1124 | 62 1125 | 62 1126 | 62 1127 | 62 1128 | 62 1129 | 112 1130 | 112 1131 | 112 1132 | 112 1133 | 112 1134 | 62 1135 | 112 1136 | 62 1137 | 62 1138 | 62 1139 | 62 1140 | 62 1141 | 62 1142 | 62 1143 | 62 1144 | 62 1145 | 62 1146 | 62 1147 | 62 1148 | 62 1149 | 62 1150 | 62 1151 | 52 1152 | 112 1153 | 112 1154 | 112 1155 | 112 1156 | 112 1157 | 112 1158 | 112 1159 | 112 1160 | 112 1161 | 112 1162 | 112 1163 | 112 1164 | 112 1165 | 112 1166 | 112 1167 | 112 1168 | 112 1169 | 112 1170 | 112 1171 | 112 1172 | 112 1173 | 112 1174 | 112 1175 | 112 1176 | 112 1177 | 112 1178 | 112 1179 | 112 1180 | 112 1181 | 112 1182 | 112 1183 | 112 1184 | 112 1185 | 112 1186 | 112 1187 | 112 1188 | 112 1189 | 112 1190 | 112 1191 | 112 1192 | 112 1193 | 112 1194 | 112 1195 | 112 1196 | 112 1197 | 112 1198 | 112 1199 | 17 1200 | 53 1201 | 53 1202 | 65 1203 | 1 1204 | 6 1205 | 47 1206 | 53 1207 | 47 1208 | 22 1209 | 22 1210 | 22 1211 | 22 1212 | 22 1213 | 22 1214 | 22 1215 | 22 1216 | 22 1217 | 2 1218 | 6 1219 | 59 1220 | 2 1221 | 76 1222 | 76 1223 | 52 1224 | 45 1225 | 76 1226 | 47 1227 | 47 1228 | 59 1229 | 47 1230 | 59 1231 | 2 1232 | 46 1233 | 52 1234 | 27 1235 | 51 1236 | 52 1237 | 52 1238 | 52 1239 | 26 1240 | 26 1241 | 26 1242 | 26 1243 | 26 1244 | 26 1245 | 26 1246 | 26 1247 | 26 1248 | 26 1249 | 26 1250 | 26 1251 | 26 1252 | 26 1253 | 26 1254 | 40 1255 | 40 1256 | 14 1257 | 9 1258 | 20 1259 | 66 1260 | 66 1261 | 52 1262 | 51 1263 | 40 1264 | 48 1265 | 48 1266 | 48 1267 | 17 1268 | 48 1269 | 48 1270 | 52 1271 | 52 1272 | 40 1273 | 48 1274 | 48 1275 | 48 1276 | 48 1277 | 48 1278 | 48 1279 | 40 1280 | 48 1281 | 48 1282 | 51 1283 | 75 1284 | 75 1285 | 75 1286 | 75 1287 | 68 1288 | 68 1289 | 75 1290 | 75 1291 | 68 1292 | 75 1293 | 75 1294 | 75 1295 | 75 1296 | 75 1297 | 75 1298 | 75 1299 | 75 1300 | 75 1301 | 68 1302 | 75 1303 | 75 1304 | 75 1305 | 75 1306 | 68 1307 | 75 1308 | 75 1309 | 75 1310 | 75 1311 | 75 1312 
| 75 1313 | 75 1314 | 75 1315 | 68 1316 | 68 1317 | 68 1318 | 75 1319 | 75 1320 | 75 1321 | 75 1322 | 75 1323 | 75 1324 | 75 1325 | 75 1326 | 75 1327 | 52 1328 | 17 1329 | 52 1330 | 2 1331 | 52 1332 | 68 1333 | 68 1334 | 68 1335 | 68 1336 | 75 1337 | 75 1338 | 75 1339 | 68 1340 | 68 1341 | 68 1342 | 68 1343 | 68 1344 | 68 1345 | 68 1346 | 68 1347 | 68 1348 | 68 1349 | 68 1350 | 68 1351 | 68 1352 | 68 1353 | 68 1354 | 68 1355 | 75 1356 | 75 1357 | 75 1358 | 75 1359 | 75 1360 | 75 1361 | 75 1362 | 75 1363 | 75 1364 | 75 1365 | 75 1366 | 75 1367 | 75 1368 | 75 1369 | 75 1370 | 75 1371 | 44 1372 | 44 1373 | 52 1374 | 17 1375 | 48 1376 | 52 1377 | 50 1378 | 50 1379 | 17 1380 | 40 1381 | 50 1382 | 50 1383 | 50 1384 | 6 1385 | 24 1386 | 40 1387 | 50 1388 | 52 1389 | 52 1390 | 50 1391 | 52 1392 | 50 1393 | 56 1394 | 10 1395 | 24 1396 | 17 1397 | 52 1398 | 14 1399 | 9 1400 | 60 1401 | 9 1402 | 9 1403 | 51 1404 | 50 1405 | 9 1406 | 52 1407 | 76 1408 | 2 1409 | 52 1410 | 40 1411 | 40 1412 | 52 1413 | 40 1414 | 6 1415 | 22 1416 | 22 1417 | 22 1418 | 22 1419 | 22 1420 | 22 1421 | 22 1422 | 22 1423 | 22 1424 | 22 1425 | 22 1426 | 22 1427 | 22 1428 | 22 1429 | 22 1430 | 22 1431 | 22 1432 | 22 1433 | 22 1434 | 22 1435 | 22 1436 | 22 1437 | 22 1438 | 22 1439 | 6 1440 | 6 1441 | 22 1442 | 22 1443 | 14 1444 | 14 1445 | 14 1446 | 14 1447 | 14 1448 | 37 1449 | 14 1450 | 14 1451 | 40 1452 | 40 1453 | 40 1454 | 40 1455 | 40 1456 | 52 1457 | 51 1458 | 17 1459 | 52 1460 | 51 1461 | 52 1462 | 52 1463 | 52 1464 | 52 1465 | 9 1466 | 48 1467 | 52 1468 | 48 1469 | 49 1470 | 52 1471 | 52 1472 | 52 1473 | 52 1474 | 31 1475 | 52 1476 | 48 1477 | 52 1478 | 52 1479 | 31 1480 | 9 1481 | 51 1482 | 52 1483 | 52 1484 | 51 1485 | 17 1486 | 9 1487 | 52 1488 | 31 1489 | 31 1490 | 31 1491 | 31 1492 | 31 1493 | 14 1494 | 52 1495 | 60 1496 | 17 1497 | 27 1498 | 52 1499 | 52 1500 | 48 1501 | 48 1502 | 9 1503 | 42 1504 | 42 1505 | 42 1506 | 42 1507 | 42 1508 | 42 1509 | 52 1510 | 52 1511 | 108 1512 | 26 1513 | 26 1514 | 48 1515 | 51 1516 | 17 1517 | 14 1518 | 49 1519 | 27 1520 | 66 1521 | 21 1522 | 52 1523 | 52 1524 | 31 1525 | 52 1526 | 17 1527 | 64 1528 | 52 1529 | 31 1530 | 14 1531 | 14 1532 | 75 1533 | 62 1534 | 62 1535 | 75 1536 | 75 1537 | 75 1538 | 75 1539 | 75 1540 | 75 1541 | 75 1542 | 62 1543 | 75 1544 | 75 1545 | 75 1546 | 75 1547 | 75 1548 | 75 1549 | 75 1550 | 75 1551 | 75 1552 | 75 1553 | 75 1554 | 75 1555 | 75 1556 | 62 1557 | 62 1558 | 62 1559 | 75 1560 | 75 1561 | 75 1562 | 75 1563 | 75 1564 | 75 1565 | 75 1566 | 75 1567 | 75 1568 | 75 1569 | 75 1570 | 75 1571 | 75 1572 | 75 1573 | 75 1574 | 75 1575 | 75 1576 | 62 1577 | 62 1578 | 75 1579 | 75 1580 | 75 1581 | 75 1582 | 75 1583 | 75 1584 | 75 1585 | 75 1586 | 75 1587 | 75 1588 | 75 1589 | 75 1590 | 75 1591 | 75 1592 | 62 1593 | 75 1594 | 75 1595 | 75 1596 | 75 1597 | 75 1598 | 75 1599 | 75 1600 | 62 1601 | 62 1602 | 62 1603 | 62 1604 | 62 1605 | 62 1606 | 62 1607 | 62 1608 | 62 1609 | 62 1610 | 62 1611 | 62 1612 | 62 1613 | 62 1614 | 62 1615 | 62 1616 | 62 1617 | 75 1618 | 75 1619 | 75 1620 | 75 1621 | 75 1622 | 75 1623 | 75 1624 | 75 1625 | 75 1626 | 75 1627 | 75 1628 | 75 1629 | 75 1630 | 75 1631 | 75 1632 | 75 1633 | 75 1634 | 75 1635 | 75 1636 | 75 1637 | 75 1638 | 62 1639 | 75 1640 | 75 1641 | 75 1642 | 75 1643 | 75 1644 | 75 1645 | 75 1646 | 62 1647 | 62 1648 | 75 1649 | 75 1650 | 75 1651 | 75 1652 | 75 1653 | 75 1654 | 62 1655 | 75 1656 | 75 1657 | 75 1658 | 75 1659 | 75 1660 | 75 1661 | 62 1662 | 75 1663 | 75 1664 | 75 1665 | 62 1666 | 75 1667 | 75 1668 | 75 
1669 | 75 1670 | 75 1671 | 75 1672 | 75 1673 | 48 1674 | 75 1675 | 75 1676 | 75 1677 | 75 1678 | 75 1679 | 62 1680 | 75 1681 | 75 1682 | 75 1683 | 62 1684 | 62 1685 | 62 1686 | 75 1687 | 75 1688 | 75 1689 | 52 1690 | 64 1691 | 52 1692 | 17 1693 | 64 1694 | 31 1695 | 31 1696 | 75 1697 | 75 1698 | 52 1699 | 48 1700 | 75 1701 | 75 1702 | 75 1703 | 75 1704 | 75 1705 | 75 1706 | 75 1707 | 75 1708 | 75 1709 | 75 1710 | 75 1711 | 75 1712 | 75 1713 | 75 1714 | 75 1715 | 75 1716 | 75 1717 | 75 1718 | 62 1719 | 21 1720 | 80 1721 | 75 1722 | 75 1723 | 75 1724 | 62 1725 | 75 1726 | 75 1727 | 75 1728 | 52 1729 | 75 1730 | 75 1731 | 75 1732 | 75 1733 | 24 1734 | 52 1735 | 38 1736 | 52 1737 | 52 1738 | 17 1739 | 52 1740 | 66 1741 | 75 1742 | 75 1743 | 75 1744 | 62 1745 | 48 1746 | 17 1747 | 52 1748 | 66 1749 | 21 1750 | 21 1751 | 11 1752 | 48 1753 | 48 1754 | 48 1755 | 6 1756 | 6 1757 | 66 1758 | 21 1759 | 31 1760 | 25 1761 | 25 1762 | 70 1763 | 70 1764 | 70 1765 | 52 1766 | 48 1767 | 48 1768 | 43 1769 | 70 1770 | 36 1771 | 36 1772 | 36 1773 | 36 1774 | 36 1775 | 36 1776 | 36 1777 | 70 1778 | 18 1779 | 15 1780 | 70 1781 | 15 1782 | 70 1783 | 15 1784 | 18 1785 | 15 1786 | 52 1787 | 88 1788 | 16 1789 | 70 1790 | 16 1791 | 70 1792 | 16 1793 | 70 1794 | 70 1795 | 16 1796 | 83 1797 | 70 1798 | 70 1799 | 13 1800 | 70 1801 | 70 1802 | 6 1803 | 70 1804 | 52 1805 | 40 1806 | 40 1807 | 4 1808 | 4 1809 | 4 1810 | 5 1811 | 5 1812 | 5 1813 | 5 1814 | 5 1815 | 4 1816 | 4 1817 | 5 1818 | 5 1819 | 4 1820 | 5 1821 | 4 1822 | 5 1823 | 5 1824 | 4 1825 | 5 1826 | 5 1827 | 5 1828 | 5 1829 | 5 1830 | 5 1831 | 4 1832 | 5 1833 | 5 1834 | 4 1835 | 5 1836 | 5 1837 | 5 1838 | 5 1839 | 5 1840 | 4 1841 | 4 1842 | 4 1843 | 5 1844 | 5 1845 | 4 1846 | 5 1847 | 4 1848 | 4 1849 | 5 1850 | 4 1851 | 4 1852 | 5 1853 | 4 1854 | 5 1855 | 5 1856 | 5 1857 | 4 1858 | 5 1859 | 5 1860 | 4 1861 | 4 1862 | 4 1863 | 5 1864 | 4 1865 | 5 1866 | 4 1867 | 5 1868 | 4 1869 | 5 1870 | 4 1871 | 5 1872 | 4 1873 | 4 1874 | 5 1875 | 5 1876 | 5 1877 | 5 1878 | 4 1879 | 5 1880 | 4 1881 | 5 1882 | 4 1883 | 4 1884 | 5 1885 | 5 1886 | 4 1887 | 4 1888 | 4 1889 | 4 1890 | 4 1891 | 5 1892 | 5 1893 | 5 1894 | 4 1895 | 4 1896 | 5 1897 | 52 1898 | 4 1899 | 5 1900 | 5 1901 | 5 1902 | 5 1903 | 5 1904 | 5 1905 | 5 1906 | 5 1907 | 5 1908 | 4 1909 | 5 1910 | 4 1911 | 5 1912 | 4 1913 | 4 1914 | 5 1915 | 5 1916 | 4 1917 | 5 1918 | 5 1919 | 5 1920 | 5 1921 | 5 1922 | 4 1923 | 4 1924 | 5 1925 | 4 1926 | 4 1927 | 4 1928 | 5 1929 | 5 1930 | 52 1931 | 52 1932 | 52 1933 | 12 1934 | 45 1935 | 3 1936 | 13 1937 | 52 1938 | 52 1939 | 52 1940 | 52 1941 | 52 1942 | 52 1943 | 12 1944 | 52 1945 | 33 1946 | 98 1947 | 43 1948 | 111 1949 | 59 1950 | 49 1951 | 54 1952 | 3 1953 | 3 1954 | 3 1955 | 3 1956 | 3 1957 | 3 1958 | 3 1959 | 3 1960 | 14 1961 | 14 1962 | 12 1963 | 33 1964 | 31 1965 | 31 1966 | 31 1967 | 31 1968 | 31 1969 | 31 1970 | 14 1971 | 14 1972 | 12 1973 | 55 1974 | 16 1975 | 16 1976 | 12 1977 | 31 1978 | 11 1979 | 12 1980 | 12 1981 | 16 1982 | 16 1983 | 33 1984 | 56 1985 | 23 1986 | 33 1987 | 56 1988 | 48 1989 | 39 1990 | 110 1991 | 97 1992 | 64 1993 | 64 1994 | 64 1995 | 64 1996 | 64 1997 | 64 1998 | 64 1999 | 64 2000 | 48 2001 | --------------------------------------------------------------------------------