├── images
│   ├── Arch.jpg
│   └── Model_arch.jpg
├── Data Cleaning
│   ├── scales_cleaning.py
│   ├── ashrea_cleaning.py
│   └── medium_us_cleaning.py
├── README.md
├── Other_Baseline_models
│   ├── IMBlearn+SMOTE.py
│   ├── Baselines.py
│   └── lstm_model+attention_weights.py
├── TL_model
│   └── Base_TL_model.py
├── Feature_selectin+Base_model
│   └── feature_selection+plots_ashrae+scales.py
└── Thermal_Comfort_Datasets
    └── Scales.csv
/images/Arch.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/anirudhs123/Thermal-comfort-prediction-in-low-resourced-buildings/HEAD/images/Arch.jpg
--------------------------------------------------------------------------------
/images/Model_arch.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/anirudhs123/Thermal-comfort-prediction-in-low-resourced-buildings/HEAD/images/Model_arch.jpg
--------------------------------------------------------------------------------
/Data Cleaning/scales_cleaning.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 |
4 | # Loading data from local disk
5 | # Modify path of the data accordingly
6 |
7 | path_str = '/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/FinalDataset_2019-04-30.csv'
8 | data=pd.read_csv(path_str,encoding='unicode_escape')
9 |
10 | #Indices (1-based) taken from the column description file
11 | index_to_choose_for_cols=[45,44,123,124,120,113,112,40]
12 | cols=data.columns
13 | cols_chosen=[]
14 | for i in index_to_choose_for_cols:
15 | cols_chosen.append(cols[i-1])
16 |
17 | data=data[cols_chosen]
18 | cols=['Age', 'Sex', 'Air velocity (m/s)', 'Air temperature (¡C)',
19 | 'Radiant temperature (¡C)', 'Relative humidity (%)',
20 | 'Outdoor monthly air temperature (¡C)', 'Thermal sensation']
21 |
22 | data.columns=cols
23 |
24 | data=data[data['Age'] !=7]
25 | data=data[data['Sex']!=4]
26 | data=data[data['Sex']!=3]
27 | data=data[data['Thermal sensation'].notna()]
28 |
29 | #The survey used 7 classes, with extra 'slightly cool' and 'slightly warm' options
30 | #Merging those extra options into neighbouring categories gives 5 classes in [-2,2]
31 | #Matching dict
32 | matching_dict={1:-2,2:-1,3:-1,4:0,5:0,6:1,7:2}
33 | def change(x):
34 | return(matching_dict[x])
35 | data['Thermal sensation']=data['Thermal sensation'].apply(lambda x:change(x))
36 |
37 |
38 |
39 | #Imputing the missing values
40 | for col in data.columns:
41 | data[col]=data[col].fillna(data[col].median())
42 |
43 |
44 | data=data[data['Air velocity (m/s)']>=0]
45 | data=data[data['Air temperature (¡C)']>0]
46 | data=data[data['Radiant temperature (¡C)']>0]
47 |
48 | from sklearn.cluster import DBSCAN
49 |
50 | def DBSCAN_outlier_detection(data):
51 | outlier_detection=DBSCAN(min_samples=5,eps=3)
52 | clusters=outlier_detection.fit_predict(data)
53 | data['Clusters']=clusters
54 |     data=data[data['Clusters']!=-1]   #DBSCAN labels outliers/noise as -1
55 | data=data.drop(['Clusters'],axis=1)
56 | return(data)
57 |
58 | data=DBSCAN_outlier_detection(data)
59 | data=data.drop_duplicates()
60 | data.to_csv('Scales.csv')
61 |
62 |
--------------------------------------------------------------------------------
/Data Cleaning/ashrea_cleaning.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 |
4 | # Loading the Raw datafile
5 | # Modify pathstr accordingly
6 | path_str = '/content/drive/MyDrive/Colab Notebooks/Thermal sensation prediction(Murata)/ashrae_db2.01.csv'
7 | data_ash=pd.read_csv(path_str,encoding='ISO-8859-1')
8 |
9 | #Keeping only the records that use air conditioning as their cooling strategy
10 | #(the raw column name 'Cooling startegy_building level' is misspelled in the dataset itself)
11 | data_ash=data_ash[data_ash['Cooling startegy_building level']=='Air Conditioned']
12 |
13 | #Clipping the range from [-3,3] to [-2,2]
14 | data_ash['Thermal sensation'] = data_ash['Thermal sensation'].apply(lambda x: -2 if x <= -2 else x)
15 | data_ash['Thermal sensation'] = data_ash['Thermal sensation'].apply(lambda x: 2 if x >= 2 else x)
16 | #Rounding off the values to make it categorical in nature
17 | data_ash['Thermal sensation'] = data_ash['Thermal sensation'].apply(lambda x: np.round(x))
18 |
19 |
20 | data_ash=data_ash.loc[data_ash['Thermal sensation'].notnull()]
21 | data_ash=data_ash.loc[data_ash['Sex'].notnull()]
22 |
23 | #data_ash['Cooling startegy_building level'].value_counts()
24 | print(data_ash['Thermal sensation'].value_counts())
25 |
26 | data_ash=data_ash[['Age','Sex','Air velocity (m/s)','Air temperature (¡C)','Radiant temperature (¡C)','Relative humidity (%)','Clo', 'Met','Outdoor monthly air temperature (¡C)','Thermal sensation']]
27 |
28 | data_ash=data_ash.drop_duplicates()
29 |
30 | def fun(str1):
31 |     if(str1=='Female'):
32 |         return(2.0)
33 |     elif(str1=='Male'):
34 |         return(1.0)
35 |
36 | #Encoding Sex as numeric: Female -> 2.0, Male -> 1.0
37 | data_ash['Sex']=data_ash['Sex'].apply(lambda x: fun(x))
38 |
39 | print(data_ash.columns)
40 |
41 | data_ash=data_ash[data_ash['Air velocity (m/s)']>0]
42 | data_ash=data_ash[data_ash['Air temperature (¡C)']>0]
43 | data_ash=data_ash[data_ash['Radiant temperature (¡C)']>0]
44 |
45 | print(data_ash['Clo'].describe())
46 |
47 | data_ash=data_ash.fillna(data_ash.median())
48 |
49 | data_ash=data_ash.drop_duplicates()
50 | data_ash=data_ash.dropna()
51 |
52 |
53 |
54 | from sklearn.cluster import DBSCAN
55 |
56 | def DBSCAN_outlier_detection(data):
57 | outlier_detection=DBSCAN(min_samples=5,eps=3)
58 | clusters=outlier_detection.fit_predict(data)
59 | data['Clusters']=clusters
60 |     data=data[data['Clusters']!=-1]   #DBSCAN labels outliers/noise as -1
61 | data=data.drop(['Clusters'],axis=1)
62 | return(data)
63 |
64 | data_ash=DBSCAN_outlier_detection(data_ash)
65 |
66 | data_ash.to_csv('Ashrae.csv')
--------------------------------------------------------------------------------
/Data Cleaning/medium_us_cleaning.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 |
4 | cols=['Age', 'Sex', 'Air velocity (m/s)', 'Air temperature (¡C)',
5 | 'Radiant temperature (¡C)', 'Relative humidity (%)', 'Clo', 'Met',
6 | 'Outdoor monthly air temperature (¡C)', 'Thermal sensation']
7 |
8 | # Reading the raw data files
9 | # Modify path_str accordingly
10 | path_str = '/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/LANGEVIN_DATA.txt'
11 | f=open(path_str,'r')
12 | text_us=f.readlines()
13 |
14 | #Collect the indices of malformed lines (those without the expected 118 fields)
15 | lens=[]
16 | for i in range(len(text_us)):
17 |     if(len(text_us[i].split())!=118):
18 |         lens.append((i,len(text_us[i].split())))
19 | dicts={}
20 | for i in range(1,119):
21 | dicts[i]=[]
22 | for i in range(len(text_us)):
23 |     temp=text_us[i].split()
24 |     if(len(temp)==118):  #skip malformed lines so all columns end up the same length
25 |         for j in range(len(temp)):
26 |             dicts[j+1].append(float(temp[j]))
27 |
28 | df=pd.DataFrame(dicts)
29 |
30 | # Using only significant cols
31 | # Use Metadata file to identify the corresponding column numbers
32 | data_us=df[[27,26,8,6,9,7,17,16,12,48]]
33 |
34 | data_us.columns=cols
35 |
36 | data_us=data_us.loc[data_us['Thermal sensation'].notnull()]
37 | data_us=data_us.loc[data_us['Sex'].notnull()]
38 |
39 |
40 |
41 | #Clipping the range from [-3,3] to [-2,2]
42 | data_us['Thermal sensation'] = data_us['Thermal sensation'].apply(lambda x: -2 if x <= -2 else x)
43 | data_us['Thermal sensation'] = data_us['Thermal sensation'].apply(lambda x: 2 if x >= 2 else x)
44 | #Rounding off the values to make it categorical in nature
45 | data_us['Thermal sensation'] = data_us['Thermal sensation'].apply(lambda x: np.round(x))
46 |
47 | data_us = data_us.fillna(value=data_us.median())
48 | data_us=data_us.drop_duplicates()
49 | data_us.dropna(inplace=True)
50 |
51 | data_us=data_us[data_us['Air velocity (m/s)']>0]
52 | data_us=data_us[data_us['Air temperature (¡C)']>0]
53 | data_us=data_us[data_us['Radiant temperature (¡C)']>0]
54 |
55 | cols=['Air velocity (m/s)','Air temperature (¡C)','Radiant temperature (¡C)','Relative humidity (%)']
56 | for col in cols:
57 | data_us[col]=data_us[col].apply(lambda x: np.round(x,2))
58 |
59 | from sklearn.cluster import DBSCAN
60 |
61 | def DBSCAN_outlier_detection(data):
62 | outlier_detection=DBSCAN(min_samples=5,eps=3)
63 | clusters=outlier_detection.fit_predict(data)
64 | data['Clusters']=clusters
65 |     data=data[data['Clusters']!=-1]   #DBSCAN labels outliers/noise as -1
66 | data=data.drop(['Clusters'],axis=1)
67 | return(data)
68 |
69 |
70 | data_us=DBSCAN_outlier_detection(data_us)
71 | data_us.to_csv('Medium_US.csv')
72 |
73 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# A-hybrid-deep-transfer-learning-strategy-for-thermal-comfort-prediction-in-buildings

This work presents a novel hybrid deep transfer learning method for thermal comfort prediction in buildings with little or no labelled data. We propose a deep learning architecture that jointly uses CNNs and LSTMs, together with knowledge transfer from a source to a target domain, to accurately predict thermal comfort in low-resourced buildings. We perform an extensive ablation study and a comparative analysis against other state-of-the-art models, outperforming them on various quality metrics. For detailed information regarding this work, please visit [our publication](https://www.sciencedirect.com/science/article/abs/pii/S0360132321005345).
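In brief, the ordered vector of thermal comfort parameters (TCPs) is treated as a short sequence, passed through a 1-D convolution, then stacked LSTMs, and finally dense layers ending in a 5-class softmax over the sensation labels. Below is a minimal sketch of that base CNN-LSTM in TensorFlow/Keras: the layer sizes mirror `build_model_LSTM_CNN` in `TL_model/Base_TL_model.py`, while the toy data and shapes here are illustrative assumptions, not the repository's actual training setup.

```python
import numpy as np
from tensorflow.keras import layers, models

def build_cnn_lstm(n_features, n_classes=5):
    # Each sample is the ordered TCP vector, reshaped to (n_features, 1)
    # so the Conv1D/LSTM stack can read the feature order as a sequence.
    model = models.Sequential([
        layers.Input(shape=(n_features, 1)),
        layers.Conv1D(filters=128, kernel_size=5, padding='same'),
        layers.SpatialDropout1D(0.1),
        layers.LSTM(256, return_sequences=True),
        layers.LSTM(256, return_sequences=True),
        layers.Flatten(),
        layers.Dense(64, activation='relu'),
        layers.Dense(32, activation='relu'),
        layers.Dense(16, activation='relu'),
        layers.Dense(8, activation='relu'),
        layers.Dense(n_classes, activation='softmax'),
    ])
    model.compile(loss='sparse_categorical_crossentropy',
                  optimizer='adam', metrics=['accuracy'])
    return model

# Illustrative toy data: nine TCPs per sample, five sensation classes in [0, 4].
X = np.random.rand(256, 9, 1)
y = np.random.randint(0, 5, size=256)
model = build_cnn_lstm(n_features=9)
model.fit(X, y, epochs=2, batch_size=64, verbose=0)
```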

## Key Contributions
1. A transfer learning based CNN-LSTM (TL CNN-LSTM) model is presented for accurate thermal comfort prediction in buildings with limited modeling data across different climate zones. The design of TL CNN-LSTM addresses two significant challenges: the identification of significant thermal comfort parameters (TCPs) and the imbalanced nature of the data.
2. The developed model takes personal, indoor, and outdoor features from the source datasets as input in a specific order and captures the spatio-temporal relations for accurate thermal comfort modeling.
3. Extensive experiments on the ASHRAE RP-884, Scales project, and Medium US office datasets show that TL CNN-LSTM outperforms state-of-the-art thermal comfort algorithms in terms of various quality metrics (accuracy, ROC-AUC score, Matthews correlation coefficient).
4. Studies on the impact of significant TCPs and their different combinations on thermal comfort modeling indicate that TL CNN-LSTM achieves its best prediction performance with nine TCPs (PMV, personal, and outdoor environmental factors).
5. Experiments analyzing the impact of (i) CNN and LSTM layers on TL CNN-LSTM, (ii) CNN-LSTM layers for parameter transfer, and (iii) the size of the target dataset on TL CNN-LSTM and CNN-LSTM demonstrate the effectiveness and applicability of the proposed transductive transfer learning based thermal comfort model for buildings with limited modeling data (see the sketch below).
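To make the last point concrete, here is a minimal sketch of the parameter-transfer step, continuing the toy example above (it reuses `build_cnn_lstm` and `np`): train on the data-rich source domain, copy the weights into an identical target model, freeze the transferred layers, and fine-tune only the classifier head on the small labelled target set. Exactly which layers are transferred and frozen is the design choice studied in the paper; the repository's full pipeline, including class balancing and checkpointing, is in `TL_model/Base_TL_model.py`, and the split sizes here are illustrative assumptions.

```python
# Continues the sketch above (assumes build_cnn_lstm and numpy as np are defined).
X_src, y_src = np.random.rand(512, 9, 1), np.random.randint(0, 5, 512)  # source domain
X_tgt, y_tgt = np.random.rand(64, 9, 1), np.random.randint(0, 5, 64)    # scarce target labels

# 1. Train on the data-rich source domain.
source = build_cnn_lstm(n_features=9)
source.fit(X_src, y_src, epochs=2, batch_size=64, verbose=0)

# 2. Transfer the learned parameters, freeze the early layers,
#    and fine-tune only the classifier head on the target domain.
target = build_cnn_lstm(n_features=9)
target.set_weights(source.get_weights())
for layer in target.layers[:-2]:
    layer.trainable = False
target.compile(loss='sparse_categorical_crossentropy',
               optimizer='adam', metrics=['accuracy'])  # re-compile after freezing
target.fit(X_tgt, y_tgt, epochs=2, batch_size=16, verbose=0)
```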

Link to paper: https://www.sciencedirect.com/science/article/abs/pii/S0360132321005345

## People

This work has been developed by [Anirudh Sriram](https://github.com/anirudhs123), [Dr. Nivethitha Somu](https://scholar.google.com/citations?user=q1M0BgIAAAAJ&hl=en), [Prof. Anupama Kowli](https://www.ee.iitb.ac.in/web/people/faculty/home/anu) and [Prof. Krithi Ramamritham](https://www.iitb.ac.in/en/employee/prof-krithi-ramamritham) from the Indian Institute of Technology Madras and the Indian Institute of Technology Bombay. Ask us your questions at [anirudhsriram30799@gmail.com](mailto:anirudhsriram30799@gmail.com).
--------------------------------------------------------------------------------
/Other_Baseline_models/IMBlearn+SMOTE.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """Untitled0.ipynb
3 |
4 | Automatically generated by Colaboratory.
5 |
6 | Original file is located at
7 | https://colab.research.google.com/drive/14boSphvuxVE_x8cI-sHxTgBZLHDbcPLI
8 | """
9 |
10 | import pandas as pd
11 | import numpy as np
12 | data_us=pd.read_csv('/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/Medium_office.csv')
13 | data_C=pd.read_csv('/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/Ashrae_clean.csv')
14 |
15 | data_C.drop(['Unnamed: 0'],axis=1,inplace=True)
16 | data_C=data_C.drop_duplicates(['Age', 'Sex','Air velocity (m/s)', 'Air temperature (¡C)', 'Radiant temperature (¡C)', 'Relative humidity (%)', 'Clo', 'Met', 'Outdoor monthly air temperature (¡C)'])
17 |
18 | data_us.drop(['Unnamed: 0'],axis=1,inplace=True)
19 | data_us=data_us.drop_duplicates(['Age', 'Sex','Air velocity (m/s)', 'Air temperature (¡C)', 'Radiant temperature (¡C)', 'Relative humidity (%)', 'Clo', 'Met', 'Outdoor monthly air temperature (¡C)'])
20 |
21 | data_us_C=data_us
22 | data_us_B=data_us.drop(['Outdoor monthly air temperature (¡C)'],axis=1)
23 | data_us_A=data_us_B.drop(['Age','Sex'],axis=1)
24 |
25 | data_B=data_C.drop(['Outdoor monthly air temperature (¡C)'],axis=1)
26 | data_A=data_B.drop(['Age','Sex'],axis=1)
27 |
28 | def fun1(x):
29 | return(np.round(x,2))
30 |
31 | cols=['Air velocity (m/s)','Air temperature (¡C)','Radiant temperature (¡C)','Relative humidity (%)']
32 | for col in cols:
33 | data_us_A[col]=data_us_A[col].apply(lambda x:fun1(x))
34 | data_us_B[col]=data_us_B[col].apply(lambda x:fun1(x))
35 | data_us_C[col]=data_us_C[col].apply(lambda x:fun1(x))
36 |
37 | from sklearn.model_selection import train_test_split, StratifiedShuffleSplit, GridSearchCV, cross_val_score
38 | from sklearn.pipeline import make_pipeline
39 | from imblearn.pipeline import make_pipeline as imbalanced_make_pipeline
40 | from imblearn.over_sampling import SMOTE, ADASYN
41 | from imblearn.under_sampling import RandomUnderSampler
42 |
43 | from sklearn.preprocessing import RobustScaler
44 | import warnings
45 | warnings.filterwarnings("ignore")
46 |
47 | X_train=data_C.drop(['Thermal sensation'],axis=1)
48 | y_train=data_C['Thermal sensation']
49 | X_test=data_us_C.drop(['Thermal sensation'],axis=1)
50 | y_test=data_us_C['Thermal sensation']
51 |
52 | X_bal, y_bal = ADASYN(sampling_strategy='minority',random_state=0).fit_resample(X_train,y_train)
53 |
54 | # Join X and y
55 | X_bal = pd.DataFrame(X_bal,columns=X_train.columns)
56 | y_bal = pd.DataFrame(y_bal,columns=['Thermal Sensation'])
57 | balanced = X_bal.join(y_bal)
58 |
59 | #Importing the metrics to compare the model performance
60 | from sklearn.metrics import accuracy_score,f1_score,precision_score,recall_score
61 | from sklearn.metrics import matthews_corrcoef,confusion_matrix
62 | #This function evaluates a fitted model and reports its performance
63 | def model_performance(model,X_test,y_test,model_name):
64 |     y_pred=model.predict(X_test)
65 |     acc=accuracy_score(y_test,y_pred)
66 |     cm=confusion_matrix(y_test,y_pred)
67 |     recalls = np.diag(cm) / np.sum(cm, axis = 1)    #per-class recall
68 |     precisions = np.diag(cm) / np.sum(cm, axis = 0) #per-class precision
69 |     precs=np.mean(precisions)
70 |     recall=np.mean(recalls)
71 |     f1=(2*precs*recall)/(precs+recall)
72 |     coeff=matthews_corrcoef(y_test,y_pred)
73 |     print('Accuracy of {} is: {}'.format(model_name,acc))
74 |     print('F1_Score of {} is: {}'.format(model_name,f1))
75 |     print('Precision of {} is: {}'.format(model_name,precs))
76 |     print('Recall of {} is: {}'.format(model_name,recall))
77 |     print('Matthews_coeff of {} is: {}'.format(model_name,coeff))
78 |     print('Confusion matrix of {} is:'.format(model_name))
79 |     print(cm)
80 | from sklearn.ensemble import RandomForestClassifier
81 | RF_model=RandomForestClassifier(n_estimators=200,class_weight='balanced_subsample',max_depth=12)
82 |
83 | model=RF_model
84 | model.fit(X_bal,y_bal.values.ravel())
85 | model_performance(model,X_test,y_test,'RF_model')
86 |
87 |
88 |
89 |
90 |
91 | from imblearn.ensemble import BalancedRandomForestClassifier
92 | from imblearn.over_sampling import SMOTE
93 | from imblearn.pipeline import Pipeline
94 |
95 | # transform the dataset
96 | oversample = SMOTE()
97 | X_1, y_1 = oversample.fit_resample(X_train, y_train)
98 |
99 | from collections import Counter
100 | counter = Counter(y_train)
101 | print(counter)
102 |
103 | over = SMOTE(sampling_strategy={0:18000,1:18000,2:18000,3:18000,4:18000})
104 | under = RandomUnderSampler(sampling_strategy={0:10000,1:10000,2:10000,3:10000,4:10000})
105 | steps = [('o', over), ('u', under)]
106 | pipeline = Pipeline(steps=steps)
107 | # transform the dataset
108 | X, y = pipeline.fit_resample(X_train, y_train)
109 |
110 | RF_model=RandomForestClassifier(n_estimators=100,class_weight='balanced_subsample',max_depth=12)
111 | model=RF_model
112 | model.fit(X,y)
113 | model_performance(model,X_test,y_test,'RF_model')
114 |
115 | from keras.models import Sequential
116 | from keras.layers import Dense,Dropout,ReLU
117 | from keras.optimizers import Adam
118 | from keras.callbacks import EarlyStopping
119 | from keras.utils import to_categorical
120 | from keras.layers import Embedding,Conv1D,LSTM,Input,TimeDistributed,SpatialDropout1D,Flatten,Dropout
121 | from keras.models import Model
122 | from keras.callbacks import ModelCheckpoint
123 | from sklearn.preprocessing import MinMaxScaler
124 | def build_model_LSTM(X_train,y_train):
125 | y_train=np.asarray(y_train ,dtype=int)
126 | y_train=to_categorical(y_train,num_classes=5)
127 | model=Sequential()
128 | model.add(Embedding(101,256,input_length=len(X_train[0]),))
129 | model.add(Conv1D(filters=128,kernel_size=5,padding='same'))
130 | model.add(SpatialDropout1D(0.1))
131 | model.add(LSTM(256,return_sequences=True))
132 | model.add(LSTM(256,return_sequences=True))
133 | model.add(Flatten())
134 | model.add(Dense(64,activation='relu'))
135 | model.add(Dropout(0.1))
136 | model.add(Dense(16,activation='relu'))
137 | model.add(Dropout(0.1))
138 | model.add(Dense(5,activation='softmax'))
139 | model.compile(loss='categorical_crossentropy',optimizer=Adam(lr=0.001,beta_1=0.99,beta_2=0.999),metrics=['accuracy'])
140 |     es=EarlyStopping(monitor='val_loss',restore_best_weights=True)
141 |     model.fit(X_train,y_train,epochs=20,validation_split=0.2,batch_size=128,callbacks=[es])
142 | model.summary()
143 | return(model)
144 |
145 | #Importing the metrics to compare the model performance
146 | from sklearn.metrics import accuracy_score,f1_score,precision_score,recall_score
147 | from sklearn.metrics import matthews_corrcoef,confusion_matrix
148 | #This function evaluates a fitted DL model and reports its performance
149 | def model_performance_MLP(model,X_test,y_test,model_name):
150 |     y_pred=model.predict(X_test)
151 |     y_preds=[]
152 |     for i in range(len(y_pred)):
153 |         y_preds.append(np.argmax(y_pred[i]))
154 |
155 |     y_pred=y_preds
156 |     acc=accuracy_score(y_test,y_pred)
157 |     cm=confusion_matrix(y_test,y_pred)
158 |     recalls = np.diag(cm) / np.sum(cm, axis = 1)    #per-class recall
159 |     precisions = np.diag(cm) / np.sum(cm, axis = 0) #per-class precision
160 |     precs=np.mean(precisions)
161 |     recall=np.mean(recalls)
162 |     f1=(2*precs*recall)/(precs+recall)
163 |     coeff=matthews_corrcoef(y_test,y_pred)
164 |     print('Accuracy of {} is: {}'.format(model_name,acc))
165 |     print('F1_Score of {} is: {}'.format(model_name,f1))
166 |     print('Precision of {} is: {}'.format(model_name,precs))
167 |     print('Recall of {} is: {}'.format(model_name,recall))
168 |     print('Matthews_coeff of {} is: {}'.format(model_name,coeff))
169 |     print('Confusion matrix of {} is:'.format(model_name))
170 |     print(cm)
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
--------------------------------------------------------------------------------
/Other_Baseline_models/Baselines.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 |
4 | from keras.models import Sequential
5 | from keras.layers import Dense,Dropout,ReLU
6 | from keras.optimizers import Adam
7 | from keras.callbacks import EarlyStopping
8 | from keras.utils import to_categorical
9 | from keras.layers import Embedding,Conv1D,LSTM,Input,TimeDistributed,SpatialDropout1D,Flatten,Dropout
10 | from keras.models import Model
11 | from keras.callbacks import ModelCheckpoint
12 |
13 | #Importing models from sklearn library
14 | #Trying various inbuilt models
15 | #SVM,Naive bayes,KNN,Random Forest, Decision Trees
16 | from sklearn.naive_bayes import GaussianNB
17 | from sklearn.neighbors import KNeighborsClassifier
18 | from sklearn.ensemble import RandomForestClassifier
19 | from sklearn.tree import DecisionTreeClassifier
20 | from sklearn.svm import SVC
21 | from sklearn.ensemble import AdaBoostClassifier
22 |
23 |
24 | #For building an ANN model
25 | #Importing necessary layers from keras
26 | from keras.models import Sequential
27 | from keras.layers import Dense,Dropout,ReLU
28 | from keras.optimizers import Adam
29 | from keras.callbacks import EarlyStopping
30 | from keras.utils import to_categorical
31 |
32 |
33 | #Importing the metrics to compare the model performance
34 | from sklearn.metrics import accuracy_score,f1_score,precision_score,recall_score
35 | from sklearn.metrics import matthews_corrcoef,confusion_matrix
36 |
37 |
38 | # Loading data
39 | # Modify path to each datasets accordingly
40 | data_us=pd.read_csv('/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/Medium_office.csv')
41 | data_ash=pd.read_csv('/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/Ashrae_clean.csv')
42 |
43 | # CLASSIFICATION MODELS
44 |
45 | data_us_C=data_us
46 | data_us_B=data_us.drop(['Outdoor monthly air temperature (¡C)'],axis=1)
47 | data_us_A=data_us_B.drop(['Age','Sex'],axis=1)
48 |
49 | data_B=data_ash.drop(['Outdoor monthly air temperature (¡C)'],axis=1)
50 | data_A=data_B.drop(['Age','Sex'],axis=1)
51 |
52 | def fun1(x):
53 | return(np.round(x,2))
54 |
55 | cols=['Air velocity (m/s)','Air temperature (¡C)','Radiant temperature (¡C)','Relative humidity (%)']
56 | for col in cols:
57 | data_us_A[col]=data_us_A[col].apply(lambda x:fun1(x))
58 | data_us_B[col]=data_us_B[col].apply(lambda x:fun1(x))
59 | data_us_C[col]=data_us_C[col].apply(lambda x:fun1(x))
60 |
61 |
62 | #Defining the models
63 | #Using class weights as 'balanced' (heuristic approach to handle imbalanced classes)
64 | SVM_lin_model=SVC(class_weight='balanced',kernel='linear')
65 | SVM_poly_model=SVC(class_weight='balanced',kernel='poly')
66 | SVM_radial_model=SVC(class_weight='balanced',kernel='rbf')
67 | NB_model=GaussianNB()
68 | #Instead of giving equal weights to all neighbours, here the weights are inversely
69 | #proportional to the distance of the point from the query point
70 | KNN_model=KNeighborsClassifier(n_neighbors=10,weights='distance')
71 | RF_model=RandomForestClassifier(n_estimators=200,class_weight='balanced_subsample')
72 | DT_model=DecisionTreeClassifier(max_depth=5,class_weight='balanced')
73 | Adaboost_model=AdaBoostClassifier(n_estimators=200,random_state=1)
74 |
75 |
76 | #Defining a common function to handle all the three datasets
77 | #I tried adding Dropout layers; performance decreased, hence they were removed.
78 | #Tried different initializers as well; not much difference.
79 | def build_model(X_train,y_train):
80 | y_train=np.asarray(y_train ,dtype=int)
81 | MLP_model=Sequential()
82 | sums=[0,0,0,0,0]
83 | for i in range(len(y_train)):
84 | sums[y_train[i]]+=1
85 | MLP_model.add(Dense(1024,activation='relu',input_shape=(len(list(X_train.columns)),)))
86 | MLP_model.add(Dense(512,activation='relu',kernel_initializer='he_uniform'))
87 | MLP_model.add(Dense(256,activation='relu',kernel_initializer='he_uniform'))
88 | MLP_model.add(Dense(128,activation='relu',kernel_initializer='he_uniform'))
89 | MLP_model.add(Dense(64,activation='relu',kernel_initializer='he_uniform'))
90 | MLP_model.add(Dense(32,activation='relu',kernel_initializer='he_uniform'))
91 | MLP_model.add(Dense(16,activation='relu',kernel_initializer='he_uniform'))
92 | MLP_model.add(Dense(8,activation='relu',kernel_initializer='he_uniform'))
93 | MLP_model.add(Dense(5,activation='softmax'))
94 | MLP_model.compile(optimizer=Adam(lr=0.001),metrics=['accuracy'],loss='sparse_categorical_crossentropy')
95 | #Early stopping to prevent overfitting
96 | #es=EarlyStopping(monitor='val_loss',patience=2,restore_best_weights=True)
97 | MLP_model.fit(X_train,y_train,epochs=20,validation_split=0.2,batch_size=32,class_weight={0:1/sums[0],1:1/sums[1],2:1/sums[2],3:1/sums[3],4:1/sums[4]})
98 | return(MLP_model)
99 |
100 | #This function evaluates a fitted model and reports its performance
101 | def model_performance(model,X_test,y_test,model_name):
102 |     y_pred=model.predict(X_test)
103 |     acc=accuracy_score(y_test,y_pred)
104 |     cm=confusion_matrix(y_test,y_pred)
105 |     recalls = np.diag(cm) / np.sum(cm, axis = 1)    #per-class recall
106 |     precisions = np.diag(cm) / np.sum(cm, axis = 0) #per-class precision
107 |     precs=np.mean(precisions)
108 |     recall=np.mean(recalls)
109 |     f1=(2*precs*recall)/(precs+recall)
110 |     coeff=matthews_corrcoef(y_test,y_pred)
111 |     print('Accuracy of {} is: {}'.format(model_name,acc))
112 |     print('F1_Score of {} is: {}'.format(model_name,f1))
113 |     print('Precision of {} is: {}'.format(model_name,precs))
114 |     print('Recall of {} is: {}'.format(model_name,recall))
115 |     print('Matthews_coeff of {} is: {} '.format(model_name,coeff))
116 |     print('Confusion matrix of {} is:'.format(model_name))
117 |     print(cm)
118 |
119 | #Importing the metrics to compare the model performance
120 | from sklearn.metrics import accuracy_score,f1_score,precision_score,recall_score
121 | from sklearn.metrics import matthews_corrcoef,confusion_matrix
122 | #This function evaluates a fitted DL model and reports its performance
123 | def model_performance_MLP(model,X_test,y_test,model_name):
124 |     y_pred=model.predict(X_test)
125 |     y_preds=[]
126 |     for i in range(len(y_pred)):
127 |         y_preds.append(np.argmax(y_pred[i]))
128 |
129 |     y_pred=y_preds
130 |     acc=accuracy_score(y_test,y_pred)
131 |     cm=confusion_matrix(y_test,y_pred)
132 |     recalls = np.diag(cm) / np.sum(cm, axis = 1)    #per-class recall
133 |     precisions = np.diag(cm) / np.sum(cm, axis = 0) #per-class precision
134 |     precs=np.mean(precisions)
135 |     recall=np.mean(recalls)
136 |     f1=(2*precs*recall)/(precs+recall)
137 |     coeff=matthews_corrcoef(y_test,y_pred)
138 |     print('Accuracy of {} is: {}'.format(model_name,acc))
139 |     print('F1_Score of {} is: {}'.format(model_name,f1))
140 |     print('Precision of {} is: {}'.format(model_name,precs))
141 |     print('Recall of {} is: {}'.format(model_name,recall))
142 |     print('Matthews_coeff of {} is: {} '.format(model_name,coeff))
143 |     print('Confusion matrix of {} is:'.format(model_name))
144 |     print(cm)
145 |
146 |
147 |
148 | models=[SVM_lin_model,SVM_poly_model,SVM_radial_model,NB_model,KNN_model,RF_model,DT_model,Adaboost_model]  #the MLP is built separately via build_model
149 |
150 | X_train=data_ash.drop(['Thermal sensation'],axis=1)
151 | y_train=data_ash['Thermal sensation']
152 | X_test=data_us_C.drop(['Thermal sensation'],axis=1)
153 | y_test=data_us_C['Thermal sensation']
154 | model=RF_model
155 | model.fit(X_train,y_train)
156 | model_performance(model,X_test,y_test,str(model))
157 |
158 |
159 |
160 | def build_model_LSTM(X_train,y_train):
161 | y_train=np.asarray(y_train ,dtype=int)
162 | sums=[0,0,0,0,0]
163 | for i in range(len(y_train)):
164 | sums[y_train[i]]+=1
165 | y_train=to_categorical(y_train,num_classes=5)
166 | model=Sequential()
167 | model.add(Embedding(101,256,input_length=len(X_train.columns),))
168 | model.add(LSTM(256,return_sequences=True))
169 | model.add(LSTM(256,return_sequences=True))
170 | model.add(Flatten())
171 | model.add(Dense(64,activation='relu'))
172 | model.add(Dropout(0.1))
173 | model.add(Dense(16,activation='relu'))
174 | model.add(Dropout(0.1))
175 | model.add(Dense(5,activation='softmax'))
176 | model.compile(loss='categorical_crossentropy',optimizer=Adam(lr=0.001,beta_1=0.99,beta_2=0.999),metrics=['accuracy'],weighted_metrics=['accuracy'])
177 |     es=EarlyStopping(monitor='val_loss',restore_best_weights=True)
178 |     model.fit(X_train,y_train,epochs=20,validation_split=0.2,batch_size=64,callbacks=[es],class_weight={0:1/sums[0],1:1/sums[1],2:1/sums[2],3:1/sums[3],4:1/sums[4]})
179 | model.summary()
180 | return(model)
181 |
182 | X_train=data_A.drop(['Thermal sensation'],axis=1)
183 | X_train=X_train.drop(['Unnamed: 0'],axis=1)
184 | y_train=data_A['Thermal sensation']
185 | X_test=data_us_A.drop(['Thermal sensation'],axis=1)
186 | X_test=X_test.drop(['Unnamed: 0'],axis=1)
187 | y_test=data_us_A['Thermal sensation']
188 | model=build_model_LSTM(X_train,y_train)
189 | model_performance_MLP(model,X_test,y_test,str(model))
190 |
--------------------------------------------------------------------------------
/Other_Baseline_models/lstm_model+attention_weights.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 | import numpy as np
4 |
5 | #Importing models from sklearn library
6 | #Trying various inbuilt models
7 | #SVM,Naive bayes,KNN,Random Forest, Decision Trees
8 |
9 | from sklearn.naive_bayes import GaussianNB
10 | from sklearn.neighbors import KNeighborsClassifier
11 | from sklearn.ensemble import RandomForestClassifier
12 | from sklearn.tree import DecisionTreeClassifier
13 | from sklearn.svm import SVC
14 | from sklearn.ensemble import AdaBoostClassifier
15 |
16 | from sklearn import *
17 |
18 | #For plotting
19 | import matplotlib.pyplot as plt
20 | import seaborn as sns
21 |
22 | from tensorflow.keras import models, layers, preprocessing as kprocessing
23 | from tensorflow.keras import backend as K
24 |
25 | from keras.models import Sequential
26 | from keras.layers import Dense,Dropout,ReLU
27 | from keras.optimizers import Adam
28 | from keras.callbacks import EarlyStopping,ModelCheckpoint
29 | from keras.utils import to_categorical
30 | from keras.layers import Embedding,Conv1D,LSTM,Input,TimeDistributed,SpatialDropout1D,Flatten,Dropout
31 | from keras.models import Model
32 |
33 |
34 | from sklearn.preprocessing import MinMaxScaler
35 | from sklearn.decomposition import PCA
36 |
37 | #Reading ASHRAE AND SCALES DATA
38 | data_us=pd.read_csv('/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/Medium_office.csv')
39 | data_C=pd.read_csv('/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/Ashrae_clean.csv')
40 |
41 | data_C.drop(['Unnamed: 0'],axis=1,inplace=True)
42 | data_C=data_C.drop_duplicates(['Age', 'Sex','Air velocity (m/s)', 'Air temperature (¡C)', 'Radiant temperature (¡C)', 'Relative humidity (%)', 'Clo', 'Met', 'Outdoor monthly air temperature (¡C)'])
43 |
44 | data_us.drop(['Unnamed: 0'],axis=1,inplace=True)
45 | data_us=data_us.drop_duplicates(['Age', 'Sex','Air velocity (m/s)', 'Air temperature (¡C)', 'Radiant temperature (¡C)', 'Relative humidity (%)', 'Clo', 'Met', 'Outdoor monthly air temperature (¡C)'])
46 |
47 | data_us_C=data_us
48 |
49 | def fun1(x):
50 | return(np.round(x,2))
51 |
52 | cols=['Air velocity (m/s)','Air temperature (¡C)','Radiant temperature (¡C)','Relative humidity (%)']
53 | for col in cols:
54 | data_us_C[col]=data_us_C[col].apply(lambda x:fun1(x))
55 |
56 | #Creating the Train and Test dataframes
57 | data_C=data_C[data_C>0]
58 | data_C=data_C.dropna()
59 | X_train=data_C.drop(['Thermal sensation'],axis=1)
60 | y_train=data_C['Thermal sensation'].values
61 |
62 |
63 | data_us_C=data_us_C[data_us_C>0]
64 | data_us_C=data_us_C.dropna()
65 | X_test=data_us_C.drop(['Thermal sensation'],axis=1)
66 | y_test=data_us_C['Thermal sensation'].values
67 |
68 |
69 | # PCA instance retaining 99% of the variance
70 | pca = PCA(.99)
71 | pca.fit(X_train)
72 | X_train_1 = pca.transform(X_train)
73 | X_test_1 = pca.transform(X_test)
74 |
75 |
76 |
77 |
78 | sc=MinMaxScaler()
79 | sc.fit(X_train_1)
80 | X_train_2=sc.transform(X_train_1)
81 | X_test_2=sc.transform(X_test_1)
82 |
83 | ## attention layer
84 | def attention_layer(inputs, neurons):
85 | x = layers.Permute((2,1))(inputs)
86 | x = layers.Dense(neurons, activation="softmax")(x)
87 | x = layers.Permute((2,1), name="attention")(x)
88 | x = layers.multiply([inputs, x])
89 | return x
90 |
91 | ## input
92 | x_in = layers.Input(shape=(X_train.shape[1],))
93 | ## embedding
94 | x = layers.Embedding(input_dim=101,
95 |                      output_dim=256,
96 |                      input_length=X_train.shape[1])(x_in)  #number of features, not samples
97 |
98 | ## apply attention
99 | x = attention_layer(x, neurons=X_train.shape[1])
100 | ## 2 layers of bidirectional lstm
101 | x = layers.Bidirectional(layers.LSTM(units=15, dropout=0.2,
102 | return_sequences=True))(x)
103 | x = layers.Bidirectional(layers.LSTM(units=15, dropout=0.2))(x)
104 | ## final dense layers
105 | x = layers.Dense(64, activation='relu')(x)
106 | y_out = layers.Dense(5, activation='softmax')(x)
107 | ## compile
108 | model = models.Model(x_in, y_out)
109 | model.compile(loss='sparse_categorical_crossentropy',
110 | optimizer='adam', metrics=['accuracy'])
111 |
112 | model.summary()
113 |
114 |
115 |
116 | ## train
117 | checkpoint_filepath = '/tmp/checkpoint'
118 | es=ModelCheckpoint(filepath=checkpoint_filepath,monitor='val_loss',save_best_only=True,mode='min',save_weights_only=True)
119 | training = model.fit(x=X_train, y=y_train, batch_size=64,
120 | epochs=20,verbose=1, validation_split=0.2,callbacks=[es])
121 | model.load_weights(checkpoint_filepath)
122 |
123 | #Plotting training and validation loss vs. epochs
124 | metrics = [k for k in training.history.keys() if ("loss" not in k) and ("val" not in k)]
125 | fig, ax = plt.subplots(nrows=1, ncols=2, sharey=True)
126 | ax[0].set(title="Training")
127 | ax11 = ax[0].twinx()
128 | ax[0].plot(training.history['loss'], color='black')
129 | ax[0].set_xlabel('Epochs')
130 | ax[0].set_ylabel('Loss', color='black')
131 | for metric in metrics:
132 | ax11.plot(training.history[metric], label=metric)
133 | ax11.set_ylabel("Score", color='steelblue')
134 | ax11.legend()
135 | ax[1].set(title="Validation")
136 | ax22 = ax[1].twinx()
137 | ax[1].plot(training.history['val_loss'], color='black')
138 | ax[1].set_xlabel('Epochs')
139 | ax[1].set_ylabel('Loss', color='black')
140 | for metric in metrics:
141 | ax22.plot(training.history['val_'+metric], label=metric)
142 | ax22.set_ylabel("Score", color="steelblue")
143 | plt.show()
144 |
145 | #Defining function for determining Model performance for DL models
146 | from sklearn.metrics import *
147 | def model_performance_DL(model,X_train,y_train,X_test,y_test):
148 |     #The fitted model is used to predict on the test set
149 | y_pred = model.predict(X_test)
150 | predicted=[]
151 | predicted_prob=y_pred
152 | for i in range(len(y_pred)):
153 | predicted.append(np.argmax(y_pred[i]))
154 |
155 |
156 | #Different metrics are used to check the model performance
157 | #Weighted precision,recall,f1_score is reported for all models
158 | classes = np.unique(y_test)
159 | y_test_array = pd.get_dummies(y_test, drop_first=False).values
160 |
161 | accuracy = accuracy_score(y_test, predicted)
162 | print("Accuracy:", round(accuracy,2))
163 | #print("Auc:", round(auc,2))
164 | print("Detail:")
165 | print(classification_report(y_test, predicted))
166 |
167 | ## Plot confusion matrix
168 | cm = confusion_matrix(y_test, predicted)
169 | fig, ax = plt.subplots()
170 | sns.heatmap(cm, annot=True, fmt='d', ax=ax, cmap=plt.cm.Blues,
171 | cbar=False)
172 | ax.set(xlabel="Pred", ylabel="True", xticklabels=classes,
173 | yticklabels=classes, title="Confusion matrix")
174 | plt.yticks(rotation=0)
175 |
176 | ## Plot roc-auc curve
177 | plt.figure()
178 | for i in range(len(classes)):
179 | fpr, tpr, thresholds = roc_curve(y_test_array[:,i],
180 | predicted_prob[:,i])
181 |         plt.plot(fpr, tpr, lw=3,
182 |                  label='{0} (area={1:0.2f})'.format(classes[i],
183 |                        auc(fpr, tpr))
184 |                  )
185 |
186 | plt.plot([0,1], [0,1], color='navy', lw=3, linestyle='--')
187 | plt.xlim([-0.05,1.0])
188 | plt.ylim([0,1.05])
189 | plt.xlabel('False Positive Rate')
190 | plt.ylabel('True Positive Rate (Recall)')
191 | plt.title('Receiver operating characteristic')
192 |
193 | plt.legend(loc='best')
194 | plt.grid(True)
195 |
196 | ## Plot precision-recall curve
197 | plt.figure()
198 | for i in range(len(classes)):
199 | precision, recall, thresholds = precision_recall_curve(
200 | y_test_array[:,i], predicted_prob[:,i])
201 | plt.plot(recall, precision, lw=3,
202 | label='{0} (area={1:0.2f})'.format(classes[i],
203 | auc(recall, precision))
204 | )
205 | plt.xlim([0,1.05])
206 | plt.ylim([0,1.05])
207 | plt.xlabel('Recall')
208 | plt.ylabel('Precision')
209 | plt.title('Precision-Recall curve')
210 | plt.legend(loc="best")
211 | plt.grid(True)
212 | plt.show()
213 |
214 | model_performance_DL(model,X_train,y_train,X_test,y_test)
215 |
216 |
217 | col_dict={}
218 | for col in X_train.columns:
219 | col_dict[col]=[]
220 |
221 | for i in range(len(X_test)):
222 |     X_instance=X_test.iloc[i].values
223 |     X_instance=X_instance.reshape(1,-1)   #shape (1, n_features) to match model.input
224 |
225 |     #attention weights for this instance
226 |     layer = [layer for layer in model.layers if "attention" in
227 |              layer.name][0]
228 |     func = K.function([model.input], [layer.output])
229 |     weights = func([X_instance])[0]   #K.function expects a list of inputs
230 |     weights = np.mean(weights, axis=2).flatten()
231 |
232 |
233 |     #rescale weights, drop zero entries, map feature -> weight
234 | weights = preprocessing.MinMaxScaler(feature_range=(0,1)).fit_transform(np.array(weights).reshape(-1,1)).reshape(-1)
235 | weights = [weights[n] for n,idx in enumerate(X_instance[0]) if idx
236 | != 0]
237 |
238 |
239 | dict_col_weight = {word:weights[n] for n,word in
240 | enumerate(X_train.columns)}
241 | for col in dict_col_weight.keys():
242 | col_dict[col].append(dict_col_weight[col])
243 |
244 | for col in col_dict.keys():
245 | col_dict[col]=np.mean(col_dict[col])
246 |
247 | # Bar plot of the average attention weight per column
248 | if len(col_dict) > 0:
249 | dtf = pd.DataFrame.from_dict(col_dict, orient='index',
250 | columns=["score"])
251 | dtf.sort_values(by="score",
252 | ascending=True).plot(kind="barh",
253 | legend=False).grid(axis='x')
254 | plt.show()
255 |
--------------------------------------------------------------------------------
/TL_model/Base_TL_model.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 |
4 | from sklearn.naive_bayes import GaussianNB
5 | from sklearn.neighbors import KNeighborsClassifier
6 | from sklearn.ensemble import RandomForestClassifier
7 | from sklearn.tree import DecisionTreeClassifier
8 | from sklearn.svm import SVC
9 | from sklearn.ensemble import AdaBoostClassifier
10 |
11 | from sklearn import *
12 |
13 | from sklearn.preprocessing import RobustScaler,MinMaxScaler
14 | from sklearn.preprocessing import StandardScaler
15 | from sklearn.cluster import DBSCAN
16 | from sklearn.utils import resample
17 |
18 | from keras.models import Sequential
19 | from keras.layers import Dense,Dropout,ReLU
20 | from keras.optimizers import Adam
21 | from keras.callbacks import EarlyStopping,ModelCheckpoint
22 | from keras.utils import to_categorical
23 | from keras.layers import Embedding,Conv1D,LSTM,Input,TimeDistributed,SpatialDropout1D,Flatten,Dropout
24 | from keras.models import Model
25 |
26 | from sklearn.metrics import *
27 |
28 | from tensorflow.keras import models, layers, optimizers,regularizers,preprocessing as kprocessing
29 | from tensorflow.keras import backend as K
30 | from keras.callbacks import EarlyStopping,ModelCheckpoint
31 |
32 | #For plotting
33 | import matplotlib.pyplot as plt
34 | import seaborn as sns
35 |
36 | import warnings
37 | warnings.filterwarnings("ignore")
38 | from sklearn.model_selection import train_test_split
39 |
40 | # Read the data from local disk
41 | # Change the paths accordingly
42 | data_ash=pd.read_csv('/content/drive/MyDrive/Colab Notebooks/Thermal sensation prediction(Murata)/Ashrae_data_fin.csv')
43 | data_scales=pd.read_csv('/content/drive/MyDrive/Colab Notebooks/Thermal sensation prediction(Murata)/Scales_data_fin.csv')
44 | data_us=pd.read_csv('/content/drive/MyDrive/Colab Notebooks/Thermal sensation prediction(Murata)/Medium_US_data_fin.csv')
45 |
46 | # Changing from [-2,2] to [0,4]
47 | data_ash['Thermal sensation']=data_ash['Thermal sensation'].apply(lambda x: x+2)
48 | data_scales['Thermal sensation']=data_scales['Thermal sensation'].apply(lambda x: x+2)
49 | data_us['Thermal sensation']=data_us['Thermal sensation'].apply(lambda x: x+2)
50 |
51 | # Mapping age-group categories to representative ages
52 | def age_encode(x):
53 | if(x==1):
54 | return(18)
55 | elif(x==2):
56 | return(23)
57 | elif(x==3):
58 | return(28)
59 | elif(x==4):
60 | return(33)
61 | elif(x==5):
62 | return(38)
63 | elif(x==6):
64 | return(50)
65 | data_scales['Age']=data_scales['Age'].apply(lambda x: age_encode(x))
66 |
67 | #Downsampling the Data
68 | def Down_sampling_data(df):
69 | df_0 = df[df['Thermal sensation']==0]
70 | df_1 = df[df['Thermal sensation']==1]
71 | df_2 = df[df['Thermal sensation']==2]
72 | df_3 = df[df['Thermal sensation']==3]
73 | df_4 = df[df['Thermal sensation']==4]
74 |
75 | # Downsample majority class
76 | lens=[len(df_0),len(df_1),len(df_2),len(df_3),len(df_4)]
77 | max_ind=np.argmax(lens)
78 | min_ind=np.argmin(lens)
79 | dfs=[df_0,df_1,df_2,df_3,df_4]
80 | df_minority=dfs[min_ind]
81 | dfs_majority_downsampled=[]
82 | for i in range(5):
83 | if(i!=min_ind):
84 | df_majority=dfs[i]
85 | df_majority_downsampled = resample(df_majority,
86 | replace=False, # sample without replacement
87 | n_samples=len(df_minority),# to match minority class
88 | random_state=123) # reproducible results
89 | dfs_majority_downsampled.append(df_majority_downsampled)
90 |
91 | # Combine minority class with downsampled majority class
92 | df_downsampled = pd.concat(dfs_majority_downsampled+[df_minority])
93 | return(df_downsampled)
94 |
95 | # Upsample minority classes
96 | def Up_sampling_data(df):
97 | df_0 = df[df['Thermal sensation']==0]
98 | df_1 = df[df['Thermal sensation']==1]
99 | df_2 = df[df['Thermal sensation']==2]
100 | df_3 = df[df['Thermal sensation']==3]
101 | df_4 = df[df['Thermal sensation']==4]
102 | # Upsample minority class
103 | lens=[len(df_0),len(df_1),len(df_2),len(df_3),len(df_4)]
104 | max_ind=np.argmax(lens)
105 | min_ind=np.argmin(lens)
106 | dfs=[df_0,df_1,df_2,df_3,df_4]
107 | df_majority=dfs[max_ind]
108 | dfs_minority_upsampled=[]
109 | for i in range(5):
110 | if(i!=max_ind):
111 | df_minority=dfs[i]
112 | df_minority_upsampled = resample(df_minority,
113 | replace=True, # sample with replacement
114 | n_samples=len(df_majority), # to match majority class
115 | random_state=123)
116 |
117 | dfs_minority_upsampled.append(df_minority_upsampled)
118 |
119 |     # Combine the majority class with the upsampled minority classes
120 | df_upsampled = pd.concat(dfs_minority_upsampled+[df_majority])
121 | return(df_upsampled)
122 |
123 | #Downsampling the datasets
124 | data_ash=Down_sampling_data(data_ash)
125 | data_us=Down_sampling_data(data_us)
126 | data_scales=Down_sampling_data(data_scales)
127 |
128 | # Baseline models - Comparative study
129 | SVM_lin_model=SVC(class_weight='balanced',kernel='linear')
130 | SVM_poly_model=SVC(class_weight='balanced',kernel='poly')
131 | SVM_radial_model=SVC(class_weight='balanced',kernel='rbf')
132 |
133 | NB_model=GaussianNB()
134 | KNN_model=KNeighborsClassifier(n_neighbors=5,weights='distance')
135 |
136 | RF_model=RandomForestClassifier(n_estimators=200,class_weight='balanced_subsample',max_depth=10)
137 | DT_model=DecisionTreeClassifier(max_depth=10,class_weight='balanced')
138 | Adaboost_model=AdaBoostClassifier(n_estimators=100,random_state=1)
139 |
140 | # Dropping Unwanted cols for different feature sets
141 | X1=data_ash.drop(['Thermal sensation','Clo','Met'],axis=1)
142 | y1=data_ash['Thermal sensation']
143 |
144 | X2=data_us.drop(['Thermal sensation','Clo','Met'],axis=1)
145 | y2=data_us['Thermal sensation']
146 |
147 | X3=data_scales.drop(['Thermal sensation'],axis=1)
148 | y3=data_scales['Thermal sensation']
149 |
150 | # Data Preprocessing Pipeline
151 | def Standard_Scaler_preprocessing(X_train,X_test):
152 | sc=StandardScaler()
153 | sc.fit(X_train)
154 | X_train_1=sc.transform(X_train)
155 | X_test_1=sc.transform(X_test)
156 | X_train_1=pd.DataFrame(X_train_1,columns=X_train.columns)
157 | X_test_1=pd.DataFrame(X_test_1,columns=X_test.columns)
158 | return(X_train_1,X_test_1)
159 |
160 | # Creating Train and Test datasets
161 | X_train=pd.concat([X1,X3])
162 | X_test=X2
163 |
164 | y1=list(y1)
165 | y2=list(y2)
166 | y3=list(y3)
167 | y_train=(y1+y3)
168 | y_test=y2
169 |
170 | y_train=np.array(y_train)
171 | y_test=np.array(y_test)
172 |
173 | X_train,X_test=Standard_Scaler_preprocessing(X_train,X_test)
174 |
175 | # Function to obtain model performance
176 | def perf(model,X_train,y_train,X_test,y_test):
177 | model.fit(X_train,y_train)
178 | preds=model.predict(X_test)
179 | print(classification_report(y_test,preds))
180 |
181 | #Defining function for determining Model performance for DL models
182 | def model_performance_DL(model,X_train,y_train,X_test,y_test):
183 |     #The fitted model is used to predict on the test set
184 | y_pred = model.predict(X_test)
185 | predicted=[]
186 | predicted_prob=y_pred
187 | for i in range(len(y_pred)):
188 | predicted.append(np.argmax(y_pred[i]))
189 |
190 |
191 | #Different metrics are used to check the model performance
192 | #Weighted precision,recall,f1_score is reported for all models
193 | classes = np.unique(y_test)
194 | y_test_array = pd.get_dummies(y_test, drop_first=False).values
195 |
196 | accuracy = accuracy_score(y_test, predicted)
197 | print("Accuracy:", round(accuracy,2))
198 | print("Detail:")
199 | print(classification_report(y_test, predicted))
200 |
201 | ## Plot confusion matrix
202 | cm = confusion_matrix(y_test, predicted)
203 | fig, ax = plt.subplots()
204 | sns.heatmap(cm, annot=True, fmt='d', ax=ax, cmap=plt.cm.Blues,
205 | cbar=False)
206 | ax.set(xlabel="Pred", ylabel="True", xticklabels=classes,
207 | yticklabels=classes, title="Confusion matrix")
208 | plt.yticks(rotation=0)
209 |
210 | ## Plot roc-auc curve
211 | plt.figure()
212 | for i in range(len(classes)):
213 | fpr, tpr, thresholds = roc_curve(y_test_array[:,i],
214 | predicted_prob[:,i])
215 |         plt.plot(fpr, tpr, lw=3,
216 |                  label='{0} (area={1:0.2f})'.format(classes[i],
217 |                        auc(fpr, tpr))
218 |                  )
219 |
220 | plt.plot([0,1], [0,1], color='navy', lw=3, linestyle='--')
221 | plt.xlim([-0.05,1.0])
222 | plt.ylim([0,1.05])
223 | plt.xlabel('False Positive Rate')
224 | plt.ylabel('True Positive Rate (Recall)')
225 | plt.title('Receiver operating characteristic')
226 |
227 | plt.legend(loc='best')
228 | plt.grid(True)
229 |
230 | ## Plot precision-recall curve
231 | plt.figure()
232 | for i in range(len(classes)):
233 | precision, recall, thresholds = precision_recall_curve(
234 | y_test_array[:,i], predicted_prob[:,i])
235 | plt.plot(recall, precision, lw=3,
236 | label='{0} (area={1:0.2f})'.format(classes[i],
237 | auc(recall, precision))
238 | )
239 | plt.xlim([0,1.05])
240 | plt.ylim([0,1.05])
241 | plt.xlabel('Recall')
242 | plt.ylabel('Precision')
243 | plt.title('Precision-Recall curve')
244 | plt.legend(loc="best")
245 | plt.grid(True)
246 | plt.show()
247 |
248 | X_train_temp=X_train.values
249 | X_test_temp=X_test.values
250 |
251 | #Inverse Class weights for Imbalanced classes
252 | Tot=len(y_train)
253 | count_arr=[0]*len(np.unique(y_train))
254 | for i in range(len(y_train)):
255 | count_arr[int(y_train[i])]+=1
256 |
257 | weight_dicts={}
258 | for i in np.unique(y_train):
259 | weight_dicts[int(i)]=Tot/count_arr[int(i)]
260 |
261 | #Inverse Class weights for Imbalanced classes
262 | Tot=len(y_test)
263 | count_arr=[0]*len(np.unique(y_test))
264 | for i in range(len(y_test)):
265 | count_arr[int(y_test[i])]+=1
266 |
267 | weight_dicts_test={}
268 | for i in np.unique(y_test):
269 | weight_dicts_test[int(i)]=Tot/count_arr[int(i)]
270 |
271 | #Building a ANN model
272 | #Defining a common function to handle all the three datasets
273 |
274 | # Building different types of DL models
275 | def build_model(X_train,y_train,weight_dicts):
276 | y_train=np.asarray(y_train ,dtype=int)
277 | MLP_model=Sequential()
278 | MLP_model.add(Dense(1024,activation='relu',input_dim=len(X_train[0])))
279 | MLP_model.add(Dense(512,activation='relu',kernel_initializer='glorot_uniform'))
280 | MLP_model.add(Dense(256,activation='relu',kernel_initializer='glorot_uniform'))
281 | MLP_model.add(Dense(128,activation='relu',kernel_initializer='glorot_uniform'))
282 | MLP_model.add(Dense(64,activation='relu',kernel_initializer='glorot_uniform'))
283 | MLP_model.add(Dense(32,activation='relu',kernel_initializer='glorot_uniform'))
284 | MLP_model.add(Dense(16,activation='relu',kernel_initializer='glorot_uniform'))
285 | MLP_model.add(Dense(8,activation='relu',kernel_initializer='glorot_uniform'))
286 | MLP_model.add(Dense(5,activation='softmax'))
287 | MLP_model.compile(optimizer=Adam(lr=0.001),metrics=['accuracy'],loss='sparse_categorical_crossentropy',weighted_metrics=['accuracy'])
288 | checkpoint_filepath = '/tmp/checkpoint'
289 | es=ModelCheckpoint(filepath=checkpoint_filepath,monitor='val_accuracy',save_best_only=True,mode='max',save_weights_only=True)
290 | MLP_model.fit(X_train,y_train,epochs=150,validation_split=0.2,batch_size=64,callbacks=[es],class_weight=weight_dicts)
291 | MLP_model.load_weights(checkpoint_filepath)
292 | return(MLP_model)
293 |
294 |
295 | def build_model_LSTM_CNN(X_train,y_train,weight_dicts):
296 | y_train=np.asarray(y_train ,dtype=int)
297 | y_train=to_categorical(y_train,num_classes=5)
298 | X_train=X_train.reshape(X_train.shape[0],X_train.shape[1],-1)
299 | model=Sequential()
300 | model.add(Conv1D(filters=128,kernel_size=5,padding='same',input_shape=(X_train.shape[1],X_train.shape[2])))
301 | model.add(SpatialDropout1D(0.1))
302 | model.add(LSTM(256,return_sequences=True))
303 | model.add(LSTM(256,return_sequences=True))
304 | model.add(Flatten())
305 | model.add(Dense(64,activation='relu'))
306 | model.add(Dense(32,activation='relu'))
307 | model.add(Dense(16,activation='relu'))
308 | model.add(Dense(8,activation='relu'))
309 | model.add(Dense(5,activation='softmax'))
310 | model.compile(loss='categorical_crossentropy',optimizer=Adam(lr=0.001),metrics=['accuracy'],weighted_metrics=['accuracy'])
311 | checkpoint_filepath = '/tmp/checkpoint'
312 | es=ModelCheckpoint(filepath=checkpoint_filepath,monitor='val_accuracy',save_best_only=True,mode='max',save_weights_only=True)
313 | model.fit(X_train,y_train,epochs=100,validation_split=0.2,batch_size=64,callbacks=[es],class_weight=weight_dicts)
314 | model.load_weights(checkpoint_filepath)
315 | return(model)
316 |
317 | def build_model_LSTM(X_train,y_train,weight_dicts):
318 | y_train=np.asarray(y_train ,dtype=int)
319 | y_train=to_categorical(y_train,num_classes=5)
320 | X_train=X_train.reshape(X_train.shape[0],X_train.shape[1],-1)
321 | model=Sequential()
322 | model.add(LSTM(256,return_sequences=True,input_shape=(X_train.shape[1],X_train.shape[2])))
323 | model.add(LSTM(256,return_sequences=True))
324 | model.add(LSTM(256,return_sequences=True))
325 | model.add(Flatten())
326 | model.add(Dense(64,activation='relu'))
327 | model.add(Dense(32,activation='relu'))
328 | model.add(Dense(16,activation='relu'))
329 | model.add(Dense(8,activation='relu'))
330 | model.add(Dense(5,activation='softmax'))
331 | model.compile(loss='categorical_crossentropy',optimizer=Adam(lr=0.001),metrics=['accuracy'],weighted_metrics=['accuracy'])
332 | checkpoint_filepath = '/tmp/checkpoint'
333 | es=ModelCheckpoint(filepath=checkpoint_filepath,monitor='val_accuracy',save_best_only=True,mode='max',save_weights_only=True)
334 | model.fit(X_train,y_train,epochs=150,validation_split=0.2,batch_size=64,callbacks=[es],class_weight=weight_dicts)
335 | model.load_weights(checkpoint_filepath)
336 | return(model)
337 |
338 | #Use this cell to evaluate the performance of the DL models
339 | #To run a model other than the MLP (e.g. LSTM or CNN-LSTM),
340 | # use the corresponding build function defined above
341 | MLP_model_train=build_model(X_train_temp,y_train,weight_dicts)
342 | model_performance_DL(MLP_model_train,X_train_temp,y_train,X_test_temp,y_test)
343 | weights=MLP_model_train.layers[-1].get_weights()
344 |
345 |
346 | # Transfer Learning Model
347 | model=Sequential()
348 | X_train1=X_train_temp.reshape(X_train_temp.shape[0],X_train_temp.shape[1],-1)
349 | model.add(LSTM(256,return_sequences=True,input_shape=(X_train1.shape[1],X_train1.shape[2])))
350 | model.add(LSTM(256,return_sequences=True))
351 | model.add(Flatten())
352 | model.add(Dense(64,activation='relu'))
353 | model.add(Dense(32,activation='relu'))
354 | model.add(Dense(16,activation='relu'))
355 | model.add(Dense(8,activation='relu'))
356 | model.add(Dense(5,activation='softmax'))
357 |
358 |
359 | model.layers[-1].set_weights(weights)
360 | model.layers[-1].trainable=False
361 |
362 |
363 | X_t1,X_t2,y_t1,y_t2=train_test_split(X_test,y_test,test_size=0.1,random_state=2)
364 |
365 | X_t=X_t1.values
366 | X_t=X_t.reshape(X_t.shape[0],X_t.shape[1],-1)
367 |
368 | #Target domain DL model
369 | model.compile(loss='sparse_categorical_crossentropy',optimizer=Adam(lr=0.001),metrics=['accuracy'],weighted_metrics=['accuracy'])
370 | checkpoint_filepath = '/tmp/checkpoint'
371 | es=ModelCheckpoint(filepath=checkpoint_filepath,monitor='val_accuracy',save_best_only=True,mode='max',save_weights_only=True)
372 | model.fit(X_t,y_t1,epochs=100,validation_split=0.2,batch_size=64,callbacks=[es])
373 | model.load_weights(checkpoint_filepath)
374 |
375 | # Performance of Target domain model
376 | X_t3=X_t2.values
377 | X_t3=X_t3.reshape(X_t3.shape[0],X_t3.shape[1],-1)
378 | model_performance_DL(model,X_t,y_t1,X_t3,y_t2)
379 |
--------------------------------------------------------------------------------
/Feature_selectin+Base_model/feature_selection+plots_ashrae+scales.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 |
4 | #Importing models from sklearn library
5 | #Trying various inbuilt models
6 | #SVM,Naive bayes,KNN,Random Forest, Decision Trees
7 |
8 | from sklearn.naive_bayes import GaussianNB
9 | from sklearn.neighbors import KNeighborsClassifier
10 | from sklearn.ensemble import RandomForestClassifier
11 | from sklearn.tree import DecisionTreeClassifier
12 | from sklearn.svm import SVC
13 | from sklearn.ensemble import AdaBoostClassifier
14 |
15 | from sklearn import *
16 |
17 | #For plotting
18 | import matplotlib.pyplot as plt
19 | import seaborn as sns
20 |
21 | #Reading ASHRAE AND SCALES DATA
22 | # Modify data path accordingly
23 |
24 | data_us=pd.read_csv('/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/Medium_office.csv')
25 | data_C=pd.read_csv('/content/drive/My Drive/Colab Notebooks/Thermal sensation prediction(Murata)/Ashrae_clean.csv')
26 |
27 | data_C.drop(['Unnamed: 0'],axis=1,inplace=True)
28 | data_C=data_C.drop_duplicates(['Age', 'Sex','Air velocity (m/s)', 'Air temperature (¡C)', 'Radiant temperature (¡C)', 'Relative humidity (%)', 'Clo', 'Met', 'Outdoor monthly air temperature (¡C)'])
29 |
30 | data_us.drop(['Unnamed: 0'],axis=1,inplace=True)
31 | data_us=data_us.drop_duplicates(['Age', 'Sex','Air velocity (m/s)', 'Air temperature (¡C)', 'Radiant temperature (¡C)', 'Relative humidity (%)', 'Clo', 'Met', 'Outdoor monthly air temperature (¡C)'])
32 |
33 | data_us_C=data_us
34 |
35 | def fun1(x):
36 | return(np.round(x,2))
37 |
38 | cols=['Air velocity (m/s)','Air temperature (¡C)','Radiant temperature (¡C)','Relative humidity (%)']
39 | for col in cols:
40 | data_us_C[col]=data_us_C[col].apply(lambda x:fun1(x))
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 | #Plotting the Number of sample in each class in train data
49 | fig, ax = plt.subplots()
50 | fig.suptitle("Thermal Sensation", fontsize=12)
51 | data_C["Thermal sensation"].reset_index().groupby("Thermal sensation").count().sort_values(by=
52 | "index").plot(kind="barh", legend=False,
53 | ax=ax).grid(axis='x')
54 | plt.show()
55 |
56 | #Plotting the Number of sample in each class in Test data
57 | fig, ax = plt.subplots()
58 | fig.suptitle("Thermal Sensation", fontsize=12)
59 | data_us_C["Thermal sensation"].reset_index().groupby("Thermal sensation").count().sort_values(by=
60 | "index").plot(kind="barh", legend=False,
61 | ax=ax).grid(axis='x')
62 | plt.show()
63 |
64 | #Creating the Train and Test dataframes
65 | data_C=data_C[data_C>0]
66 | data_C=data_C.dropna()
67 | X_train=data_C.drop(['Thermal sensation'],axis=1)
68 | y_train=data_C['Thermal sensation'].values
69 |
70 |
71 | data_us_C=data_us_C[data_us_C>0]
72 | data_us_C=data_us_C.dropna()
73 | X_test=data_us_C.drop(['Thermal sensation'],axis=1)
74 | y_test=data_us_C['Thermal sensation'].values
75 |
76 |
77 |
78 |
79 |
80 | #Feature selection
81 | #Set of all features which are statistically significant in the train set
82 | y = data_C["Thermal sensation"]
83 | X_names = X_train.columns
84 | p_value_limit = 0.95 #score = 1-p, so this keeps features with p < 0.05
85 | dtf_features = pd.DataFrame()
86 | for cat in np.unique(y):
87 | chi2_stat, p = feature_selection.chi2(X_train, y==cat)
88 | dtf_features = pd.concat([dtf_features, pd.DataFrame(
89 | {"feature":X_names, "score":1-p, "y":cat})]) #DataFrame.append was removed in pandas 2.0
90 | dtf_features = dtf_features.sort_values(["y","score"],
91 | ascending=[True,False])
92 | dtf_features = dtf_features[dtf_features["score"]>p_value_limit]
93 | X_names = dtf_features["feature"].unique().tolist()
94 |
95 | print('Important Columns are :',X_names)
96 |
97 | #The features playing a major role in identifying each category
98 | #Statistical significance limit is set at alpha = 0.05
99 | for cat in np.unique(y):
100 | print("# {}:".format(cat))
101 | print(" . selected features:",
102 | len(dtf_features[dtf_features["y"]==cat]))
103 | print(" . top features:", ",".join(
104 | dtf_features[dtf_features["y"]==cat]["feature"].values[:10]))
105 | print(" ")
106 |
107 | #Selecting only the cols which are statistically significant
108 | X_train=X_train[X_names]
109 | X_test=X_test[X_names]
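# A minimal alternative sketch (an assumption, not part of the original pipeline):
# the same chi2 criterion expressed through sklearn's SelectKBest; the value of k
# below is a hypothetical choice for illustration.
from sklearn.feature_selection import SelectKBest, chi2 as chi2_score
skb = SelectKBest(score_func=chi2_score, k=min(5, X_train.shape[1])).fit(X_train, y_train)
X_train_skb = X_train.loc[:, skb.get_support()]  # selected feature subset (not used below)
X_test_skb = X_test.loc[:, skb.get_support()]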
110 |
111 |
112 |
113 |
114 |
115 | #Feature selection
116 | #Set of all features which are statistically significant in the test set
117 | y = data_us_C["Thermal sensation"]
118 | X_names_test = X_test.columns
119 | p_value_limit = 0.95 #As above: keeps features with p < 0.05
120 | dtf_features_test = pd.DataFrame()
121 | for cat in np.unique(y):
122 | chi2_stat, p = feature_selection.chi2(X_test, y==cat)
123 | dtf_features_test = pd.concat([dtf_features_test, pd.DataFrame(
124 | {"feature":X_names_test, "score":1-p, "y":cat})])
125 | dtf_features_test = dtf_features_test.sort_values(["y","score"],
126 | ascending=[True,False])
127 | dtf_features_test = dtf_features_test[dtf_features_test["score"]>p_value_limit]
128 | X_names_test = dtf_features_test["feature"].unique().tolist()
129 |
130 |
131 | #The features playing a major role in identifying each category
132 | #Statistical significance limit is set at alpha = 0.05
133 | for cat in np.unique(y):
134 | print("# {}:".format(cat))
135 | print(" . selected features:",
136 | len(dtf_features_test[dtf_features_test["y"]==cat]))
137 | print(" . top features:", ",".join(
138 | dtf_features_test[dtf_features_test["y"]==cat]["feature"].values[:10]))
139 | print(" ")
140 |
141 | #Defining the models
142 | #Using class_weight='balanced' (a heuristic approach to handling imbalanced classes)
143 | SVM_lin_model=SVC(class_weight='balanced',kernel='linear',probability=True) #probability=True is required for predict_proba below
144 | SVM_poly_model=SVC(class_weight='balanced',kernel='poly',probability=True)
145 | SVM_radial_model=SVC(class_weight='balanced',kernel='rbf',probability=True)
146 | NB_model=GaussianNB()
147 | #Instead of giving equal weights to all neighbours, here the weights are inversely
148 | #proportional to the distance from the query point
149 | KNN_model=KNeighborsClassifier(n_neighbors=10,weights='distance')
150 | RF_model=RandomForestClassifier(n_estimators=200,class_weight='balanced_subsample',max_depth=10)
151 | DT_model=DecisionTreeClassifier(max_depth=10,class_weight='balanced')
152 | Adaboost_model=AdaBoostClassifier(n_estimators=200,random_state=1)
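# An illustrative sketch (an assumption, not from the original): the weights that
# class_weight='balanced' computes internally, i.e. n_samples / (n_classes * count).
from sklearn.utils.class_weight import compute_class_weight
balanced_w = compute_class_weight(class_weight='balanced', classes=np.unique(y_train), y=y_train)
print('Balanced class weights:', dict(zip(np.unique(y_train), np.round(balanced_w, 3))))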
153 |
154 | #Defining function
155 |
156 | def model_performance(model,X_train,y_train,X_test,y_test):
157 | #Model is fit on the train set and then used to predict on the test set
158 | model.fit(X_train,y_train)
159 | predicted = model.predict(X_test)
160 | predicted_prob = model.predict_proba(X_test)
161 |
162 | #Different metrics are used to check the model performance
163 | #Weighted precision, recall and f1_score are reported for all models
164 | classes = np.unique(y_test)
165 | y_test_array = pd.get_dummies(y_test, drop_first=False).values
166 |
167 | accuracy = metrics.accuracy_score(y_test, predicted)
168 | auc = metrics.roc_auc_score(y_test, predicted_prob,
169 | multi_class="ovr")
170 | print("Accuracy:", round(accuracy,2))
171 | print("Auc:", round(auc,2))
172 | print("Detail:")
173 | print(metrics.classification_report(y_test, predicted))
174 |
175 | ## Plot confusion matrix
176 | cm = metrics.confusion_matrix(y_test, predicted)
177 | fig, ax = plt.subplots()
178 | sns.heatmap(cm, annot=True, fmt='d', ax=ax, cmap=plt.cm.Blues,
179 | cbar=False)
180 | ax.set(xlabel="Pred", ylabel="True", xticklabels=classes,
181 | yticklabels=classes, title="Confusion matrix")
182 | plt.yticks(rotation=0)
183 |
184 | ## Plot roc-auc curve
185 | plt.figure()
186 | for i in range(len(classes)):
187 | fpr, tpr, thresholds = metrics.roc_curve(y_test_array[:,i],
188 | predicted_prob[:,i])
189 | plt.plot(fpr, tpr, lw=3,
190 | label='{0} (area={1:0.2f})'.format(classes[i],
191 | metrics.auc(fpr, tpr)) #Removed a stray figsize kwarg that str.format silently ignored
192 | )
193 |
194 | plt.plot([0,1], [0,1], color='navy', lw=3, linestyle='--')
195 | plt.xlim([-0.05,1.0])
196 | plt.ylim([0,1.05])
197 | plt.xlabel('False Positive Rate')
198 | plt.ylabel('True Positive Rate (Recall)')
199 | plt.title('Receiver operating characteristic')
200 |
201 | plt.legend(loc='best')
202 | plt.grid(True)
203 |
204 | ## Plot precision-recall curve
205 | plt.figure()
206 | for i in range(len(classes)):
207 | precision, recall, thresholds = metrics.precision_recall_curve(
208 | y_test_array[:,i], predicted_prob[:,i])
209 | plt.plot(recall, precision, lw=3,
210 | label='{0} (area={1:0.2f})'.format(classes[i],
211 | metrics.auc(recall, precision))
212 | )
213 | plt.xlim([0,1.05])
214 | plt.ylim([0,1.05])
215 | plt.xlabel('Recall')
216 | plt.ylabel('Precision')
217 | plt.title('Precision-Recall curve')
218 | plt.legend(loc="best")
219 | plt.grid(True)
220 | plt.show()
221 |
222 | #Change the model name accordingly to get the performance of the desired model
223 | model_performance(Adaboost_model,X_train,y_train,X_test,y_test)
224 |
225 | #Plotting Precision-Recall curves for each classifier for each category separately
226 | classes = np.unique(y_test)
227 | y_test_array = pd.get_dummies(y_test, drop_first=False).values
228 |
229 | models=[NB_model,KNN_model,RF_model,DT_model,Adaboost_model]
230 | model_names=['NB_model','KNN_model','RF_model','DT_model','Adaboost_model']
231 | for i in range(len(classes)):
232 | plt.figure()
233 | plt.xlim([0,1.05])
234 | plt.ylim([0,1.05])
235 | plt.xlabel('Recall')
236 | plt.ylabel('Precision')
237 | plt.title('Precision-Recall curve')
238 |
239 | plt.grid(True)
240 | count=0
241 | for model in models:
242 | model.fit(X_train,y_train)
243 | predicted = model.predict(X_test)
244 | predicted_prob = model.predict_proba(X_test)
245 | precision, recall, thresholds = metrics.precision_recall_curve(
246 | y_test_array[:,i], predicted_prob[:,i])
247 | plt.plot(recall, precision, lw=3,
248 | label='{} of {}'.format(classes[i],model_names[count]))
249 | count+=1
250 | plt.legend(loc="best")
251 | plt.show()
252 |
253 |
254 |
255 | #Building an ANN model
256 | #Importing necessary layers from keras
257 | from keras.models import Sequential
258 | from keras.layers import Dense,Dropout,ReLU
259 | from keras.optimizers import Adam
260 | from keras.callbacks import EarlyStopping
261 | from keras.utils import to_categorical
262 | from keras.layers import Embedding,Conv1D,LSTM,Input,TimeDistributed,SpatialDropout1D,Flatten,Dropout
263 | from keras.models import Model
264 |
265 | #Defining a common function to handle all the three datasets
266 | #Tried different initializers also; not much difference
267 | def build_model(X_train,y_train):
268 | y_train=np.asarray(y_train,dtype=int)+2 #Shift labels from [-2,2] to [0,4]; sparse_categorical_crossentropy needs non-negative class indices
269 | MLP_model=Sequential()
270 | MLP_model.add(Dense(1024,activation='relu',input_shape=(len(list(X_train.columns)),)))
271 | MLP_model.add(Dense(512,activation='relu',kernel_initializer='he_uniform'))
272 | MLP_model.add(Dense(256,activation='relu',kernel_initializer='he_uniform'))
273 | MLP_model.add(Dense(128,activation='relu',kernel_initializer='he_uniform'))
274 | MLP_model.add(Dense(64,activation='relu',kernel_initializer='he_uniform'))
275 | MLP_model.add(Dense(32,activation='relu',kernel_initializer='he_uniform'))
276 | MLP_model.add(Dense(16,activation='relu',kernel_initializer='he_uniform'))
277 | MLP_model.add(Dense(8,activation='relu',kernel_initializer='he_uniform'))
278 | MLP_model.add(Dense(5,activation='softmax'))
279 | MLP_model.compile(optimizer=Adam(learning_rate=0.001),metrics=['accuracy'],loss='sparse_categorical_crossentropy') #learning_rate replaces the deprecated lr argument
280 | #Early stopping to prevent overfitting
281 | #es=EarlyStopping(monitor='val_loss',patience=2,restore_best_weights=True)
282 | MLP_model.fit(X_train,y_train,epochs=20,validation_split=0.2,batch_size=64)
283 | return(MLP_model)
284 |
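# A minimal sketch (an assumption) of how the commented-out EarlyStopping above
# could be wired in; patience=5 and epochs=100 are hypothetical values, not from
# the original runs.
def fit_with_early_stopping(model, X, y, epochs=100):
    # Stop once val_loss stops improving and roll back to the best epoch's weights
    es = EarlyStopping(monitor='val_loss', patience=5, restore_best_weights=True)
    model.fit(X, y, epochs=epochs, validation_split=0.2, batch_size=64, callbacks=[es])
    return model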
285 | def build_model_LSTM_CNN(X_train,y_train):
286 | y_train=np.asarray(y_train,dtype=int)+2 #Shift labels from [-2,2] to [0,4]; to_categorical silently wraps negative indices
287 | y_train=to_categorical(y_train,num_classes=5)
288 | model=Sequential()
289 | model.add(Embedding(101,256,input_length=len(X_train.columns),))
290 | model.add(Conv1D(filters=128,kernel_size=5,padding='same'))
291 | model.add(SpatialDropout1D(0.1))
292 | model.add(LSTM(256,return_sequences=True))
293 | model.add(LSTM(256,return_sequences=True))
294 | model.add(Flatten())
295 | model.add(Dense(64,activation='relu'))
296 | model.add(Dropout(0.1))
297 | model.add(Dense(16,activation='relu'))
298 | model.add(Dropout(0.1))
299 | model.add(Dense(5,activation='softmax'))
300 | model.compile(loss='categorical_crossentropy',optimizer=Adam(learning_rate=0.001),metrics=['accuracy'])
301 | #es=EarlyStopping(monitor='val_loss',restore_best_weights=True)
302 | model.fit(X_train,y_train,epochs=20,validation_split=0.2,batch_size=64)
303 | model.summary()
304 | return(model)
305 |
306 | def build_model_LSTM(X_train,y_train):
307 | y_train=np.asarray(y_train,dtype=int)+2 #Shift labels from [-2,2] to [0,4] before one-hot encoding
308 | y_train=to_categorical(y_train,num_classes=5)
309 | model=Sequential()
310 | model.add(Embedding(101,256,input_length=len(X_train.columns),))
311 | model.add(LSTM(256,return_sequences=True))
312 | model.add(LSTM(256,return_sequences=True))
313 | model.add(Flatten())
314 | model.add(Dense(64,activation='relu'))
315 | model.add(Dropout(0.1))
316 | model.add(Dense(16,activation='relu'))
317 | model.add(Dropout(0.1))
318 | model.add(Dense(5,activation='softmax'))
319 | model.compile(loss='categorical_crossentropy',optimizer=Adam(learning_rate=0.001),metrics=['accuracy'])
320 | #es=EarlyStopping(monitor='val_loss',restore_best_weights=True)
321 | model.fit(X_train,y_train,epochs=20,validation_split=0.2,batch_size=64)
322 | model.summary()
323 | return(model)
324 |
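# Note: Embedding(101, ...) expects non-negative integer indices in [0, 100], while
# the tabular features here are continuous. A binning step such as the sketch below
# (an assumption, not part of the original code) would map each column into that range.
def to_embedding_indices(X, n_bins=101):
    # Discretize each feature column into n_bins integer bins over its observed range
    X = np.asarray(X, dtype=float)
    idx = np.zeros_like(X, dtype=int)
    for j in range(X.shape[1]):
        edges = np.linspace(X[:, j].min(), X[:, j].max(), n_bins)
        idx[:, j] = np.clip(np.digitize(X[:, j], edges) - 1, 0, n_bins - 1)
    return idx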
325 | #Defining function for determining Model performance for DL models
326 |
327 | def model_performance_DL(model,X_train,y_train,X_test,y_test):
328 | #Model has already been fit; here it is only used to predict on the test set
329 | y_pred = model.predict(X_test)
330 | predicted=[]
331 | predicted_prob=y_pred
332 | for i in range(len(y_pred)):
333 | predicted.append(np.argmax(y_pred[i])-2) #Map the class index [0,4] back to the label scale [-2,2]
334 |
335 |
336 | #Different metrics are used to check the model performance
337 | #Weighted precision, recall and f1_score are reported for all models
338 | classes = np.unique(y_test)
339 | y_test_array = pd.get_dummies(y_test, drop_first=False).values
340 |
341 | accuracy = metrics.accuracy_score(y_test, predicted)
342 | print("Accuracy:", round(accuracy,2))
343 | print("Auc:", round(metrics.roc_auc_score(y_test, predicted_prob, multi_class="ovr"),2)) #auc was previously undefined here
344 | print("Detail:")
345 | print(metrics.classification_report(y_test, predicted))
346 |
347 | ## Plot confusion matrix
348 | cm = metrics.confusion_matrix(y_test, predicted)
349 | fig, ax = plt.subplots()
350 | sns.heatmap(cm, annot=True, fmt='d', ax=ax, cmap=plt.cm.Blues,
351 | cbar=False)
352 | ax.set(xlabel="Pred", ylabel="True", xticklabels=classes,
353 | yticklabels=classes, title="Confusion matrix")
354 | plt.yticks(rotation=0)
355 |
356 | ## Plot roc-auc curve
357 | plt.figure()
358 | for i in range(len(classes)):
359 | fpr, tpr, thresholds = metrics.roc_curve(y_test_array[:,i],
360 | predicted_prob[:,i])
361 | plt.plot(fpr, tpr, lw=3,
362 | label='{0} (area={1:0.2f})'.format(classes[i],
363 | metrics.auc(fpr, tpr)) #Removed a stray figsize kwarg that str.format silently ignored
364 | )
365 |
366 | plt.plot([0,1], [0,1], color='navy', lw=3, linestyle='--')
367 | plt.xlim([-0.05,1.0])
368 | plt.ylim([0,1.05])
369 | plt.xlabel('False Positive Rate')
370 | plt.ylabel('True Positive Rate (Recall)')
371 | plt.title('Receiver operating characteristic')
372 |
373 | plt.legend(loc='best')
374 | plt.grid(True)
375 |
376 | ## Plot precision-recall curve
377 | plt.figure()
378 | for i in range(len(classes)):
379 | precision, recall, thresholds = metrics.precision_recall_curve(
380 | y_test_array[:,i], predicted_prob[:,i])
381 | plt.plot(recall, precision, lw=3,
382 | label='{0} (area={1:0.2f})'.format(classes[i],
383 | metrics.auc(recall, precision))
384 | )
385 | plt.xlim([0,1.05])
386 | plt.ylim([0,1.05])
387 | plt.xlabel('Recall')
388 | plt.ylabel('Precision')
389 | plt.title('Precision-Recall curve')
390 | plt.legend(loc="best")
391 | plt.grid(True)
392 | plt.show()
393 |
394 | #For finding out the performance of the DL models, use this cell
395 | #To evaluate a model other than the LSTM,
396 | # use the corresponding build function
397 | LSTM_model=build_model_LSTM(X_train,y_train)
398 | model_performance_DL(LSTM_model,X_train,y_train,X_test,y_test)
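# Usage sketch (an assumption, mirroring the call above): the CNN+LSTM variant
# evaluated with the same helper; the MLP from build_model works the same way.
CNN_LSTM_model=build_model_LSTM_CNN(X_train,y_train)
model_performance_DL(CNN_LSTM_model,X_train,y_train,X_test,y_test)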
399 |
400 |
401 |
402 |
403 |
404 |
405 |
406 |
407 |
408 |
--------------------------------------------------------------------------------
/Thermal_Comfort_Datasets/Scales.csv:
--------------------------------------------------------------------------------
1 | ,Age,Sex,Air velocity (m/s),Air temperature (¡C),Radiant temperature (¡C),Relative humidity (%),Outdoor monthly air temperature (¡C),Thermal sensation
2 | 0,1.0,2.0,0.1,22.3,23.1,29.0,38.0,-1
3 | 1,2.0,2.0,0.1,22.3,23.1,29.0,38.0,-1
4 | 2,1.0,2.0,0.1,22.3,23.1,29.0,38.0,-2
5 | 5,1.0,1.0,0.1,22.3,23.1,29.0,38.0,0
6 | 8,2.0,2.0,0.1,22.3,23.1,29.0,38.0,0
7 | 9,2.0,1.0,0.1,22.3,23.1,29.0,38.0,0
8 | 10,2.0,1.0,0.1,22.3,23.1,29.0,38.0,1
9 | 11,2.0,1.0,0.1,22.3,23.1,29.0,38.0,-1
10 | 14,1.0,1.0,0.1,22.3,23.1,29.0,38.0,1
11 | 17,2.0,2.0,0.1,22.3,23.1,29.0,38.0,-2
12 | 19,1.0,1.0,0.1,22.3,23.1,29.0,38.0,-1
13 | 21,2.0,1.0,0.1,22.975,23.1,29.0,38.0,-1
14 | 22,2.0,1.0,0.1,22.975,23.1,29.0,38.0,0
15 | 23,1.0,2.0,0.1,22.975,23.1,29.0,38.0,0
16 | 24,1.0,2.0,0.1,22.975,23.1,29.0,38.0,-1
17 | 25,2.0,2.0,0.1,22.975,23.1,29.0,38.0,0
18 | 26,1.0,1.0,0.1,22.975,23.1,29.0,38.0,-1
19 | 27,2.0,1.0,0.1,22.975,23.1,29.0,38.0,1
20 | 30,1.0,1.0,0.1,22.975,23.1,29.0,38.0,0
21 | 51,2.0,2.0,0.1,22.975,23.1,29.0,38.0,-1
22 | 54,1.0,1.0,0.1,22.65,23.1,21.5,40.0,0
23 | 55,2.0,1.0,0.1,22.65,23.1,21.5,40.0,-1
24 | 56,1.0,1.0,0.1,22.65,23.1,21.5,40.0,-1
25 | 60,1.0,2.0,0.1,22.65,23.1,21.5,40.0,-1
26 | 61,2.0,1.0,0.1,22.65,23.1,21.5,40.0,-2
27 | 63,2.0,1.0,0.1,22.65,23.1,21.5,40.0,0
28 | 69,1.0,2.0,0.1,22.65,23.1,21.5,40.0,0
29 | 73,2.0,2.0,0.1,22.65,23.1,21.5,40.0,-1
30 | 74,3.0,2.0,0.1,22.65,23.1,21.5,40.0,-2
31 | 77,1.0,1.0,0.1,23.7,23.1,49.0,26.8,0
32 | 78,2.0,2.0,0.1,23.7,23.1,49.0,26.8,0
33 | 79,2.0,1.0,0.1,23.7,23.1,49.0,26.8,1
34 | 80,2.0,1.0,0.1,23.7,23.1,49.0,26.8,-1
35 | 81,1.0,1.0,0.1,23.7,23.1,49.0,26.8,-1
36 | 82,1.0,1.0,0.1,23.7,23.1,49.0,26.8,1
37 | 87,2.0,1.0,0.1,23.7,23.1,49.0,26.8,0
38 | 95,1.0,2.0,0.1,23.7,23.1,49.0,26.8,0
39 | 97,2.0,2.0,0.1,23.7,23.1,49.0,26.8,1
40 | 105,1.0,2.0,0.1,23.9,23.1,42.0,29.8,0
41 | 107,2.0,2.0,0.1,23.9,23.1,42.0,29.8,0
42 | 108,2.0,1.0,0.1,23.9,23.1,42.0,29.8,-1
43 | 109,1.0,1.0,0.1,23.9,23.1,42.0,29.8,0
44 | 111,2.0,1.0,0.1,23.9,23.1,42.0,29.8,0
45 | 115,2.0,2.0,0.1,23.9,23.1,42.0,29.8,-1
46 | 124,1.0,2.0,0.1,23.9,23.1,42.0,29.8,-1
47 | 128,1.0,1.0,0.1,23.9,23.1,42.0,29.8,1
48 | 129,1.0,1.0,0.1,23.7,23.1,37.0,27.2,0
49 | 131,2.0,2.0,0.1,23.7,23.1,37.0,27.2,-1
50 | 132,2.0,1.0,0.1,23.7,23.1,37.0,27.2,-1
51 | 133,2.0,1.0,0.1,23.7,23.1,37.0,27.2,0
52 | 135,1.0,1.0,0.1,23.7,23.1,37.0,27.2,-1
53 | 138,1.0,2.0,0.1,23.7,23.1,37.0,27.2,0
54 | 139,2.0,2.0,0.1,23.7,23.1,37.0,27.2,0
55 | 145,3.0,2.0,0.1,23.7,23.1,37.0,27.2,-1
56 | 147,1.0,1.0,0.1,23.7,23.1,37.0,27.2,1
57 | 156,3.0,1.0,0.1,23.7,23.1,37.0,27.2,0
58 | 161,1.0,2.0,0.1,23.7,23.1,37.0,27.2,-1
59 | 171,1.0,2.0,0.1,23.7,23.1,37.0,27.2,-2
60 | 172,2.0,2.0,0.1,23.7,23.1,0.72,13.9,-1
61 | 173,1.0,2.0,0.1,23.7,23.1,0.72,13.9,-1
62 | 174,1.0,2.0,0.1,23.7,23.1,0.72,13.9,2
63 | 175,1.0,2.0,0.1,23.7,23.1,0.72,13.9,0
64 | 177,2.0,1.0,0.1,23.7,23.1,0.72,13.9,-1
65 | 178,1.0,1.0,0.1,23.7,23.1,0.72,13.9,-1
66 | 181,3.0,1.0,0.1,23.7,23.1,0.72,13.9,-1
67 | 184,4.0,2.0,0.1,23.7,23.1,0.72,13.9,0
68 | 186,1.0,2.0,0.1,23.7,23.1,0.72,13.9,1
69 | 194,1.0,1.0,0.1,23.7,23.1,0.72,13.9,0
70 | 195,2.0,1.0,0.1,23.7,23.1,0.72,13.9,0
71 | 199,1.0,1.0,0.1,23.7,23.1,0.72,13.9,2
72 | 211,2.0,2.0,0.1,23.7,23.1,0.72,13.9,0
73 | 212,1.0,2.0,0.1,23.7,23.1,0.72,13.9,-2
74 | 216,1.0,1.0,0.1,23.7,23.1,0.72,13.9,1
75 | 231,2.0,2.0,0.1,17.4,23.1,60.0,12.8,-1
76 | 232,2.0,2.0,0.1,17.4,23.1,60.0,12.8,0
77 | 234,3.0,1.0,0.1,17.4,23.1,60.0,12.8,-1
78 | 235,2.0,1.0,0.1,17.4,23.1,60.0,12.8,-1
79 | 236,2.0,1.0,0.1,17.4,23.1,60.0,12.8,1
80 | 237,3.0,2.0,0.1,17.4,23.1,60.0,12.8,-1
81 | 239,6.0,2.0,0.1,17.4,23.1,60.0,12.8,0
82 | 241,2.0,1.0,0.1,17.4,23.1,60.0,12.8,0
83 | 242,4.0,2.0,0.1,17.4,23.1,60.0,12.8,-1
84 | 250,3.0,2.0,0.1,17.4,23.1,60.0,12.8,0
85 | 258,4.0,2.0,0.1,17.4,23.1,60.0,12.8,0
86 | 261,5.0,1.0,0.1,17.4,23.1,60.0,12.8,0
87 | 263,5.0,2.0,0.1,17.4,23.1,60.0,12.8,-1
88 | 264,3.0,1.0,0.1,17.4,23.1,60.0,12.8,0
89 | 267,4.0,1.0,0.1,17.4,23.1,60.0,12.8,-1
90 | 269,2.0,2.0,0.1,17.4,23.1,60.0,12.8,1
91 | 300,2.0,2.0,0.1,23.7,23.1,65.0,25.6,0
92 | 301,2.0,2.0,0.1,23.7,23.1,65.0,25.6,-1
93 | 302,1.0,2.0,0.1,23.7,23.1,65.0,25.6,0
94 | 303,1.0,1.0,0.1,23.7,23.1,65.0,25.6,0
95 | 305,2.0,1.0,0.1,23.7,23.1,65.0,25.6,0
96 | 307,2.0,1.0,0.1,23.7,23.1,65.0,25.6,-1
97 | 313,1.0,1.0,0.1,23.7,23.1,65.0,25.6,1
98 | 314,5.0,1.0,0.1,23.7,23.1,56.0,28.4,0
99 | 315,3.0,1.0,0.1,23.7,23.1,56.0,28.4,2
100 | 316,3.0,1.0,0.1,23.7,23.1,56.0,28.4,0
101 | 317,2.0,2.0,0.1,23.7,23.1,56.0,28.4,0
102 | 319,2.0,1.0,0.1,23.7,23.1,56.0,28.4,0
103 | 321,3.0,2.0,0.1,23.7,23.1,56.0,28.4,0
104 | 324,3.0,2.0,0.1,23.7,23.1,56.0,28.4,1
105 | 325,6.0,2.0,0.1,23.7,23.1,56.0,28.4,1
106 | 327,3.0,2.0,0.1,23.7,23.1,56.0,28.4,2
107 | 331,3.0,1.0,0.1,23.7,23.1,56.0,28.4,1
108 | 334,2.0,1.0,0.1,23.7,23.1,56.0,28.4,2
109 | 337,2.0,2.0,0.1,23.7,23.1,56.0,28.4,1
110 | 341,2.0,2.0,0.1,23.7,23.1,56.0,28.4,-1
111 | 343,4.0,2.0,0.1,23.7,23.1,56.0,28.4,-1
112 | 356,2.0,2.0,0.1,23.7,23.1,56.0,28.4,2
113 | 358,6.0,2.0,0.1,23.7,23.1,56.0,28.4,0
114 | 360,4.0,2.0,0.1,23.7,23.1,56.0,28.4,1
115 | 361,5.0,2.0,0.1,23.7,23.1,56.0,28.4,1
116 | 362,1.0,1.0,0.1,24.7,23.1,65.0,21.9,-2
117 | 363,1.0,1.0,0.1,24.7,23.1,65.0,21.9,-1
118 | 364,5.0,2.0,0.1,24.7,23.1,65.0,21.9,0
119 | 365,1.0,2.0,0.1,24.7,23.1,65.0,21.9,-1
120 | 366,3.0,1.0,0.1,24.7,23.1,65.0,21.9,-1
121 | 370,1.0,2.0,0.1,24.7,23.1,65.0,21.9,-2
122 | 371,1.0,1.0,0.1,24.7,23.1,65.0,21.9,0
123 | 374,2.0,1.0,0.1,24.7,23.1,65.0,21.9,0
124 | 375,2.0,2.0,0.1,24.7,23.1,65.0,21.9,-1
125 | 376,1.0,2.0,0.1,24.7,23.1,65.0,21.9,0
126 | 383,6.0,2.0,0.1,24.7,23.1,65.0,21.9,-1
127 | 385,2.0,1.0,0.1,24.7,23.1,65.0,21.9,-1
128 | 388,2.0,2.0,0.1,24.7,23.1,65.0,21.9,0
129 | 418,3.0,2.0,0.1,24.7,23.1,65.0,21.9,-1
130 | 422,2.0,2.0,0.1,24.7,23.1,65.0,21.9,1
131 | 431,2.0,1.0,0.1,18.53,23.1,38.0,13.65,0
132 | 432,1.0,1.0,0.1,18.53,23.1,38.0,13.65,0
133 | 433,1.0,2.0,0.1,18.53,23.1,38.0,13.65,0
134 | 438,2.0,2.0,0.1,18.53,23.1,38.0,13.65,0
135 | 442,2.0,1.0,0.1,18.53,23.1,38.0,13.65,1
136 | 446,6.0,1.0,0.1,18.53,23.1,38.0,13.65,0
137 | 449,1.0,1.0,0.1,18.53,23.1,38.0,13.65,1
138 | 454,3.0,1.0,0.1,18.53,23.1,38.0,13.65,0
139 | 460,4.0,1.0,0.1,18.53,23.1,38.0,13.65,-1
140 | 462,3.0,2.0,0.1,18.53,23.1,38.0,13.65,0
141 | 463,2.0,2.0,0.1,23.03,23.1,37.8,18.24,0
142 | 464,2.0,2.0,0.1,23.03,23.1,37.8,18.24,1
143 | 466,2.0,2.0,0.1,23.03,23.1,37.8,18.24,-1
144 | 467,1.0,1.0,0.1,23.03,23.1,37.8,18.24,0
145 | 468,3.0,2.0,0.1,23.03,23.1,37.8,18.24,0
146 | 470,1.0,2.0,0.1,23.03,23.1,37.8,18.24,0
147 | 475,2.0,1.0,0.1,23.03,23.1,37.8,18.24,0
148 | 482,1.0,2.0,0.1,23.03,23.1,37.8,18.24,-1
149 | 483,1.0,1.0,0.05,25.2,25.2,64.0,16.0,0
150 | 484,1.0,2.0,0.05,25.2,25.2,64.0,16.0,0
151 | 486,2.0,2.0,0.05,25.2,25.2,64.0,16.0,0
152 | 492,1.0,2.0,0.05,25.2,25.2,64.0,16.0,1
153 | 496,2.0,2.0,0.05,25.2,25.2,64.0,16.0,1
154 | 497,2.0,1.0,0.05,25.2,25.2,64.0,16.0,0
155 | 500,3.0,2.0,0.05,25.2,25.2,64.0,16.0,0
156 | 504,6.0,2.0,0.05,25.2,25.2,64.0,16.0,0
157 | 516,2.0,2.0,0.05,25.2,25.2,64.0,16.0,-1
158 | 523,2.0,1.0,0.05,25.2,25.2,64.0,16.0,-1
159 | 530,2.0,2.0,0.1,23.7,23.1,65.0,16.7,0
160 | 531,3.0,2.0,0.1,23.7,23.1,65.0,16.7,0
161 | 534,3.0,1.0,0.1,23.7,23.1,68.0,15.8,0
162 | 535,3.0,2.0,0.1,23.7,23.1,68.0,15.8,-1
163 | 536,2.0,1.0,0.1,23.7,23.1,68.0,15.8,-2
164 | 537,2.0,1.0,0.1,23.7,23.1,68.0,15.8,0
165 | 538,2.0,2.0,0.1,23.7,23.1,68.0,15.8,-1
166 | 539,2.0,1.0,0.1,23.7,23.1,68.0,15.8,-1
167 | 542,5.0,1.0,0.1,23.7,23.1,68.0,15.8,0
168 | 545,2.0,2.0,0.1,23.7,23.1,68.0,15.8,0
169 | 547,3.0,2.0,0.1,23.7,23.1,68.0,15.8,0
170 | 552,3.0,1.0,0.1,23.7,23.1,68.0,15.8,-1
171 | 556,4.0,1.0,0.1,23.7,23.1,68.0,15.8,0
172 | 557,2.0,2.0,0.1,23.7,23.1,63.0,13.3,1
173 | 558,2.0,1.0,0.1,23.7,23.1,63.0,13.3,-1
174 | 559,1.0,1.0,0.1,23.7,23.1,63.0,13.3,-1
175 | 560,2.0,2.0,0.1,23.7,23.1,63.0,13.3,-1
176 | 561,2.0,1.0,0.1,23.7,23.1,63.0,13.3,0
177 | 562,1.0,2.0,0.1,23.7,23.1,63.0,13.3,0
178 | 563,1.0,1.0,0.1,23.7,23.1,63.0,13.3,0
179 | 564,1.0,1.0,0.1,23.7,23.1,63.0,13.3,1
180 | 567,6.0,2.0,0.1,23.7,23.1,63.0,13.3,0
181 | 569,3.0,2.0,0.1,23.7,23.1,63.0,13.3,0
182 | 575,1.0,2.0,0.1,23.7,23.1,63.0,13.3,-2
183 | 578,2.0,2.0,0.1,23.7,23.1,63.0,13.3,0
184 | 583,1.0,2.0,0.1,23.7,23.1,63.0,13.3,-1
185 | 586,2.0,2.0,0.1,23.7,23.1,55.0,24.5,0
186 | 587,3.0,1.0,0.1,23.7,23.1,55.0,24.5,-1
187 | 589,3.0,2.0,0.1,23.7,23.1,55.0,24.5,0
188 | 590,1.0,1.0,0.1,23.7,23.1,55.0,24.5,0
189 | 592,1.0,2.0,0.1,23.7,23.1,55.0,24.5,-1
190 | 593,1.0,1.0,0.1,23.7,23.1,55.0,24.5,-1
191 | 596,1.0,1.0,0.1,23.7,23.1,92.0,19.7,-1
192 | 597,1.0,2.0,0.1,23.7,23.1,92.0,19.7,0
193 | 601,1.0,1.0,0.1,23.7,23.1,92.0,19.7,0
194 | 607,1.0,2.0,0.1,23.7,23.1,92.0,19.7,-1
195 | 612,2.0,2.0,0.1,23.7,23.1,61.5,21.1,0
196 | 613,1.0,2.0,0.1,23.7,23.1,61.5,21.1,0
197 | 615,2.0,1.0,0.1,23.7,23.1,61.5,21.1,0
198 | 618,1.0,1.0,0.1,23.7,23.1,61.5,21.1,0
199 | 621,3.0,1.0,0.1,23.7,23.1,61.5,21.1,0
200 | 622,4.0,2.0,0.1,23.7,23.1,61.5,21.1,0
201 | 629,1.0,2.0,0.1,23.7,23.1,61.5,21.1,1
202 | 633,6.0,2.0,0.1,23.7,23.1,55.0,22.4,0
203 | 634,2.0,2.0,0.1,23.7,23.1,55.0,22.4,-1
204 | 635,1.0,1.0,0.1,23.7,23.1,55.0,22.4,0
205 | 637,2.0,1.0,0.1,23.7,23.1,55.0,22.4,0
206 | 640,3.0,2.0,0.1,23.7,23.1,55.0,22.4,0
207 | 641,2.0,2.0,0.1,23.7,23.1,55.0,22.4,0
208 | 642,1.0,2.0,0.1,23.7,23.1,55.0,22.4,0
209 | 649,5.0,1.0,0.1,23.7,23.1,55.0,22.4,0
210 | 650,3.0,1.0,0.1,23.7,23.1,55.0,22.4,0
211 | 651,2.0,1.0,0.1,23.7,23.1,55.0,22.4,-1
212 | 652,2.0,2.0,0.1,23.7,23.1,77.0,27.7,0
213 | 653,1.0,1.0,0.1,23.7,23.1,77.0,27.7,0
214 | 654,2.0,1.0,0.1,23.7,23.1,77.0,27.7,0
215 | 655,2.0,2.0,0.1,23.7,23.1,77.0,27.7,1
216 | 657,4.0,2.0,0.1,23.7,23.1,77.0,27.7,0
217 | 658,1.0,2.0,0.1,23.7,23.1,77.0,27.7,0
218 | 660,4.0,2.0,0.1,23.7,23.1,77.0,27.7,1
219 | 668,1.0,2.0,0.1,23.7,23.1,77.0,27.7,2
220 | 670,1.0,2.0,0.1,23.7,23.1,77.0,27.7,1
221 | 673,3.0,1.0,0.1,23.7,23.1,77.0,27.7,0
222 | 677,2.0,2.0,0.1,23.7,23.1,71.0,29.5,0
223 | 680,2.0,2.0,0.1,23.7,23.1,71.0,29.5,-1
224 | 681,2.0,1.0,0.1,23.7,23.1,71.0,29.5,-1
225 | 682,2.0,1.0,0.1,23.7,23.1,71.0,29.5,0
226 | 683,3.0,2.0,0.1,23.7,23.1,71.0,29.5,0
227 | 686,1.0,1.0,0.1,23.7,23.1,97.0,25.0,-1
228 | 687,1.0,2.0,0.1,23.7,23.1,97.0,25.0,0
229 | 688,2.0,2.0,0.1,23.7,23.1,97.0,25.0,-1
230 | 692,3.0,2.0,0.1,23.7,23.1,97.0,25.0,-1
231 | 693,2.0,2.0,0.1,23.7,23.1,97.0,25.0,0
232 | 694,1.0,2.0,0.1,23.7,23.1,97.0,25.0,-1
233 | 695,2.0,1.0,0.1,23.7,23.1,97.0,25.0,0
234 | 713,1.0,2.0,0.1,23.7,23.1,76.0,27.0,0
235 | 715,2.0,1.0,0.1,23.7,23.1,76.0,27.0,-1
236 | 716,2.0,2.0,0.1,23.7,23.1,76.0,27.0,-1
237 | 717,2.0,1.0,0.1,23.7,23.1,76.0,27.0,0
238 | 718,1.0,2.0,0.1,23.7,23.1,76.0,27.0,-1
239 | 725,2.0,2.0,0.1,23.7,23.1,76.0,27.0,0
240 | 728,2.0,1.0,0.1,23.7,23.1,59.0,30.0,0
241 | 729,2.0,2.0,0.1,23.7,23.1,59.0,30.0,0
242 | 732,1.0,2.0,0.1,23.7,23.1,69.0,17.4,0
243 | 733,2.0,1.0,0.1,23.7,23.1,69.0,17.4,0
244 | 734,2.0,1.0,0.1,23.7,23.1,69.0,17.4,-1
245 | 735,1.0,2.0,0.1,23.7,23.1,69.0,17.4,-1
246 | 737,3.0,2.0,0.1,23.7,23.1,69.0,17.4,-1
247 | 738,3.0,2.0,0.1,23.7,23.1,69.0,17.4,0
248 | 739,3.0,1.0,0.1,23.7,23.1,69.0,17.4,0
249 | 740,6.0,2.0,0.1,23.7,23.1,69.0,17.4,0
250 | 741,2.0,2.0,0.1,23.7,23.1,69.0,17.4,0
251 | 742,2.0,2.0,0.1,23.7,23.1,69.0,17.4,-1
252 | 743,1.0,1.0,0.1,23.7,23.1,91.0,11.7,0
253 | 744,1.0,1.0,0.1,23.7,23.1,91.0,11.7,-1
254 | 745,1.0,2.0,0.1,23.7,23.1,91.0,11.7,-1
255 | 746,2.0,2.0,0.1,23.7,23.1,91.0,11.7,0
256 | 747,1.0,2.0,0.1,23.7,23.1,91.0,11.7,0
257 | 748,2.0,2.0,0.1,23.7,23.1,91.0,11.7,-1
258 | 749,3.0,1.0,0.1,23.7,23.1,91.0,11.7,0
259 | 752,2.0,1.0,0.1,23.7,23.1,91.0,11.7,1
260 | 753,2.0,2.0,0.1,23.7,23.1,82.0,12.0,2
261 | 754,2.0,2.0,0.1,23.7,23.1,82.0,12.0,-1
262 | 755,1.0,2.0,0.1,23.7,23.1,82.0,12.0,-1
263 | 757,2.0,1.0,0.1,23.7,23.1,82.0,12.0,-1
264 | 758,4.0,1.0,0.1,23.7,23.1,82.0,12.0,-1
265 | 761,1.0,1.0,0.1,23.7,23.1,82.0,12.0,0
266 | 762,1.0,1.0,0.1,23.7,23.1,82.0,12.0,-1
267 | 764,1.0,2.0,0.1,23.7,23.1,82.0,12.0,0
268 | 765,2.0,2.0,0.1,23.7,23.1,82.0,12.0,1
269 | 766,2.0,2.0,0.1,23.7,23.1,82.0,12.0,0
270 | 770,2.0,1.0,0.1,23.7,23.1,82.0,12.0,0
271 | 774,3.0,2.0,0.1,23.7,23.1,82.0,12.0,-1
272 | 778,1.0,2.0,0.1,23.7,23.1,82.0,12.0,1
273 | 781,2.0,2.0,0.1,23.7,23.1,67.0,12.0,-1
274 | 782,2.0,2.0,0.1,23.7,23.1,67.0,12.0,0
275 | 783,1.0,2.0,0.1,23.7,23.1,67.0,12.0,-1
276 | 785,1.0,2.0,0.1,23.7,23.1,67.0,12.0,0
277 | 786,2.0,1.0,0.1,23.7,23.1,67.0,12.0,-1
278 | 788,3.0,2.0,0.1,23.7,23.1,67.0,12.0,-1
279 | 792,2.0,2.0,0.1,23.7,23.1,67.0,12.0,-2
280 | 795,2.0,1.0,0.1,23.7,23.1,67.0,12.0,0
281 | 805,1.0,1.0,0.03,21.875,21.9,82.0,14.0,0
282 | 806,1.0,2.0,0.03,21.875,21.9,82.0,14.0,-1
283 | 807,2.0,2.0,0.03,21.875,21.9,82.0,14.0,0
284 | 808,2.0,1.0,0.03,21.875,21.9,82.0,14.0,0
285 | 809,2.0,1.0,0.03,21.875,21.9,82.0,14.0,-2
286 | 811,2.0,1.0,0.03,21.875,21.9,82.0,14.0,-1
287 | 813,1.0,2.0,0.03,21.875,21.9,82.0,14.0,0
288 | 814,2.0,2.0,0.03,21.875,21.9,82.0,14.0,-1
289 | 819,2.0,2.0,0.03,21.875,21.9,82.0,14.0,2
290 | 820,2.0,2.0,0.03,21.875,21.9,82.0,14.0,1
291 | 827,2.0,1.0,0.03,21.875,21.9,82.0,14.0,1
292 | 829,2.0,2.0,0.05,23.25,23.1,88.0,15.0,0
293 | 830,2.0,2.0,0.05,23.25,23.1,88.0,15.0,-1
294 | 832,3.0,2.0,0.05,23.25,23.1,88.0,15.0,0
295 | 833,2.0,1.0,0.05,23.25,23.1,88.0,15.0,-1
296 | 836,2.0,1.0,0.05,23.25,23.1,88.0,15.0,1
297 | 837,2.0,1.0,0.05,23.25,23.1,88.0,15.0,0
298 | 838,2.0,2.0,0.05,23.25,23.1,88.0,15.0,1
299 | 849,3.0,2.0,0.05,23.25,23.1,88.0,15.0,1
300 | 856,2.0,2.0,0.05,23.25,23.1,88.0,15.0,2
301 | 860,1.0,2.0,0.05,23.25,23.1,88.0,15.0,-1
302 | 872,1.0,2.0,0.1,23.7,23.1,20.0,12.0,0
303 | 873,1.0,1.0,0.1,23.7,23.1,20.0,12.0,0
304 | 874,1.0,2.0,0.1,23.7,23.1,20.0,12.0,-1
305 | 878,1.0,1.0,0.1,23.7,23.1,20.0,12.0,-1
306 | 881,2.0,1.0,0.1,23.7,23.1,20.0,12.0,2
307 | 882,2.0,2.0,0.1,23.7,23.1,20.0,12.0,0
308 | 893,1.0,2.0,0.1,23.7,23.1,7.0,25.0,0
309 | 895,1.0,1.0,0.1,23.7,23.1,7.0,25.0,0
310 | 896,1.0,1.0,0.1,23.7,23.1,7.0,25.0,1
311 | 900,2.0,1.0,0.1,23.7,23.1,7.0,25.0,0
312 | 903,2.0,2.0,0.1,23.7,23.1,7.0,25.0,0
313 | 904,1.0,2.0,0.1,23.7,23.1,7.0,25.0,2
314 | 906,1.0,2.0,0.1,23.7,23.1,7.0,25.0,-1
315 | 908,2.0,2.0,0.1,23.7,23.1,7.0,25.0,-1
316 | 911,1.0,1.0,0.1,23.7,23.1,40.0,30.0,0
317 | 912,1.0,2.0,0.1,23.7,23.1,40.0,30.0,0
318 | 913,2.0,2.0,0.1,23.7,23.1,40.0,30.0,0
319 | 915,2.0,1.0,0.1,23.7,23.1,40.0,30.0,-1
320 | 917,2.0,1.0,0.1,23.7,23.1,40.0,30.0,0
321 | 919,1.0,1.0,0.1,23.7,23.1,40.0,30.0,-1
322 | 922,1.0,2.0,0.1,23.7,23.1,40.0,30.0,-1
323 | 926,1.0,1.0,0.1,23.7,23.1,36.0,25.0,-1
324 | 927,1.0,1.0,0.1,23.7,23.1,36.0,25.0,0
325 | 930,1.0,2.0,0.1,23.7,23.1,36.0,25.0,0
326 | 936,2.0,1.0,0.1,23.7,23.1,36.0,25.0,0
327 | 937,1.0,2.0,0.1,23.7,23.1,36.0,25.0,2
328 | 941,2.0,1.0,0.1,23.7,23.1,36.0,25.0,-1
329 | 942,1.0,2.0,0.1,23.7,23.1,36.0,25.0,-1
330 | 943,2.0,2.0,0.1,23.7,23.1,36.0,25.0,-1
331 | 944,1.0,2.0,0.1,23.7,23.1,36.0,-9.0,-2
332 | 945,1.0,2.0,0.1,23.7,23.1,36.0,-9.0,0
333 | 951,1.0,1.0,0.1,23.7,23.1,36.0,-9.0,0
334 | 953,1.0,2.0,0.1,23.7,23.1,36.0,-9.0,1
335 | 955,1.0,2.0,0.1,23.7,23.1,36.0,-9.0,-1
336 | 958,2.0,2.0,0.1,23.7,23.1,36.0,-9.0,0
337 | 962,2.0,2.0,0.1,23.7,23.1,25.0,14.0,0
338 | 963,2.0,1.0,0.1,23.7,23.1,25.0,14.0,0
339 | 964,1.0,1.0,0.1,23.7,23.1,25.0,14.0,0
340 | 968,1.0,1.0,0.1,23.7,23.1,25.0,14.0,1
341 | 971,2.0,1.0,0.1,23.7,23.1,25.0,14.0,-1
342 | 975,3.0,2.0,0.1,23.7,23.1,25.0,14.0,0
343 | 978,1.0,2.0,0.1,23.7,23.1,25.0,14.0,0
344 | 981,3.0,1.0,0.1,23.7,23.1,25.0,14.0,-1
345 | 982,3.0,1.0,0.1,23.7,23.1,25.0,14.0,0
346 | 986,3.0,2.0,0.1,23.7,23.1,25.0,14.0,1
347 | 992,4.0,1.0,0.1,23.7,23.1,25.0,14.0,0
348 | 995,2.0,2.0,0.1,26.35,23.1,15.0,32.0,-1
349 | 996,3.0,1.0,0.1,26.35,23.1,15.0,32.0,0
350 | 998,2.0,1.0,0.1,26.35,23.1,15.0,32.0,0
351 | 999,4.0,1.0,0.1,26.35,23.1,15.0,32.0,0
352 | 1002,3.0,2.0,0.1,26.35,23.1,15.0,32.0,-1
353 | 1004,2.0,2.0,0.1,26.35,23.1,15.0,32.0,0
354 | 1010,3.0,2.0,0.1,26.35,23.1,15.0,32.0,0
355 | 1011,2.0,1.0,0.1,26.35,23.1,15.0,32.0,-1
356 | 1024,1.0,1.0,0.1,26.35,23.1,15.0,32.0,0
357 | 1025,1.0,2.0,0.1,26.35,23.1,15.0,32.0,-1
358 | 1032,4.0,2.0,0.1,26.35,23.1,15.0,32.0,-1
359 | 1034,1.0,2.0,0.1,26.35,23.1,15.0,32.0,0
360 | 1038,3.0,1.0,0.1,26.35,23.1,15.0,32.0,-1
361 | 1039,4.0,1.0,0.1,23.7,23.1,28.5,35.8,-1
362 | 1040,1.0,1.0,0.1,23.7,23.1,28.5,35.8,0
363 | 1041,1.0,1.0,0.1,23.7,23.1,28.5,35.8,2
364 | 1042,1.0,2.0,0.1,23.7,23.1,28.5,35.8,0
365 | 1045,1.0,2.0,0.1,23.7,23.1,28.5,35.8,2
366 | 1051,1.0,1.0,0.1,23.7,23.1,28.5,35.8,1
367 | 1052,2.0,1.0,0.1,23.7,23.1,28.5,35.8,0
368 | 1054,2.0,1.0,0.1,23.7,23.1,28.5,35.8,2
369 | 1064,2.0,2.0,0.1,23.7,23.1,23.3,40.8,-1
370 | 1065,4.0,1.0,0.1,23.7,23.1,23.3,40.8,-1
371 | 1066,3.0,1.0,0.1,23.7,23.1,23.3,40.8,-1
372 | 1068,3.0,1.0,0.1,23.7,23.1,23.3,40.8,0
373 | 1069,2.0,1.0,0.1,23.7,23.1,23.3,40.8,-1
374 | 1070,3.0,2.0,0.1,23.7,23.1,23.3,40.8,0
375 | 1072,1.0,1.0,0.1,23.7,23.1,48.3,32.8,2
376 | 1075,3.0,1.0,0.1,23.7,23.1,48.3,32.8,0
377 | 1077,1.0,1.0,0.1,23.7,23.1,48.3,32.8,1
378 | 1078,2.0,2.0,0.1,23.7,23.1,48.3,32.8,2
379 | 1081,1.0,2.0,0.1,23.7,23.1,48.3,32.8,2
380 | 1084,2.0,2.0,0.1,22.4,23.1,54.0,14.1,-1
381 | 1085,6.0,2.0,0.1,22.4,23.1,54.0,14.1,-1
382 | 1086,3.0,1.0,0.1,22.4,23.1,54.0,14.1,-1
383 | 1089,4.0,2.0,0.1,22.4,23.1,54.0,14.1,0
384 | 1091,3.0,2.0,0.1,22.4,23.1,54.0,14.1,-1
385 | 1094,3.0,1.0,0.1,22.4,23.1,54.0,14.1,0
386 | 1095,3.0,2.0,0.1,22.4,23.1,54.0,14.1,0
387 | 1101,5.0,2.0,0.1,22.4,23.1,54.0,14.1,-1
388 | 1104,6.0,2.0,0.1,22.4,23.1,54.0,14.1,0
389 | 1110,4.0,2.0,0.1,22.4,23.1,54.0,14.1,1
390 | 1113,2.0,1.0,0.1,22.4,23.1,54.0,14.1,-1
391 | 1114,4.0,1.0,0.1,22.4,23.1,54.0,14.1,1
392 | 1115,4.0,1.0,0.1,22.4,23.1,54.0,14.1,-1
393 | 1116,5.0,2.0,0.1,22.4,23.1,54.0,14.1,0
394 | 1119,5.0,1.0,0.1,22.4,23.1,54.0,14.1,-1
395 | 1120,5.0,1.0,0.1,22.4,23.1,54.0,14.1,0
396 | 1123,4.0,1.0,0.03,20.5,23.1,31.0,22.7,-1
397 | 1124,3.0,2.0,0.03,20.5,23.1,31.0,22.7,0
398 | 1125,2.0,1.0,0.03,20.5,23.1,31.0,22.7,-1
399 | 1126,2.0,1.0,0.03,20.5,23.1,31.0,22.7,0
400 | 1128,2.0,2.0,0.03,20.5,23.1,31.0,22.7,0
401 | 1130,2.0,2.0,0.03,20.5,23.1,31.0,22.7,-1
402 | 1131,2.0,2.0,0.1,21.9,23.1,75.0,14.1,0
403 | 1133,2.0,1.0,0.1,21.9,23.1,75.0,14.1,0
404 | 1134,2.0,1.0,0.1,21.9,23.1,75.0,14.1,-1
405 | 1136,2.0,2.0,0.1,21.9,23.1,75.0,14.1,-1
406 | 1145,2.0,1.0,0.1,21.9,23.1,75.0,14.1,-2
407 | 1146,1.0,2.0,0.1,21.9,23.1,75.0,14.1,-2
408 | 1152,2.0,2.0,0.1,21.9,23.1,75.0,14.1,-2
409 | 1159,3.0,2.0,0.1,21.9,23.1,75.0,14.1,-2
410 | 1172,1.0,1.0,0.1,22.0,23.1,60.0,14.5,0
411 | 1174,1.0,2.0,0.1,22.0,23.1,60.0,14.5,-1
412 | 1175,1.0,2.0,0.1,22.0,23.1,60.0,14.5,0
413 | 1180,1.0,1.0,0.1,22.0,23.1,60.0,14.5,-1
414 | 1191,2.0,1.0,0.1,22.0,23.1,60.0,14.5,-1
415 | 1203,1.0,1.0,0.1,22.8,23.1,36.0,20.4,0
416 | 1204,1.0,2.0,0.1,22.8,23.1,36.0,20.4,-1
417 | 1208,1.0,2.0,0.1,22.8,23.1,36.0,20.4,0
418 | 1216,1.0,2.0,0.1,22.8,23.1,36.0,20.4,1
419 | 1237,1.0,1.0,0.1,22.8,23.1,36.0,20.4,1
420 | 1244,1.0,2.0,0.1,23.7,23.1,39.0,30.43,1
421 | 1245,1.0,1.0,0.1,23.7,23.1,39.0,30.43,2
422 | 1246,2.0,2.0,0.1,23.7,23.1,39.0,30.43,0
423 | 1247,3.0,2.0,0.1,23.7,23.1,39.0,30.43,0
424 | 1248,1.0,1.0,0.1,23.7,23.1,39.0,30.43,0
425 | 1249,1.0,1.0,0.1,23.7,23.1,39.0,30.43,1
426 | 1250,1.0,1.0,0.1,23.7,23.1,39.0,30.43,-1
427 | 1251,2.0,2.0,0.1,23.7,23.1,39.0,30.43,1
428 | 1254,4.0,1.0,0.1,23.7,23.1,39.0,30.43,1
429 | 1266,2.0,1.0,0.1,23.7,23.1,39.0,30.43,1
430 | 1269,1.0,2.0,0.1,23.7,23.1,39.0,30.43,0
431 | 1284,3.0,1.0,0.1,23.7,23.1,88.14,19.46,-1
432 | 1285,2.0,1.0,0.1,23.7,23.1,88.14,19.46,0
433 | 1289,2.0,1.0,0.1,23.7,23.1,88.14,19.46,-1
434 | 1294,2.0,2.0,0.1,23.7,23.1,88.14,19.46,0
435 | 1295,2.0,2.0,0.1,23.7,23.1,99.73,3.88,0
436 | 1296,2.0,2.0,0.1,23.7,23.1,99.73,3.88,-2
437 | 1297,1.0,2.0,0.1,23.7,23.1,99.73,3.88,0
438 | 1298,1.0,1.0,0.1,23.7,23.1,99.73,3.88,-1
439 | 1300,2.0,1.0,0.1,23.7,23.1,99.73,3.88,-1
440 | 1303,2.0,1.0,0.1,23.7,23.1,99.73,3.88,0
441 | 1304,1.0,1.0,0.1,23.7,23.1,99.73,3.88,0
442 | 1307,2.0,1.0,0.1,23.7,23.1,99.73,3.88,1
443 | 1312,1.0,1.0,0.1,23.7,23.1,99.73,3.88,1
444 | 1319,1.0,2.0,0.1,23.7,23.1,99.73,3.88,1
445 | 1330,2.0,2.0,0.1,23.7,23.1,99.73,3.88,-1
446 | 1345,1.0,2.0,0.1,23.7,23.1,99.73,3.88,-1
447 | 1352,2.0,2.0,0.1,23.7,23.1,88.0,8.3,0
448 | 1353,1.0,1.0,0.1,23.7,23.1,88.0,8.3,-1
449 | 1354,1.0,2.0,0.1,23.7,23.1,88.0,8.3,0
450 | 1358,1.0,1.0,0.1,23.7,23.1,88.0,8.3,0
451 | 1361,3.0,1.0,0.1,23.7,23.1,88.0,8.3,0
452 | 1364,3.0,2.0,0.1,23.7,23.1,88.0,8.3,1
453 | 1366,3.0,2.0,0.1,23.7,23.1,88.0,8.3,0
454 | 1369,2.0,1.0,0.1,23.7,23.1,88.0,8.3,0
455 | 1373,4.0,2.0,0.1,23.7,23.1,88.0,8.3,0
456 | 1375,1.0,2.0,0.1,23.7,23.1,88.0,8.3,-1
457 | 1376,1.0,1.0,0.1,23.7,23.1,88.0,8.3,1
458 | 1378,2.0,1.0,0.1,23.7,23.1,88.0,8.3,-1
459 | 1383,4.0,1.0,0.1,23.7,23.1,88.0,8.3,0
460 | 1407,2.0,2.0,0.1,23.7,23.1,88.0,8.3,1
461 | 1413,2.0,2.0,0.1,23.7,23.1,88.0,8.3,-1
462 | 1418,1.0,2.0,0.1,23.7,23.1,88.0,8.3,1
463 | 1435,1.0,2.0,0.1,23.7,23.1,75.0,3.3,1
464 | 1436,4.0,1.0,0.1,23.7,23.1,75.0,3.3,0
465 | 1437,2.0,1.0,0.1,23.7,23.1,75.0,3.3,0
466 | 1440,2.0,2.0,0.1,23.7,23.1,75.0,3.3,0
467 | 1441,1.0,1.0,0.1,23.7,23.1,75.0,3.3,0
468 | 1447,3.0,1.0,0.1,23.7,23.1,75.0,3.3,0
469 | 1451,1.0,2.0,0.1,23.7,23.1,75.0,3.3,0
470 | 1454,2.0,2.0,0.1,23.7,23.1,91.0,1.1,0
471 | 1455,1.0,1.0,0.1,23.7,23.1,91.0,1.1,0
472 | 1456,3.0,1.0,0.1,23.7,23.1,91.0,1.1,-1
473 | 1457,2.0,2.0,0.1,23.7,23.1,91.0,1.1,-1
474 | 1459,1.0,1.0,0.1,23.7,23.1,91.0,1.1,-1
475 | 1460,2.0,2.0,0.1,23.7,23.1,91.0,1.1,1
476 | 1463,2.0,1.0,0.1,23.7,23.1,91.0,1.1,1
477 | 1464,1.0,2.0,0.1,23.7,23.1,91.0,1.1,0
478 | 1465,2.0,1.0,0.1,23.7,23.1,91.0,1.1,-1
479 | 1468,1.0,2.0,0.1,23.7,23.1,91.0,1.1,1
480 | 1470,3.0,1.0,0.1,23.7,23.1,91.0,1.1,0
481 | 1473,4.0,1.0,0.1,23.7,23.1,77.0,15.0,-1
482 | 1474,2.0,2.0,0.1,23.7,23.1,77.0,15.0,0
483 | 1477,2.0,2.0,0.1,23.7,23.1,77.0,15.0,-1
484 | 1479,5.0,2.0,0.1,23.7,23.1,77.0,15.0,0
485 | 1481,2.0,1.0,0.1,23.7,23.1,77.0,15.0,0
486 | 1486,5.0,2.0,0.1,23.7,23.1,77.0,15.0,1
487 | 1489,3.0,2.0,0.1,23.7,23.1,77.0,15.0,-1
488 | 1495,2.0,1.0,0.1,23.7,23.1,77.0,15.0,-1
489 | 1500,4.0,2.0,0.1,23.7,23.1,77.0,15.0,0
490 | 1506,3.0,2.0,0.1,23.7,23.1,77.0,15.0,0
491 | 1518,3.0,1.0,0.1,23.7,23.1,77.0,15.0,0
492 | 1519,2.0,1.0,0.1,23.7,23.1,77.0,15.0,1
493 | 1520,2.0,1.0,0.1,23.7,23.1,60.0,18.3,0
494 | 1521,1.0,2.0,0.1,23.7,23.1,60.0,18.3,0
495 | 1522,3.0,2.0,0.1,23.7,23.1,60.0,18.3,0
496 | 1523,3.0,1.0,0.1,23.7,23.1,60.0,18.3,-1
497 | 1524,1.0,1.0,0.1,23.7,23.1,60.0,18.3,1
498 | 1526,1.0,1.0,0.1,23.7,23.1,60.0,18.3,0
499 | 1532,2.0,2.0,0.1,23.7,23.1,60.0,18.3,0
500 | 1542,2.0,2.0,0.1,23.7,23.1,60.0,18.3,1
501 | 1548,3.0,2.0,0.1,23.7,23.1,60.0,18.3,1
502 | 1549,2.0,2.0,0.1,23.7,23.1,60.0,18.3,-1
503 | 1551,2.0,1.0,0.1,23.7,23.1,60.0,18.3,-1
504 | 1553,1.0,1.0,0.1,23.7,23.1,60.0,18.3,-1
505 | 1556,3.0,2.0,0.1,23.7,23.1,53.0,21.7,0
506 | 1557,2.0,1.0,0.1,23.7,23.1,53.0,21.7,0
507 | 1558,3.0,1.0,0.1,23.7,23.1,53.0,21.7,0
508 | 1559,2.0,1.0,0.1,23.7,23.1,53.0,21.7,-1
509 | 1560,1.0,1.0,0.1,23.7,23.1,53.0,21.7,0
510 | 1561,1.0,1.0,0.1,23.7,23.1,53.0,21.7,-1
511 | 1565,1.0,2.0,0.1,23.7,23.1,53.0,21.7,1
512 | 1566,2.0,2.0,0.1,23.7,23.1,53.0,21.7,0
513 | 1568,2.0,2.0,0.1,23.7,23.1,53.0,21.7,2
514 | 1569,1.0,2.0,0.1,23.7,23.1,53.0,21.7,0
515 | 1571,2.0,2.0,0.1,23.7,23.1,53.0,21.7,1
516 | 1575,1.0,2.0,0.1,23.0,23.1,44.0,12.0,0
517 | 1577,2.0,2.0,0.1,23.0,23.1,44.0,12.0,0
518 | 1578,1.0,2.0,0.1,23.0,23.1,44.0,12.0,1
519 | 1582,1.0,1.0,0.1,23.0,23.1,44.0,12.0,0
520 | 1585,2.0,2.0,0.1,23.0,23.1,44.0,12.0,1
521 | 1586,1.0,1.0,0.1,23.0,23.1,44.0,12.0,1
522 | 1587,1.0,1.0,0.1,23.0,23.1,44.0,12.0,-1
523 | 1589,2.0,1.0,0.1,23.0,23.1,44.0,12.0,0
524 | 1592,1.0,2.0,0.1,23.0,23.1,44.0,12.0,-1
525 | 1595,2.0,1.0,0.1,23.0,23.1,44.0,12.0,-1
526 | 1607,4.0,1.0,0.1,23.0,23.1,44.0,12.0,0
527 | 1613,3.0,2.0,0.1,23.0,23.1,44.0,12.0,0
528 | 1626,3.0,1.0,0.1,23.0,23.1,44.0,12.0,1
529 | 1647,2.0,2.0,0.1,23.0,23.1,44.0,12.0,-1
530 | 1687,3.0,1.0,0.1,23.0,23.1,44.0,12.0,0
531 | 1706,1.0,2.0,0.1,24.1,23.1,53.0,27.8,-1
532 | 1709,2.0,2.0,0.1,24.1,23.1,53.0,27.8,-1
533 | 1713,1.0,1.0,0.1,24.1,23.1,53.0,27.8,0
534 | 1714,1.0,1.0,0.1,24.1,23.1,53.0,27.8,-1
535 | 1717,1.0,2.0,0.1,24.1,23.1,53.0,27.8,0
536 | 1721,2.0,1.0,0.1,24.1,23.1,53.0,27.8,-1
537 | 1726,3.0,2.0,0.1,24.1,23.1,53.0,27.8,-1
538 | 1747,2.0,2.0,0.1,24.1,23.1,53.0,27.8,0
539 | 1758,4.0,2.0,0.1,24.1,23.1,53.0,27.8,-1
540 | 1776,1.0,1.0,0.1,24.1,23.1,53.0,27.8,-2
541 | 1780,1.0,1.0,0.1,24.1,23.1,53.0,27.8,1
542 | 1782,1.0,2.0,0.1,22.0,23.1,91.0,13.1,-1
543 | 1783,1.0,2.0,0.1,22.0,23.1,91.0,13.1,1
544 | 1784,1.0,2.0,0.1,22.0,23.1,91.0,13.1,0
545 | 1785,1.0,1.0,0.1,22.0,23.1,91.0,13.1,-1
546 | 1786,2.0,1.0,0.1,22.0,23.1,91.0,13.1,-1
547 | 1787,2.0,2.0,0.1,22.0,23.1,91.0,13.1,0
548 | 1796,2.0,1.0,0.1,22.0,23.1,91.0,13.1,0
549 | 1797,2.0,2.0,0.1,22.0,23.1,91.0,13.1,-1
550 | 1807,1.0,1.0,0.1,22.0,23.1,91.0,13.1,0
551 | 1871,1.0,2.0,0.1,22.0,23.1,91.0,13.1,2
552 | 1909,1.0,1.0,0.1,21.75,23.1,91.0,8.2,0
553 | 1911,3.0,2.0,0.1,21.75,23.1,91.0,8.2,0
554 | 1913,1.0,2.0,0.1,21.75,23.1,91.0,8.2,0
555 | 1916,1.0,2.0,0.1,21.75,23.1,91.0,8.2,-1
556 | 1920,1.0,1.0,0.1,21.75,23.1,91.0,8.2,-1
557 | 1928,2.0,1.0,0.1,21.75,23.1,91.0,8.2,-1
558 | 1931,1.0,2.0,0.1,21.75,23.1,91.0,8.2,1
559 | 1951,2.0,2.0,0.1,21.75,23.1,91.0,8.2,0
560 | 1952,3.0,1.0,0.1,21.75,23.1,91.0,8.2,-1
561 | 1958,2.0,1.0,0.1,21.75,23.1,91.0,8.2,0
562 | 1961,3.0,1.0,0.1,23.7,23.1,97.0,8.9,0
563 | 1962,4.0,1.0,0.1,23.7,23.1,97.0,8.9,0
564 | 1963,2.0,1.0,0.1,23.7,23.1,97.0,8.9,-1
565 | 1964,2.0,1.0,0.1,23.7,23.1,97.0,8.9,0
566 | 1971,3.0,1.0,0.1,23.7,23.1,97.0,8.9,-1
567 | 1974,3.0,2.0,0.1,23.7,23.1,97.0,8.9,1
568 | 1976,2.0,2.0,0.1,23.7,23.1,97.0,8.9,-1
569 | 1981,5.0,2.0,0.1,23.7,23.1,97.0,8.9,0
570 | 1983,5.0,2.0,0.1,23.7,23.1,97.0,8.9,-1
571 | 1985,4.0,2.0,0.1,23.7,23.1,97.0,8.9,-1
572 | 1986,2.0,2.0,0.1,23.7,23.1,87.0,10.0,-1
573 | 1987,2.0,1.0,0.1,23.7,23.1,87.0,10.0,-1
574 | 1988,1.0,2.0,0.1,23.7,23.1,87.0,10.0,0
575 | 1990,2.0,1.0,0.1,23.7,23.1,87.0,10.0,1
576 | 1991,1.0,1.0,0.1,23.7,23.1,87.0,10.0,-1
577 | 1992,1.0,1.0,0.1,23.7,23.1,87.0,10.0,0
578 | 1993,1.0,1.0,0.1,23.7,23.1,87.0,10.0,1
579 | 1994,2.0,1.0,0.1,23.7,23.1,87.0,10.0,0
580 | 1998,6.0,1.0,0.1,23.7,23.1,87.0,10.0,0
581 | 1999,2.0,1.0,0.1,23.7,23.1,87.0,10.0,-2
582 | 2003,2.0,2.0,0.1,23.7,23.1,87.0,10.0,0
583 | 2007,4.0,1.0,0.1,23.7,23.1,87.0,10.0,0
584 | 2018,3.0,2.0,0.1,23.7,23.1,87.0,10.0,0
585 | 2024,1.0,2.0,0.1,23.7,23.1,87.0,10.0,1
586 | 2049,6.0,2.0,0.1,23.7,23.1,87.0,10.0,0
587 | 2054,2.0,1.0,0.1,23.7,23.1,51.0,20.4,0
588 | 2056,2.0,1.0,0.1,23.7,23.1,51.0,20.4,1
589 | 2057,1.0,1.0,0.1,23.7,23.1,51.0,20.4,0
590 | 2060,1.0,2.0,0.1,23.7,23.1,51.0,20.4,0
591 | 2063,3.0,2.0,0.1,23.7,23.1,51.0,20.4,-1
592 | 2069,2.0,1.0,0.1,23.7,23.1,51.0,20.4,-1
593 | 2078,3.0,1.0,0.1,23.7,23.1,51.0,20.4,0
594 | 2081,2.0,2.0,0.1,23.7,23.1,51.0,20.4,0
595 | 2093,1.0,1.0,0.1,23.7,23.1,51.0,20.4,-1
596 | 2100,3.0,2.0,0.1,23.7,23.1,51.0,20.4,0
597 | 2101,3.0,1.0,0.1,23.7,23.1,51.0,20.4,-1
598 | 2102,4.0,1.0,0.1,23.7,23.1,51.0,20.4,0
599 | 2103,2.0,2.0,0.1,23.7,23.1,51.0,20.4,-1
600 | 2104,1.0,2.0,0.1,23.4,23.1,71.3,14.27,0
601 | 2105,1.0,1.0,0.1,23.4,23.1,71.3,14.27,-1
602 | 2106,2.0,2.0,0.1,23.4,23.1,71.3,14.27,0
603 | 2108,2.0,1.0,0.1,23.4,23.1,71.3,14.27,-1
604 | 2109,1.0,1.0,0.1,23.4,23.1,71.3,14.27,0
605 | 2112,4.0,1.0,0.1,23.4,23.1,71.3,14.27,0
606 | 2118,2.0,1.0,0.1,23.4,23.1,71.3,14.27,-2
607 | 2119,1.0,1.0,0.1,23.4,23.1,71.3,14.27,-2
608 | 2135,1.0,2.0,0.1,23.4,23.1,71.3,14.27,-1
609 | 2137,2.0,2.0,0.1,23.4,23.1,71.3,14.27,-1
610 | 2140,2.0,1.0,0.1,23.4,23.1,71.3,14.27,0
611 | 2160,2.0,1.0,0.1,22.6,23.1,79.0,3.3,-1
612 | 2161,2.0,2.0,0.1,22.6,23.1,79.0,3.3,0
613 | 2164,2.0,2.0,0.1,22.6,23.1,79.0,3.3,-1
614 | 2169,3.0,1.0,0.1,22.6,23.1,79.0,3.3,-1
615 | 2170,3.0,2.0,0.1,22.6,23.1,79.0,3.3,0
616 | 2172,2.0,2.0,0.1,23.7,23.1,67.8,-1.12,0
617 | 2173,1.0,2.0,0.1,23.7,23.1,67.8,-1.12,-1
618 | 2174,3.0,2.0,0.1,23.7,23.1,67.8,-1.12,-1
619 | 2177,1.0,1.0,0.1,23.7,23.1,67.8,-1.12,0
620 | 2178,1.0,1.0,0.1,23.7,23.1,67.8,-1.12,-1
621 | 2179,2.0,2.0,0.1,23.7,23.1,67.8,-1.12,-1
622 | 2181,2.0,1.0,0.1,23.7,23.1,67.8,-1.12,-1
623 | 2182,2.0,1.0,0.1,23.7,23.1,67.8,-1.12,0
624 | 2183,3.0,2.0,0.1,23.7,23.1,67.8,-1.12,0
625 | 2188,1.0,2.0,0.1,23.7,23.1,67.8,-1.12,0
626 | 2190,1.0,2.0,0.1,23.7,23.1,67.8,-1.12,1
627 | 2192,2.0,2.0,0.1,23.7,23.1,67.8,-1.12,1
628 | 2198,4.0,1.0,0.1,23.7,23.1,67.8,-1.12,0
629 | 2218,3.0,1.0,0.1,23.7,23.1,67.8,-1.12,-1
630 | 2221,1.0,2.0,0.1,23.7,23.1,53.8949999999999,16.935,-1
631 | 2224,1.0,2.0,0.1,23.7,23.1,53.8949999999999,16.935,0
632 | 2225,2.0,2.0,0.1,23.7,23.1,53.8949999999999,16.935,-1
633 | 2228,2.0,1.0,0.1,23.7,23.1,53.8949999999999,16.935,0
634 | 2233,1.0,2.0,0.1,22.0,23.1,52.245,17.045,1
635 | 2234,1.0,2.0,0.1,22.0,23.1,52.245,17.045,-1
636 | 2235,1.0,2.0,0.1,22.0,23.1,52.245,17.045,0
637 | 2236,2.0,2.0,0.1,22.0,23.1,52.245,17.045,-1
638 | 2237,1.0,2.0,0.1,22.0,23.1,52.245,17.045,-2
639 | 2238,1.0,1.0,0.1,22.0,23.1,52.245,17.045,-1
640 | 2239,2.0,2.0,0.1,22.0,23.1,52.245,17.045,0
641 | 2255,1.0,1.0,0.1,19.0,23.1,41.7,17.5366666666666,-1
642 | 2256,2.0,2.0,0.1,19.0,23.1,41.7,17.5366666666666,0
643 | 2257,1.0,2.0,0.1,19.0,23.1,41.7,17.5366666666666,-1
644 | 2258,1.0,1.0,0.1,19.0,23.1,41.7,17.5366666666666,0
645 | 2260,1.0,2.0,0.1,19.0,23.1,41.7,17.5366666666666,0
646 | 2261,2.0,2.0,0.1,19.0,23.1,41.7,17.5366666666666,-1
647 | 2264,2.0,1.0,0.1,19.0,23.1,41.7,17.5366666666666,-1
648 | 2278,2.0,1.0,0.1,19.0,23.1,41.7,17.5366666666666,0
649 | 2280,2.0,2.0,0.2,23.05,23.1,64.16,15.035,0
650 | 2281,2.0,2.0,0.2,23.05,23.1,64.16,15.035,-1
651 | 2285,3.0,2.0,0.2,23.05,23.1,64.16,15.035,0
652 | 2288,3.0,1.0,0.2,23.05,23.1,64.16,15.035,0
653 | 2289,2.0,1.0,0.2,23.05,23.1,64.16,15.035,0
654 | 2293,2.0,1.0,0.2,23.05,23.1,64.16,15.035,1
655 | 2296,2.0,2.0,0.2,23.05,23.1,64.16,15.035,1
656 | 2298,2.0,1.0,0.2,23.05,23.1,64.16,15.035,-1
657 | 2299,3.0,2.0,0.4,20.65,23.1,37.7549999999999,19.94,0
658 | 2300,2.0,2.0,0.4,20.65,23.1,37.7549999999999,19.94,0
659 | 2301,2.0,2.0,0.4,20.65,23.1,37.7549999999999,19.94,-1
660 | 2309,3.0,1.0,0.4,20.65,23.1,37.7549999999999,19.94,0
661 | 2315,2.0,1.0,0.4,20.65,23.1,37.7549999999999,19.94,0
662 | 2319,2.0,2.0,0.2,21.75,23.1,37.256666666666604,20.25,0
663 | 2320,1.0,2.0,0.2,21.75,23.1,37.256666666666604,20.25,-1
664 | 2323,1.0,2.0,0.2,21.75,23.1,37.256666666666604,20.25,0
665 | 2325,3.0,2.0,0.2,21.75,23.1,37.256666666666604,20.25,-1
666 | 2330,1.0,1.0,0.2,21.75,23.1,37.256666666666604,20.25,0
667 | 2331,1.0,2.0,0.2,21.75,23.1,37.256666666666604,20.25,-2
668 | 2341,2.0,2.0,0.3,21.25,23.1,36.565,20.25,0
669 | 2342,2.0,1.0,0.3,21.25,23.1,36.565,20.25,0
670 | 2343,1.0,2.0,0.3,21.25,23.1,36.565,20.25,0
671 | 2348,3.0,2.0,0.3,21.25,23.1,36.565,20.25,-1
672 | 2353,2.0,2.0,0.3,21.25,23.1,36.565,20.25,-1
673 | 2355,3.0,2.0,0.3,21.25,23.1,36.565,20.25,0
674 | 2362,2.0,2.0,0.3,21.25,23.1,36.565,20.25,1
675 | 2368,3.0,1.0,0.2,23.25,23.1,46.9,20.18,1
676 | 2369,2.0,2.0,0.2,23.25,23.1,46.9,20.18,0
677 | 2372,3.0,2.0,0.2,23.25,23.1,46.9,20.18,0
678 | 2374,2.0,2.0,0.2,23.25,23.1,46.9,20.18,-1
679 | 2376,2.0,2.0,0.1,21.65,23.1,82.1428571428571,26.9714285714285,-1
680 | 2377,2.0,2.0,0.1,21.65,23.1,82.1428571428571,26.9714285714285,0
681 | 2383,2.0,1.0,0.1,21.65,23.1,82.1428571428571,26.9714285714285,-1
682 | 2384,2.0,2.0,0.1,21.65,23.1,82.1428571428571,26.9714285714285,-2
683 | 2390,1.0,2.0,0.2,24.35,23.1,79.5714285714285,27.7,-1
684 | 2391,2.0,1.0,0.2,24.35,23.1,79.5714285714285,27.7,-1
685 | 2392,1.0,1.0,0.2,24.35,23.1,79.5714285714285,27.7,0
686 | 2393,2.0,2.0,0.2,24.35,23.1,79.5714285714285,27.7,-1
687 | 2398,2.0,2.0,0.2,24.35,23.1,79.5714285714285,27.7,0
688 | 2405,1.0,2.0,0.2,24.35,23.1,79.5714285714285,27.7,0
689 | 2410,1.0,1.0,0.2,24.35,23.1,79.5714285714285,27.7,-1
690 | 2415,2.0,1.0,0.2,24.35,23.1,79.5714285714285,27.7,0
691 | 2423,3.0,2.0,0.2,24.35,23.1,79.5714285714285,27.7,-1
692 | 2429,2.0,2.0,0.1,26.2,23.1,79.5714285714285,27.7,-1
693 | 2430,2.0,2.0,0.1,26.2,23.1,79.5714285714285,27.7,1
694 | 2431,2.0,2.0,0.1,26.2,23.1,79.5714285714285,27.7,0
695 | 2440,2.0,1.0,0.1,26.2,23.1,79.5714285714285,27.7,0
696 | 2447,1.0,2.0,0.1,26.2,23.1,79.5714285714285,27.7,-1
697 | 2451,1.0,2.0,0.1,21.55,23.1,74.71428571428571,28.957142857142802,-1
698 | 2452,2.0,1.0,0.1,21.55,23.1,74.71428571428571,28.957142857142802,-2
699 | 2453,1.0,1.0,0.1,21.55,23.1,74.71428571428571,28.957142857142802,-1
700 | 2455,2.0,1.0,0.1,21.55,23.1,74.71428571428571,28.957142857142802,-1
701 | 2456,2.0,2.0,0.1,21.55,23.1,74.71428571428571,28.957142857142802,-2
702 | 2458,2.0,2.0,0.1,21.55,23.1,74.71428571428571,28.957142857142802,-1
703 | 2465,3.0,2.0,0.1,21.55,23.1,74.71428571428571,28.957142857142802,-1
704 | 2467,1.0,2.0,0.1,21.55,23.1,74.71428571428571,28.957142857142802,-2
705 | 2483,2.0,2.0,0.2,23.95,23.1,67.30769230769229,30.3615384615384,-1
706 | 2485,2.0,2.0,0.2,23.95,23.1,67.30769230769229,30.3615384615384,0
707 | 2494,3.0,2.0,0.2,23.95,23.1,67.30769230769229,30.3615384615384,-1
708 | 2495,2.0,1.0,0.2,23.95,23.1,67.30769230769229,30.3615384615384,-1
709 | 2496,1.0,2.0,0.2,23.95,23.1,67.30769230769229,30.3615384615384,-1
710 | 2500,2.0,1.0,0.2,23.95,23.1,67.30769230769229,30.3615384615384,0
711 | 2508,1.0,1.0,0.3,21.35,23.1,64.8571428571428,30.7285714285714,-1
712 | 2509,2.0,1.0,0.3,21.35,23.1,64.8571428571428,30.7285714285714,-1
713 | 2510,2.0,2.0,0.3,21.35,23.1,64.8571428571428,30.7285714285714,-1
714 | 2511,3.0,2.0,0.3,21.35,23.1,64.8571428571428,30.7285714285714,-1
715 | 2512,2.0,1.0,0.3,21.35,23.1,64.8571428571428,30.7285714285714,-2
716 | 2516,3.0,1.0,0.3,21.35,23.1,64.8571428571428,30.7285714285714,-2
717 | 2517,1.0,1.0,0.3,21.35,23.1,64.8571428571428,30.7285714285714,-2
718 | 2519,1.0,2.0,0.3,21.35,23.1,64.8571428571428,30.7285714285714,-1
719 | 2539,2.0,2.0,0.2,24.25,23.1,58.076923076922995,31.0,0
720 | 2540,2.0,2.0,0.2,24.25,23.1,58.076923076922995,31.0,-1
721 | 2541,3.0,2.0,0.2,24.25,23.1,58.076923076922995,31.0,0
722 | 2549,1.0,2.0,0.2,24.25,23.1,58.076923076922995,31.0,0
723 | 2560,3.0,2.0,0.3,27.35,23.1,58.0,30.685714285714198,-1
724 | 2561,3.0,2.0,0.3,27.35,23.1,58.0,30.685714285714198,0
725 | 2562,2.0,2.0,0.3,27.35,23.1,58.0,30.685714285714198,-1
726 | 2563,2.0,2.0,0.3,27.35,23.1,58.0,30.685714285714198,1
727 | 2564,2.0,1.0,0.3,27.35,23.1,58.0,30.685714285714198,-1
728 | 2566,1.0,2.0,0.3,27.35,23.1,58.0,30.685714285714198,0
729 | 2567,2.0,2.0,0.3,27.35,23.1,58.0,30.685714285714198,0
730 | 2574,1.0,1.0,0.3,27.35,23.1,58.0,30.685714285714198,0
731 | 2577,1.0,1.0,0.2,22.35,23.1,83.07865,24.0790277777777,-1
732 | 2578,1.0,2.0,0.2,22.35,23.1,83.07865,24.0790277777777,-2
733 | 2580,2.0,1.0,0.2,22.35,23.1,83.07865,24.0790277777777,-1
734 | 2584,2.0,2.0,0.2,22.35,23.1,83.07865,24.0790277777777,-1
735 | 2585,1.0,2.0,0.2,22.35,23.1,83.07865,24.0790277777777,-1
736 | 2586,2.0,2.0,0.2,22.35,23.1,83.07865,24.0790277777777,-2
737 | 2593,1.0,1.0,0.2,22.35,23.1,83.07865,24.0790277777777,0
738 | 2594,2.0,2.0,0.2,22.35,23.1,83.07865,24.0790277777777,0
739 | 2595,1.0,2.0,0.1,23.65,23.1,89.8635583333333,22.620216666666604,-1
740 | 2596,1.0,1.0,0.1,23.65,23.1,89.8635583333333,22.620216666666604,-1
741 | 2597,2.0,1.0,0.1,23.65,23.1,89.8635583333333,22.620216666666604,-1
742 | 2608,2.0,2.0,0.1,23.65,23.1,89.8635583333333,22.620216666666604,-2
743 | 2614,1.0,2.0,0.1,24.75,23.1,91.369325,22.1769833333333,0
744 | 2615,1.0,1.0,0.1,24.75,23.1,91.369325,22.1769833333333,0
745 | 2618,1.0,2.0,0.1,24.75,23.1,91.369325,22.1769833333333,-1
746 | 2623,1.0,1.0,0.1,24.75,23.1,91.369325,22.1769833333333,-1
747 | 2632,2.0,2.0,0.1,24.75,23.1,91.369325,22.1769833333333,0
748 | 2633,2.0,2.0,0.1,24.75,23.1,91.369325,22.1769833333333,-1
749 | 2636,1.0,2.0,0.3,27.55,23.1,82.9081083333333,24.528591666666603,0
750 | 2637,1.0,1.0,0.3,27.55,23.1,82.9081083333333,24.528591666666603,0
751 | 2641,2.0,2.0,0.3,27.55,23.1,82.9081083333333,24.528591666666603,0
752 | 2642,2.0,1.0,0.3,27.55,23.1,82.9081083333333,24.528591666666603,0
753 | 2643,2.0,1.0,0.3,27.55,23.1,82.9081083333333,24.528591666666603,1
754 | 2644,3.0,1.0,0.3,27.55,23.1,82.9081083333333,24.528591666666603,1
755 | 2646,1.0,1.0,0.3,27.55,23.1,82.9081083333333,24.528591666666603,1
756 | 2649,1.0,2.0,0.3,27.55,23.1,82.9081083333333,24.528591666666603,-1
757 | 2650,1.0,2.0,0.2,23.65,23.1,96.205,22.480925,-1
758 | 2651,3.0,1.0,0.2,23.65,23.1,96.205,22.480925,-2
759 | 2653,2.0,1.0,0.2,23.65,23.1,96.205,22.480925,-1
760 | 2654,1.0,1.0,0.2,23.65,23.1,96.205,22.480925,-1
761 | 2656,2.0,2.0,0.2,23.65,23.1,96.205,22.480925,-1
762 | 2660,2.0,2.0,0.2,23.65,23.1,96.205,22.480925,0
763 | 2662,2.0,1.0,0.2,19.75,23.1,50.16,16.7,-1
764 | 2663,2.0,1.0,0.2,19.75,23.1,50.16,16.7,0
765 | 2664,2.0,2.0,0.2,19.75,23.1,50.16,16.7,-1
766 | 2667,2.0,2.0,0.2,19.75,23.1,50.16,16.7,0
767 | 2671,3.0,2.0,0.2,19.75,23.1,50.16,16.7,-1
768 | 2684,2.0,2.0,0.2,19.75,23.1,50.16,16.7,-2
769 | 2695,3.0,2.0,0.2,19.75,23.1,50.16,16.7,0
770 | 2708,2.0,1.0,0.1,23.7,23.1,68.29,14.51,-1
771 | 2709,2.0,1.0,0.1,23.7,23.1,68.29,14.51,0
772 | 2712,2.0,2.0,0.1,23.7,23.1,68.29,14.51,-1
773 | 2713,2.0,2.0,0.1,23.7,23.1,68.29,14.51,0
774 | 2720,3.0,2.0,0.1,23.7,23.1,68.29,14.51,-1
775 | 2724,3.0,1.0,0.1,23.7,23.1,68.29,14.51,-1
776 | 2739,3.0,2.0,0.1,23.7,23.1,68.29,14.51,0
777 | 2743,1.0,1.0,0.1,23.6,23.1,86.0,18.6,0
778 | 2744,1.0,1.0,0.1,23.6,23.1,86.0,18.6,-1
779 | 2745,2.0,1.0,0.1,23.6,23.1,86.0,18.6,0
780 | 2747,2.0,2.0,0.1,23.6,23.1,86.0,18.6,-1
781 | 2748,2.0,2.0,0.1,23.6,23.1,86.0,18.6,0
782 | 2749,1.0,2.0,0.1,23.6,23.1,86.0,18.6,-1
783 | 2751,3.0,1.0,0.1,23.6,23.1,86.0,18.6,0
784 | 2752,3.0,2.0,0.1,23.6,23.1,86.0,18.6,-1
785 | 2753,2.0,1.0,0.1,23.6,23.1,86.0,18.6,-1
786 | 2768,2.0,1.0,0.1,24.9,23.1,74.0,20.5,0
787 | 2770,2.0,2.0,0.1,24.9,23.1,74.0,20.5,0
788 | 2773,4.0,2.0,0.1,24.9,23.1,74.0,20.5,0
789 | 2778,2.0,1.0,0.1,24.9,23.1,74.0,20.5,-1
790 | 2779,1.0,2.0,0.1,26.5,23.1,72.0,21.8,0
791 | 2781,1.0,2.0,0.1,26.5,23.1,72.0,21.8,-1
792 | 2782,2.0,2.0,0.1,26.5,23.1,72.0,21.8,0
793 | 2784,1.0,1.0,0.1,26.5,23.1,72.0,21.8,0
794 | 2785,1.0,2.0,0.1,26.5,23.1,72.0,21.8,1
795 | 2787,2.0,1.0,0.1,26.5,23.1,72.0,21.8,0
796 | 2793,2.0,2.0,0.0,24.2,23.1,64.0,20.5,0
797 | 2794,1.0,1.0,0.0,24.2,23.1,64.0,20.5,0
798 | 2795,1.0,2.0,0.0,24.2,23.1,64.0,20.5,0
799 | 2798,1.0,2.0,0.0,24.2,23.1,64.0,20.5,-1
800 | 2804,3.0,1.0,0.0,24.2,23.1,64.0,20.5,0
801 | 2805,1.0,1.0,0.0,24.2,23.1,64.0,20.5,1
802 | 2807,1.0,2.0,0.0,24.2,23.1,64.0,20.5,1
803 | 2808,1.0,1.0,0.0,24.2,23.1,64.0,20.5,-1
804 | 2830,2.0,1.0,0.0,24.2,23.1,64.0,20.5,1
805 | 2833,2.0,1.0,0.0,24.2,23.1,64.0,20.5,0
806 | 2837,2.0,2.0,0.0,24.2,23.1,64.0,20.5,-1
807 | 2839,3.0,2.0,0.0,24.2,23.1,64.0,20.5,0
808 | 2845,2.0,2.0,0.0,24.2,23.1,64.0,20.5,1
809 | 2854,6.0,1.0,0.0,24.2,23.1,64.0,20.5,0
810 | 2855,1.0,2.0,0.0,24.7,23.1,54.7,22.2,1
811 | 2856,2.0,2.0,0.0,24.7,23.1,54.7,22.2,0
812 | 2857,2.0,1.0,0.0,24.7,23.1,54.7,22.2,0
813 | 2858,1.0,2.0,0.0,24.7,23.1,54.7,22.2,0
814 | 2860,1.0,1.0,0.0,24.7,23.1,54.7,22.2,0
815 | 2861,1.0,1.0,0.0,24.7,23.1,54.7,22.2,1
816 | 2866,2.0,2.0,0.0,24.7,23.1,54.7,22.2,-1
817 | 2878,3.0,1.0,0.0,24.7,23.1,54.7,22.2,0
818 | 2880,3.0,2.0,0.0,24.7,23.1,54.7,22.2,0
819 | 2881,1.0,2.0,0.0,23.2,23.1,61.8,20.8,-1
820 | 2882,2.0,2.0,0.0,23.2,23.1,61.8,20.8,0
821 | 2884,1.0,2.0,0.0,23.2,23.1,61.8,20.8,0
822 | 2885,1.0,1.0,0.0,23.2,23.1,61.8,20.8,0
823 | 2889,6.0,2.0,0.0,23.2,23.1,61.8,20.8,0
824 | 2891,2.0,1.0,0.1,24.2,23.1,57.9,19.2,0
825 | 2892,2.0,2.0,0.1,24.2,23.1,57.9,19.2,0
826 | 2893,1.0,2.0,0.1,24.2,23.1,57.9,19.2,0
827 | 2895,1.0,1.0,0.1,24.2,23.1,57.9,19.2,0
828 | 2912,2.0,1.0,0.1,24.2,23.1,57.9,19.2,1
829 | 2914,1.0,1.0,0.1,25.2,23.1,57.9,19.2,0
830 | 2915,2.0,1.0,0.1,25.2,23.1,57.9,19.2,0
831 | 2918,1.0,2.0,0.1,25.2,23.1,57.9,19.2,0
832 | 2920,4.0,1.0,0.1,25.2,23.1,57.9,19.2,0
833 | 2923,1.0,2.0,0.1,25.2,23.1,57.9,19.2,-1
834 | 2924,2.0,2.0,0.1,25.2,23.1,57.9,19.2,0
835 | 2937,1.0,2.0,0.1,26.95,23.1,57.9,19.2,0
836 | 2938,1.0,1.0,0.1,26.95,23.1,57.9,19.2,0
837 | 2941,2.0,2.0,0.1,26.95,23.1,57.9,19.2,0
838 | 2945,4.0,2.0,0.1,26.95,23.1,57.9,19.2,1
839 | 2949,4.0,2.0,0.1,26.95,23.1,57.9,19.2,-1
840 | 2952,4.0,2.0,0.1,26.95,23.1,57.9,19.2,0
841 | 2960,1.0,1.0,0.1,22.9,23.1,81.0,7.1,0
842 | 2962,5.0,2.0,0.1,22.9,23.1,81.0,7.1,-1
843 | 2963,1.0,2.0,0.1,22.9,23.1,81.0,7.1,0
844 | 2965,5.0,2.0,0.1,22.9,23.1,81.0,7.1,0
845 | 2967,1.0,2.0,0.1,22.9,23.1,81.0,7.1,-1
846 | 2968,2.0,2.0,0.1,22.9,23.1,81.0,7.1,1
847 | 2969,2.0,1.0,0.1,22.9,23.1,81.0,7.1,0
848 | 2975,2.0,2.0,0.1,22.9,23.1,81.0,7.1,0
849 | 2977,1.0,2.0,0.1,22.37,23.1,81.0,7.1,-1
850 | 2978,1.0,2.0,0.1,22.37,23.1,81.0,7.1,0
851 | 2979,2.0,1.0,0.1,22.37,23.1,81.0,7.1,0
852 | 2980,1.0,1.0,0.1,22.37,23.1,81.0,7.1,0
853 | 2981,2.0,1.0,0.1,22.37,23.1,81.0,7.1,-1
854 | 2982,2.0,2.0,0.1,22.37,23.1,81.0,7.1,-1
855 | 2984,1.0,1.0,0.1,22.37,23.1,81.0,7.1,-1
856 | 2992,5.0,2.0,0.1,22.37,23.1,81.0,7.1,0
857 | 2996,6.0,2.0,0.1,20.3,23.1,81.0,7.1,-1
858 | 2997,1.0,1.0,0.1,20.3,23.1,81.0,7.1,-1
859 | 2998,2.0,1.0,0.1,20.3,23.1,81.0,7.1,-1
860 | 2999,1.0,2.0,0.1,20.3,23.1,81.0,7.1,-1
861 | 3000,2.0,2.0,0.1,20.3,23.1,81.0,7.1,-1
862 | 3003,1.0,2.0,0.1,20.3,23.1,81.0,7.1,0
863 | 3014,1.0,1.0,0.1,21.3,23.1,81.0,7.1,-1
864 | 3016,2.0,2.0,0.1,21.3,23.1,81.0,7.1,0
865 | 3017,1.0,2.0,0.1,21.3,23.1,81.0,7.1,-1
866 | 3018,2.0,2.0,0.1,21.3,23.1,81.0,7.1,2
867 | 3020,1.0,2.0,0.1,21.3,23.1,81.0,7.1,0
868 | 3023,2.0,1.0,0.1,21.3,23.1,81.0,7.1,0
869 | 3025,1.0,1.0,0.1,21.3,23.1,81.0,7.1,0
870 | 3029,2.0,1.0,0.1,21.3,23.1,81.0,7.1,-1
871 | 3037,2.0,1.0,0.1,21.3,23.1,87.0,9.0,0
872 | 3038,2.0,2.0,0.1,21.3,23.1,87.0,9.0,0
873 | 3041,2.0,1.0,0.1,21.3,23.1,87.0,9.0,-1
874 | 3045,2.0,2.0,0.1,21.3,23.1,87.0,9.0,-1
875 | 3051,3.0,2.0,0.1,21.3,23.1,87.0,9.0,-1
876 | 3052,3.0,1.0,0.1,21.3,23.1,87.0,9.0,0
877 | 3068,3.0,2.0,0.1,21.3,23.1,87.0,9.0,0
878 | 3080,2.0,2.0,0.1,21.3,23.1,87.0,9.0,1
879 | 3089,2.0,1.0,0.1,21.3,23.1,87.0,9.0,1
880 | 3090,2.0,2.0,0.1,21.3,23.1,87.0,9.0,2
881 | 3093,4.0,2.0,0.1,21.3,23.1,87.0,9.0,0
882 | 3099,1.0,1.0,0.1,21.3,23.1,87.0,9.0,-1
883 | 3106,2.0,1.0,0.1,21.3,23.1,87.0,9.0,-2
884 | 3114,2.0,2.0,0.1,22.1,23.1,93.0,6.0,0
885 | 3115,3.0,2.0,0.1,22.1,23.1,93.0,6.0,-1
886 | 3117,2.0,2.0,0.1,22.1,23.1,93.0,6.0,-1
887 | 3119,2.0,1.0,0.1,22.1,23.1,93.0,6.0,-1
888 | 3121,2.0,1.0,0.1,22.1,23.1,93.0,6.0,0
889 | 3125,3.0,1.0,0.1,22.1,23.1,93.0,6.0,-1
890 | 3129,2.0,1.0,0.1,22.1,23.1,93.0,6.0,1
891 | 3130,3.0,2.0,0.1,22.1,23.1,93.0,6.0,0
892 | 3136,2.0,2.0,0.1,22.9,23.1,54.0,10.0,0
893 | 3137,3.0,2.0,0.1,22.9,23.1,54.0,10.0,0
894 | 3139,3.0,1.0,0.1,22.9,23.1,54.0,10.0,-1
895 | 3141,2.0,1.0,0.1,22.9,23.1,54.0,10.0,0
896 | 3146,3.0,1.0,0.1,22.9,23.1,54.0,10.0,0
897 | 3152,2.0,1.0,0.1,22.9,23.1,54.0,10.0,-1
898 | 3157,1.0,2.0,0.1,23.7,23.1,96.0,14.5,-1
899 | 3158,1.0,2.0,0.1,23.7,23.1,96.0,14.5,0
900 | 3159,2.0,1.0,0.1,23.7,23.1,96.0,14.5,0
901 | 3160,1.0,1.0,0.1,23.7,23.1,96.0,14.5,-1
902 | 3162,2.0,2.0,0.1,23.7,23.1,96.0,14.5,1
903 | 3163,2.0,2.0,0.1,23.7,23.1,96.0,14.5,-1
904 | 3164,1.0,1.0,0.1,23.7,23.1,96.0,14.5,0
905 | 3165,2.0,1.0,0.1,23.7,23.1,96.0,14.5,1
906 | 3167,2.0,2.0,0.1,23.7,23.1,96.0,14.5,0
907 | 3170,3.0,2.0,0.1,23.7,23.1,96.0,14.5,1
908 | 3196,1.0,1.0,0.1,23.7,23.1,96.0,14.5,1
909 | 3199,1.0,2.0,0.1,23.7,23.1,96.0,14.5,1
910 | 3206,3.0,2.0,0.1,23.7,23.1,96.0,14.5,-1
911 | 3207,4.0,2.0,0.1,23.7,23.1,96.0,14.5,0
912 | 3216,2.0,1.0,0.1,23.7,23.1,96.0,14.5,-1
913 | 3218,3.0,2.0,0.1,23.7,23.1,96.0,14.5,0
914 | 3225,1.0,2.0,0.1,23.7,23.1,96.0,14.5,2
915 | 3243,2.0,2.0,0.1,23.7,23.1,83.0,8.6,0
916 | 3244,1.0,1.0,0.1,23.7,23.1,83.0,8.6,0
917 | 3245,1.0,2.0,0.1,23.7,23.1,83.0,8.6,0
918 | 3246,2.0,2.0,0.1,23.7,23.1,83.0,8.6,-1
919 | 3251,1.0,1.0,0.1,23.7,23.1,83.0,8.6,-2
920 | 3256,1.0,2.0,0.1,23.7,23.1,83.0,8.6,-1
921 | 3267,1.0,1.0,0.1,23.7,23.1,83.0,8.6,-1
922 | 3270,1.0,2.0,0.1,23.7,23.1,83.0,8.6,-2
923 | 3271,2.0,1.0,0.1,23.7,23.1,83.0,8.6,0
924 | 3272,1.0,2.0,0.1,23.7,23.1,82.0,5.9,0
925 | 3273,2.0,2.0,0.1,23.7,23.1,82.0,5.9,0
926 | 3275,2.0,2.0,0.1,23.7,23.1,82.0,5.9,-1
927 | 3281,2.0,2.0,0.1,23.7,23.1,82.0,5.9,1
928 | 3282,6.0,2.0,0.1,23.7,23.1,82.0,5.9,0
929 | 3284,3.0,1.0,0.1,23.7,23.1,82.0,5.9,0
930 | 3285,1.0,1.0,0.1,23.7,23.1,82.0,5.9,-1
931 | 3287,2.0,2.0,0.1,23.7,23.1,82.0,5.9,-2
932 | 3292,2.0,1.0,0.1,23.7,23.1,82.0,5.9,2
933 | 3293,1.0,1.0,0.1,23.7,23.1,82.0,5.9,0
934 | 3294,6.0,1.0,0.1,23.7,23.1,82.0,5.9,-1
935 | 3298,2.0,1.0,0.1,23.7,23.1,82.0,5.9,-1
936 | 3305,2.0,1.0,0.1,23.7,23.1,82.0,5.9,0
937 | 3314,2.0,1.0,0.1,23.7,23.1,91.0,5.6,0
938 | 3315,1.0,2.0,0.1,23.7,23.1,91.0,5.6,-1
939 | 3316,2.0,2.0,0.1,23.7,23.1,91.0,5.6,-1
940 | 3317,1.0,1.0,0.1,23.7,23.1,91.0,5.6,1
941 | 3318,2.0,2.0,0.1,23.7,23.1,91.0,5.6,0
942 | 3319,1.0,1.0,0.1,23.7,23.1,91.0,5.6,-1
943 | 3320,1.0,2.0,0.1,23.7,23.1,91.0,5.6,0
944 | 3325,1.0,1.0,0.1,23.7,23.1,91.0,5.6,0
945 | 3326,2.0,1.0,0.1,23.7,23.1,91.0,5.6,-1
946 | 3327,2.0,1.0,0.1,23.7,23.1,81.0,8.0,0
947 | 3328,2.0,2.0,0.1,23.7,23.1,81.0,8.0,0
948 | 3329,1.0,1.0,0.1,23.7,23.1,81.0,8.0,0
949 | 3330,1.0,1.0,0.1,23.7,23.1,81.0,8.0,1
950 | 3331,1.0,2.0,0.1,23.7,23.1,81.0,8.0,0
951 | 3332,1.0,1.0,0.1,23.7,23.1,81.0,8.0,-1
952 | 3334,1.0,2.0,0.1,23.7,23.1,81.0,8.0,-1
953 | 3341,2.0,2.0,0.1,23.7,23.1,81.0,8.0,-1
954 | 3343,2.0,2.0,0.1,23.7,23.1,81.0,8.0,1
955 | 3345,3.0,1.0,0.1,23.7,23.1,81.0,8.0,1
956 | 3346,3.0,2.0,0.1,23.7,23.1,81.0,8.0,0
957 | 3348,1.0,2.0,0.1,23.7,23.1,81.0,8.0,1
958 | 3352,2.0,1.0,0.1,23.7,23.1,81.0,8.0,1
959 | 3365,1.0,2.0,0.1,23.7,23.1,81.0,8.0,-2
960 | 3374,2.0,1.0,0.1,23.7,23.1,80.0,11.4,0
961 | 3375,2.0,1.0,0.1,23.7,23.1,80.0,11.4,-1
962 | 3376,1.0,2.0,0.1,23.7,23.1,80.0,11.4,-1
963 | 3379,3.0,2.0,0.1,23.7,23.1,80.0,11.4,0
964 | 3382,2.0,1.0,0.1,23.7,23.1,80.0,11.4,1
965 | 3384,3.0,1.0,0.1,23.7,23.1,80.0,11.4,0
966 | 3385,2.0,2.0,0.1,23.7,23.1,80.0,11.4,0
967 | 3399,6.0,2.0,0.1,23.7,23.1,80.0,11.4,0
968 | 3402,2.0,2.0,0.1,23.7,23.1,80.0,11.4,-1
969 | 3403,4.0,2.0,0.1,23.7,23.1,80.0,11.4,0
970 | 3405,3.0,1.0,0.1,23.7,23.1,80.0,11.4,-1
971 | 3407,4.0,1.0,0.1,23.7,23.1,80.0,11.4,2
972 | 3408,1.0,1.0,0.1,23.7,23.1,80.0,11.4,0
973 | 3410,4.0,2.0,0.1,23.7,23.1,80.0,11.4,-1
974 | 3411,1.0,2.0,0.1,23.7,23.1,80.0,11.4,0
975 | 3413,1.0,1.0,0.1,23.7,23.1,80.0,11.4,-1
976 | 3423,1.0,1.0,0.1,23.7,23.1,80.0,11.4,-2
977 | 3430,1.0,2.0,0.1,23.7,23.1,24.0,11.2,0
978 | 3432,2.0,2.0,0.1,23.7,23.1,24.0,11.2,0
979 | 3433,1.0,2.0,0.1,23.7,23.1,24.0,11.2,-1
980 | 3434,1.0,1.0,0.1,23.7,23.1,24.0,11.2,0
981 | 3442,1.0,1.0,0.1,23.7,23.1,24.0,11.2,-1
982 | 3452,3.0,2.0,0.1,23.7,23.1,24.0,11.2,-1
983 | 3458,1.0,2.0,0.1,23.7,23.1,24.0,11.2,-2
984 | 3462,1.0,2.0,0.1,23.7,23.1,28.0,12.2,-2
985 | 3463,1.0,2.0,0.1,23.7,23.1,28.0,12.2,-1
986 | 3466,1.0,2.0,0.1,23.7,23.1,28.0,12.2,0
987 | 3471,1.0,1.0,0.1,23.7,23.1,28.0,12.2,-1
988 | 3474,1.0,1.0,0.1,23.7,23.1,28.0,12.2,0
989 | 3486,3.0,2.0,0.1,23.7,23.1,28.0,12.2,0
990 | 3489,2.0,2.0,0.1,23.7,23.1,28.0,12.2,-1
991 | 3497,2.0,2.0,0.1,23.7,23.1,52.0,7.0,0
992 | 3498,1.0,2.0,0.1,23.7,23.1,52.0,7.0,0
993 | 3500,1.0,2.0,0.1,23.7,23.1,52.0,7.0,-1
994 | 3501,1.0,1.0,0.1,23.7,23.1,52.0,7.0,-1
995 | 3502,2.0,2.0,0.1,23.7,23.1,52.0,7.0,-2
996 | 3505,4.0,2.0,0.1,23.7,23.1,52.0,7.0,-1
997 | 3506,2.0,2.0,0.1,23.7,23.1,52.0,7.0,-1
998 | 3521,1.0,1.0,0.1,23.7,23.1,52.0,7.0,-2
999 | 3527,1.0,1.0,0.1,24.0,23.1,52.0,23.2,-1
1000 | 3528,2.0,2.0,0.1,24.0,23.1,52.0,23.2,0
1001 | 3530,2.0,2.0,0.1,24.0,23.1,52.0,23.2,-1
1002 | 3531,3.0,1.0,0.1,24.0,23.1,52.0,23.2,-1
1003 | 3534,2.0,1.0,0.1,24.0,23.1,52.0,23.2,-2
1004 | 3535,1.0,1.0,0.1,24.0,23.1,52.0,23.2,0
1005 | 3540,1.0,2.0,0.1,24.0,23.1,52.0,23.2,0
1006 | 3548,2.0,1.0,0.1,24.0,23.1,52.0,23.2,-1
1007 | 3550,2.0,1.0,0.1,24.0,23.1,52.0,23.2,0
1008 | 3572,1.0,2.0,0.1,24.0,23.1,52.0,23.2,-1
1009 | 3586,1.0,1.0,0.1,19.0,23.1,52.0,15.5,0
1010 | 3587,1.0,1.0,0.1,19.0,23.1,52.0,15.5,-1
1011 | 3588,1.0,2.0,0.1,19.0,23.1,52.0,15.5,-1
1012 | 3590,1.0,1.0,0.1,19.0,23.1,52.0,15.5,-2
1013 | 3600,1.0,2.0,0.1,19.0,23.1,52.0,15.5,0
1014 | 3604,3.0,2.0,0.1,19.0,23.1,52.0,15.5,-1
1015 | 3606,1.0,1.0,0.1,19.0,23.1,52.0,15.5,1
1016 | 3610,1.0,2.0,0.1,19.0,23.1,52.0,15.5,-2
1017 | 3628,2.0,1.0,0.1,19.0,23.1,52.0,15.5,-1
1018 | 3631,2.0,2.0,0.1,19.0,23.1,52.0,15.5,0
1019 | 3637,2.0,2.0,0.1,19.0,23.1,52.0,15.5,-1
1020 | 3652,1.0,2.0,0.17,22.65,22.6,58.0,25.0,-1
1021 | 3654,2.0,2.0,0.17,22.65,22.6,58.0,25.0,0
1022 | 3656,2.0,2.0,0.17,22.65,22.6,58.0,25.0,-1
1023 | 3658,1.0,1.0,0.17,22.65,22.6,58.0,25.0,-1
1024 | 3659,1.0,2.0,0.17,22.65,22.6,58.0,25.0,0
1025 | 3665,1.0,2.0,0.17,22.65,22.6,58.0,25.0,-2
1026 | 3673,1.0,1.0,0.17,22.65,22.6,58.0,25.0,0
1027 | 3683,2.0,1.0,0.17,22.65,22.6,58.0,25.0,0
1028 | 3689,2.0,1.0,0.17,22.65,22.6,58.0,25.0,-1
1029 | 3697,1.0,1.0,0.17,22.65,22.6,58.0,25.0,-2
1030 | 3729,2.0,2.0,0.17,22.65,22.6,58.0,25.0,-2
1031 | 3733,1.0,1.0,0.0,20.4925,20.5,58.0,11.8,0
1032 | 3734,1.0,2.0,0.0,20.4925,20.5,58.0,11.8,0
1033 | 3735,1.0,2.0,0.0,20.4925,20.5,58.0,11.8,-1
1034 | 3736,1.0,2.0,0.0,20.4925,20.5,58.0,11.8,1
1035 | 3740,1.0,1.0,0.0,20.4925,20.5,58.0,11.8,-1
1036 | 3744,1.0,2.0,0.0,20.4925,20.5,58.0,11.8,-2
1037 | 3758,2.0,1.0,0.0,20.4925,20.5,58.0,11.8,-2
1038 | 3764,2.0,2.0,0.0,20.4925,20.5,58.0,11.8,0
1039 | 3770,2.0,2.0,0.0,20.4925,20.5,58.0,11.8,-1
1040 | 3784,2.0,1.0,0.0,20.4925,20.5,58.0,11.8,0
1041 | 3793,2.0,2.0,0.0,20.4925,20.5,58.0,11.8,2
1042 | 3794,1.0,2.0,0.0,20.4925,20.5,58.0,11.8,2
1043 | 3802,1.0,1.0,0.1,24.2,23.1,73.5,29.0,0
1044 | 3803,1.0,2.0,0.1,24.2,23.1,73.5,29.0,-1
1045 | 3804,1.0,2.0,0.1,24.2,23.1,73.5,29.0,0
1046 | 3805,1.0,1.0,0.1,24.2,23.1,73.5,29.0,-1
1047 | 3816,1.0,2.0,0.1,24.2,23.1,73.5,29.0,-2
1048 | 3847,1.0,2.0,0.109,29.035999999999998,23.1,54.0,28.0,0
1049 | 3848,1.0,1.0,0.109,29.035999999999998,23.1,54.0,28.0,0
1050 | 3850,1.0,1.0,0.109,29.035999999999998,23.1,54.0,28.0,-1
1051 | 3851,2.0,1.0,0.109,29.035999999999998,23.1,54.0,28.0,0
1052 | 3853,1.0,2.0,0.109,29.035999999999998,23.1,54.0,28.0,-1
1053 | 3860,1.0,1.0,0.109,29.035999999999998,23.1,54.0,28.0,1
1054 | 3871,1.0,2.0,0.109,29.035999999999998,23.1,54.0,28.0,1
1055 | 3877,1.0,2.0,0.1,25.79,23.1,76.0,22.0,1
1056 | 3878,1.0,2.0,0.1,25.79,23.1,76.0,22.0,0
1057 | 3879,1.0,1.0,0.1,25.79,23.1,76.0,22.0,0
1058 | 3881,1.0,1.0,0.1,25.79,23.1,76.0,22.0,-1
1059 | 3889,1.0,2.0,0.1,25.79,23.1,76.0,22.0,-1
1060 | 3947,1.0,1.0,0.1,25.79,23.1,76.0,22.0,2
1061 | 3948,3.0,1.0,0.1,23.7,23.1,43.0,28.0,0
1062 | 3949,1.0,2.0,0.1,23.7,23.1,43.0,28.0,0
1063 | 3951,1.0,1.0,0.1,23.7,23.1,43.0,28.0,-1
1064 | 3952,1.0,2.0,0.1,23.7,23.1,43.0,28.0,-1
1065 | 3957,1.0,1.0,0.1,23.7,23.1,43.0,28.0,0
1066 | 3960,3.0,2.0,0.1,23.7,23.1,43.0,28.0,-1
1067 | 3976,1.0,1.0,0.1,25.5,23.1,90.0,19.0,-1
1068 | 3977,1.0,1.0,0.1,25.5,23.1,90.0,19.0,-2
1069 | 3978,1.0,2.0,0.1,25.5,23.1,90.0,19.0,-1
1070 | 3981,1.0,2.0,0.1,25.5,23.1,90.0,19.0,-2
1071 | 3984,1.0,1.0,0.1,25.5,23.1,90.0,19.0,0
1072 | 3985,2.0,1.0,0.1,25.5,23.1,90.0,19.0,-1
1073 | 3993,2.0,2.0,0.1,25.5,23.1,90.0,19.0,-1
1074 | 4011,1.0,2.0,0.1,25.5,23.1,90.0,19.0,0
1075 | 4015,1.0,1.0,0.1,25.5,23.1,82.0,20.0,0
1076 | 4017,1.0,2.0,0.1,25.5,23.1,82.0,20.0,0
1077 | 4018,1.0,2.0,0.1,25.5,23.1,82.0,20.0,-1
1078 | 4022,1.0,1.0,0.1,25.5,23.1,82.0,20.0,-1
1079 | 4027,2.0,2.0,0.1,25.5,23.1,82.0,20.0,-1
1080 | 4030,1.0,1.0,0.1,25.5,23.1,82.0,20.0,2
1081 | 4033,1.0,2.0,0.1,25.5,23.1,82.0,20.0,-2
1082 | 4043,2.0,1.0,0.1,25.5,23.1,82.0,20.0,0
1083 | 4055,2.0,2.0,0.1,23.7,23.1,59.0,31.1,-1
1084 | 4056,3.0,1.0,0.1,23.7,23.1,59.0,31.1,-1
1085 | 4057,2.0,1.0,0.1,23.7,23.1,59.0,31.1,0
1086 | 4059,1.0,1.0,0.1,23.7,23.1,59.0,31.1,0
1087 | 4064,2.0,1.0,0.1,23.7,23.1,59.0,31.1,-1
1088 | 4073,4.0,1.0,0.1,23.7,23.1,61.0,30.7,0
1089 | 4074,1.0,2.0,0.1,23.7,23.1,61.0,30.7,0
1090 | 4075,1.0,2.0,0.1,23.7,23.1,61.0,30.7,-1
1091 | 4076,2.0,2.0,0.1,23.7,23.1,61.0,30.7,0
1092 | 4080,2.0,2.0,0.1,23.7,23.1,61.0,30.7,-1
1093 | 4081,4.0,2.0,0.1,23.7,23.1,61.0,30.7,0
1094 | 4099,2.0,2.0,0.1,23.7,23.1,61.0,30.7,2
1095 | 4101,2.0,2.0,0.1,23.7,23.1,20.2,35.1,0
1096 | 4104,1.0,2.0,0.1,23.7,23.1,20.2,35.1,0
1097 | 4109,4.0,2.0,0.1,23.7,23.1,20.2,35.1,0
1098 | 4110,3.0,1.0,0.1,23.7,23.1,20.2,35.1,1
1099 | 4111,3.0,1.0,0.1,23.7,23.1,20.2,35.1,0
1100 | 4112,2.0,2.0,0.1,23.7,23.1,85.2,27.1,0
1101 | 4113,1.0,2.0,0.1,23.7,23.1,85.2,27.1,0
1102 | 4114,1.0,2.0,0.1,23.7,23.1,85.2,27.1,-1
1103 | 4121,2.0,2.0,0.1,23.7,23.1,85.2,27.1,-1
1104 | 4130,1.0,1.0,0.1,23.7,23.1,85.2,27.1,-1
1105 | 4136,3.0,2.0,0.1,23.7,23.1,39.4,32.1,-1
1106 | 4137,3.0,2.0,0.1,23.7,23.1,39.4,32.1,0
1107 | 4138,2.0,2.0,0.1,23.7,23.1,39.4,32.1,0
1108 | 4139,2.0,2.0,0.1,23.7,23.1,39.4,32.1,-1
1109 | 4141,2.0,1.0,0.1,23.7,23.1,39.4,32.1,0
1110 | 4145,2.0,1.0,0.1,23.7,23.1,39.4,32.1,-1
1111 | 4146,4.0,2.0,0.1,23.7,23.1,39.4,32.1,0
1112 | 4148,3.0,1.0,0.1,23.7,23.1,39.4,32.1,0
1113 | 4149,1.0,2.0,0.1,23.7,23.1,39.4,32.1,-1
1114 | 4151,2.0,2.0,0.43,27.2,27.8,11.5,35.7,1
1115 | 4152,2.0,2.0,0.43,27.2,27.8,11.5,35.7,-1
1116 | 4154,3.0,2.0,0.43,27.2,27.8,11.5,35.7,0
1117 | 4155,4.0,1.0,0.43,27.2,27.8,11.5,35.7,-1
1118 | 4156,3.0,1.0,0.43,27.2,27.8,11.5,35.7,0
1119 | 4158,2.0,2.0,0.43,27.2,27.8,11.5,35.7,0
1120 | 4159,2.0,1.0,0.43,27.2,27.8,11.5,35.7,0
1121 | 4160,3.0,2.0,0.43,27.2,27.8,11.5,35.7,-1
1122 | 4173,2.0,2.0,0.69,33.9333333333333,33.7,27.4,35.8,-1
1123 | 4175,2.0,2.0,0.69,33.9333333333333,33.7,27.4,35.8,0
1124 | 4177,2.0,1.0,0.69,33.9333333333333,33.7,27.4,35.8,-1
1125 | 4182,2.0,1.0,0.69,33.9333333333333,33.7,27.4,35.8,0
1126 | 4183,3.0,1.0,0.69,33.9333333333333,33.7,27.4,35.8,0
1127 | 4185,2.0,2.0,0.69,33.9333333333333,33.7,27.4,35.8,1
1128 | 4187,2.0,1.0,0.69,33.9333333333333,33.7,27.4,35.8,2
1129 | 4189,3.0,2.0,0.69,33.9333333333333,33.7,27.4,35.8,-1
1130 | 4192,2.0,2.0,0.69,33.9333333333333,33.7,27.4,35.8,2
1131 | 4193,3.0,2.0,0.69,33.9333333333333,33.7,27.4,35.8,0
1132 | 4195,3.0,1.0,0.69,33.9333333333333,33.7,27.4,35.8,1
1133 | 4197,1.0,2.0,0.69,33.9333333333333,33.7,27.4,35.8,2
1134 | 4198,1.0,2.0,0.69,33.9333333333333,33.7,27.4,35.8,1
1135 | 4201,2.0,2.0,0.41,27.5,27.2,84.9,28.6,0
1136 | 4202,2.0,2.0,0.41,27.5,27.2,84.9,28.6,-1
1137 | 4204,3.0,2.0,0.41,27.5,27.2,84.9,28.6,-1
1138 | 4214,2.0,1.0,0.41,27.5,27.2,84.9,28.6,0
1139 | 4217,3.0,1.0,0.41,27.5,27.2,84.9,28.6,-1
1140 | 4219,3.0,1.0,0.41,27.5,27.2,84.9,28.6,0
1141 | 4221,2.0,1.0,0.41,27.5,27.2,84.9,28.6,-1
1142 | 4227,1.0,2.0,0.41,27.5,27.2,84.9,28.6,0
1143 | 4228,1.0,2.0,0.41,27.5,27.2,84.9,28.6,-1
1144 | 4239,3.0,2.0,0.41,27.5,27.2,84.9,28.6,0
1145 | 4242,4.0,1.0,0.41,27.5,27.2,84.9,28.6,-1
1146 | 4252,2.0,1.0,0.1,23.7,23.1,40.0,29.0,0
1147 | 4253,2.0,1.0,0.1,23.7,23.1,40.0,29.0,-1
1148 | 4256,1.0,2.0,0.1,23.7,23.1,40.0,29.0,0
1149 | 4257,2.0,2.0,0.1,23.7,23.1,40.0,29.0,0
1150 | 4258,2.0,2.0,0.1,23.7,23.1,40.0,29.0,-1
1151 | 4269,1.0,2.0,0.1,23.7,23.1,40.0,29.0,-1
1152 | 4271,3.0,2.0,0.1,23.7,23.1,40.0,29.0,-1
1153 | 4286,1.0,1.0,0.1,23.7,23.1,40.0,29.0,0
1154 | 4310,3.0,1.0,0.1,23.45,23.1,74.0,22.0,-1
1155 | 4311,1.0,1.0,0.1,23.45,23.1,74.0,22.0,-1
1156 | 4312,1.0,1.0,0.1,23.45,23.1,74.0,22.0,-2
1157 | 4314,1.0,1.0,0.1,23.45,23.1,74.0,22.0,0
1158 | 4328,1.0,2.0,0.1,23.45,23.1,74.0,22.0,-2
1159 | 4329,2.0,2.0,0.1,23.45,23.1,74.0,22.0,-2
1160 | 4331,2.0,2.0,0.1,23.45,23.1,74.0,22.0,-1
1161 | 4334,1.0,1.0,0.1,23.45,23.1,74.0,22.0,1
1162 | 4335,1.0,2.0,0.1,23.45,23.1,74.0,22.0,-1
1163 | 4342,1.0,2.0,0.1,23.45,23.1,74.0,22.0,0
1164 | 4347,1.0,1.0,0.1,18.0025,23.1,43.0,13.0,-2
1165 | 4348,1.0,1.0,0.1,18.0025,23.1,43.0,13.0,-1
1166 | 4354,1.0,2.0,0.1,18.0025,23.1,43.0,13.0,-1
1167 | 4356,1.0,2.0,0.1,18.0025,23.1,43.0,13.0,0
1168 | 4357,1.0,2.0,0.1,18.0025,23.1,43.0,13.0,-2
1169 | 4361,3.0,2.0,0.1,18.0025,23.1,43.0,13.0,-1
1170 | 4363,2.0,2.0,0.1,18.0025,23.1,43.0,13.0,-1
1171 | 4367,1.0,1.0,0.1,18.0025,23.1,43.0,13.0,0
1172 | 4379,1.0,1.0,0.1,24.6,23.1,68.0,25.0,0
1173 | 4385,1.0,1.0,0.1,24.6,23.1,68.0,25.0,-1
1174 | 4386,1.0,2.0,0.1,24.6,23.1,68.0,25.0,0
1175 | 4387,1.0,2.0,0.1,24.6,23.1,68.0,25.0,-1
1176 | 4393,3.0,2.0,0.1,24.6,23.1,68.0,25.0,0
1177 | 4395,2.0,2.0,0.1,24.6,23.1,68.0,25.0,0
1178 | 4402,2.0,1.0,0.1,24.6,23.1,68.0,25.0,0
1179 | 4431,1.0,1.0,0.1,19.466666666666697,23.1,49.0,15.0,-1
1180 | 4432,1.0,2.0,0.1,19.466666666666697,23.1,49.0,15.0,-1
1181 | 4433,1.0,1.0,0.1,19.466666666666697,23.1,49.0,15.0,1
1182 | 4438,1.0,1.0,0.1,19.466666666666697,23.1,49.0,15.0,0
1183 | 4439,1.0,2.0,0.1,19.466666666666697,23.1,49.0,15.0,-2
1184 | 4441,1.0,2.0,0.1,19.466666666666697,23.1,49.0,15.0,0
1185 | 4443,1.0,1.0,0.1,19.466666666666697,23.1,49.0,15.0,-2
1186 | 4446,2.0,1.0,0.1,19.466666666666697,23.1,49.0,15.0,-2
1187 | 4466,2.0,1.0,0.1,23.7,23.1,15.0,45.0,0
1188 | 4467,2.0,2.0,0.1,23.7,23.1,15.0,45.0,0
1189 | 4468,2.0,2.0,0.1,23.7,23.1,15.0,45.0,-1
1190 | 4469,2.0,1.0,0.1,23.7,23.1,15.0,45.0,-1
1191 | 4471,2.0,2.0,0.1,23.7,23.1,15.0,45.0,1
1192 | 4489,2.0,1.0,0.1,23.7,23.1,15.0,45.0,2
1193 | 4498,2.0,2.0,0.1,22.0,23.1,61.0,21.0,0
1194 | 4500,4.0,2.0,0.1,22.0,23.1,61.0,21.0,0
1195 | 4502,2.0,1.0,0.1,22.0,23.1,61.0,21.0,0
1196 | 4503,2.0,2.0,0.1,22.0,23.1,61.0,21.0,1
1197 | 4508,2.0,2.0,0.1,23.7,23.1,56.0,20.0,1
1198 | 4509,2.0,2.0,0.1,23.7,23.1,56.0,20.0,0
1199 | 4514,2.0,1.0,0.1,23.7,23.1,56.0,20.0,-1
1200 | 4515,2.0,1.0,0.1,23.7,23.1,68.0,18.0,0
1201 | 4517,2.0,2.0,0.1,23.7,23.1,68.0,18.0,0
1202 | 4519,5.0,2.0,0.1,23.7,23.1,48.0,18.0,0
1203 | 4520,1.0,2.0,0.1,23.7,23.1,48.0,18.0,-1
1204 | 4521,1.0,2.0,0.1,23.7,23.1,48.0,18.0,0
1205 | 4522,1.0,1.0,0.1,23.7,23.1,48.0,18.0,-1
1206 | 4523,1.0,1.0,0.1,23.7,23.1,48.0,18.0,0
1207 | 4525,2.0,2.0,0.1,23.7,23.1,48.0,18.0,1
1208 | 4528,2.0,2.0,0.1,23.7,23.1,48.0,18.0,0
1209 | 4529,2.0,1.0,0.1,23.7,23.1,48.0,18.0,0
1210 | 4533,1.0,2.0,0.1,23.7,23.1,48.0,18.0,1
1211 | 4535,2.0,2.0,0.1,23.7,23.1,48.0,18.0,-1
1212 | 4540,1.0,1.0,0.1,23.7,23.1,48.0,18.0,1
1213 | 4543,1.0,2.0,0.1,23.7,23.1,42.0,18.0,-1
1214 | 4544,2.0,2.0,0.1,23.7,23.1,42.0,18.0,-1
1215 | 4545,1.0,2.0,0.1,23.7,23.1,42.0,18.0,1
1216 | 4546,1.0,1.0,0.1,23.7,23.1,42.0,18.0,0
1217 | 4547,1.0,2.0,0.1,23.7,23.1,42.0,18.0,0
1218 | 4550,2.0,2.0,0.1,23.7,23.1,42.0,18.0,0
1219 | 4555,2.0,1.0,0.1,23.7,23.1,42.0,18.0,-1
1220 | 4556,2.0,1.0,0.1,23.7,23.1,42.0,18.0,0
1221 | 4564,1.0,1.0,0.1,23.7,23.1,42.0,18.0,-1
1222 | 4568,2.0,1.0,0.1,23.7,23.1,42.0,18.0,1
1223 | 4578,2.0,2.0,0.1,25.6,23.1,5.0,29.0,-1
1224 | 4579,1.0,2.0,0.1,25.6,23.1,5.0,29.0,0
1225 | 4580,2.0,2.0,0.1,25.6,23.1,5.0,29.0,0
1226 | 4597,2.0,1.0,0.1,25.6,23.1,5.0,29.0,-1
1227 | 4604,1.0,2.0,0.1,25.6,23.1,5.0,29.0,-1
1228 | 4614,1.0,1.0,0.1,25.6,23.1,5.0,29.0,0
1229 | 4646,1.0,1.0,0.1,25.6,23.1,5.0,29.0,-1
1230 | 4648,2.0,1.0,0.1,25.6,23.1,5.0,29.0,0
1231 | 4666,4.0,1.0,0.1,24.1,23.1,7.0,21.0,0
1232 | 4667,5.0,2.0,0.1,24.1,23.1,7.0,21.0,0
1233 | 4668,2.0,1.0,0.1,24.1,23.1,7.0,21.0,0
1234 | 4670,3.0,1.0,0.1,24.1,23.1,7.0,21.0,-1
1235 | 4671,2.0,2.0,0.1,24.1,23.1,7.0,21.0,0
1236 | 4672,1.0,1.0,0.1,24.1,23.1,7.0,21.0,0
1237 | 4673,3.0,1.0,0.1,24.1,23.1,7.0,21.0,0
1238 | 4675,1.0,2.0,0.1,24.1,23.1,7.0,21.0,0
1239 | 4677,1.0,2.0,0.1,24.1,23.1,7.0,21.0,-1
1240 | 4679,1.0,1.0,0.1,24.1,23.1,7.0,21.0,-1
1241 | 4681,2.0,2.0,0.1,24.1,23.1,7.0,21.0,-1
1242 | 4689,1.0,1.0,0.1,24.1,23.1,7.0,21.0,1
1243 | 4690,2.0,1.0,0.1,24.1,23.1,7.0,21.0,-1
1244 | 4706,2.0,2.0,0.1,24.1,23.1,7.0,21.0,-2
1245 | 4731,1.0,2.0,0.1,25.4,23.1,19.0,22.5,0
1246 | 4732,1.0,2.0,0.1,25.4,23.1,19.0,22.5,-1
1247 | 4733,1.0,1.0,0.1,25.4,23.1,19.0,22.5,0
1248 | 4734,1.0,1.0,0.1,25.4,23.1,19.0,22.5,-1
1249 | 4735,1.0,2.0,0.1,25.4,23.1,19.0,22.5,1
1250 | 4737,2.0,2.0,0.1,25.4,23.1,19.0,22.5,-1
1251 | 4738,2.0,2.0,0.1,25.4,23.1,19.0,22.5,0
1252 | 4747,2.0,1.0,0.1,25.4,23.1,19.0,22.5,-1
1253 | 4749,2.0,1.0,0.1,25.4,23.1,19.0,22.5,0
1254 | 4751,2.0,1.0,0.1,25.4,23.1,19.0,22.5,1
1255 | 4767,2.0,1.0,0.1,25.9,23.1,14.0,30.0,0
1256 | 4768,3.0,1.0,0.1,25.9,23.1,14.0,30.0,0
1257 | 4773,1.0,2.0,0.1,25.9,23.1,14.0,30.0,0
1258 | 4774,1.0,1.0,0.1,25.9,23.1,14.0,30.0,-1
1259 | 4777,2.0,1.0,0.1,25.9,23.1,14.0,30.0,-1
1260 | 4779,1.0,2.0,0.1,25.9,23.1,14.0,30.0,-1
1261 | 4781,1.0,1.0,0.1,25.9,23.1,14.0,30.0,0
1262 | 4794,2.0,2.0,0.1,25.9,23.1,14.0,30.0,0
1263 | 4825,3.0,1.0,0.1,24.65,23.1,29.0,11.0,0
1264 | 4827,2.0,1.0,0.1,24.65,23.1,29.0,11.0,0
1265 | 4830,1.0,1.0,0.1,24.65,23.1,29.0,11.0,-1
1266 | 4831,1.0,2.0,0.1,24.65,23.1,29.0,11.0,0
1267 | 4835,1.0,1.0,0.1,24.65,23.1,29.0,11.0,0
1268 | 4842,2.0,2.0,0.1,24.65,23.1,29.0,11.0,-1
1269 | 4844,1.0,2.0,0.1,24.65,23.1,29.0,11.0,-1
1270 | 4848,2.0,2.0,0.1,24.65,23.1,29.0,11.0,0
1271 | 4854,2.0,1.0,0.1,24.65,23.1,29.0,11.0,-1
1272 | 4879,5.0,2.0,0.1,26.7,23.1,12.0,30.0,-1
1273 | 4880,2.0,2.0,0.1,26.7,23.1,12.0,30.0,-1
1274 | 4881,2.0,1.0,0.1,26.7,23.1,12.0,30.0,-1
1275 | 4886,2.0,2.0,0.1,26.7,23.1,12.0,30.0,-2
1276 | 4897,2.0,2.0,0.1,26.7,23.1,12.0,30.0,0
1277 | 4908,1.0,2.0,0.1,26.7,23.1,12.0,30.0,0
1278 | 4909,1.0,1.0,0.1,26.7,23.1,12.0,30.0,0
1279 | 4911,2.0,2.0,0.1,26.7,23.1,12.0,30.0,1
1280 | 4912,1.0,2.0,0.1,26.7,23.1,12.0,30.0,-1
1281 | 4916,2.0,1.0,0.1,26.7,23.1,12.0,30.0,0
1282 | 4929,1.0,2.0,0.1,21.4,23.1,37.0,12.0,-1
1283 | 4930,1.0,1.0,0.1,21.4,23.1,37.0,12.0,-1
1284 | 4934,1.0,1.0,0.1,21.4,23.1,37.0,12.0,0
1285 | 4936,1.0,2.0,0.1,21.4,23.1,37.0,12.0,0
1286 | 4939,2.0,2.0,0.1,21.4,23.1,37.0,12.0,-1
1287 | 4940,1.0,1.0,0.1,21.4,23.1,37.0,12.0,-2
1288 | 4942,2.0,2.0,0.1,21.4,23.1,37.0,12.0,0
1289 | 4950,2.0,1.0,0.1,21.4,23.1,37.0,12.0,0
1290 | 4952,2.0,1.0,0.1,21.4,23.1,37.0,12.0,-1
1291 | 4972,2.0,1.0,0.1,21.4,23.1,37.0,12.0,-2
1292 | 4975,3.0,1.0,0.1,21.4,23.1,37.0,12.0,-1
1293 | 4979,2.0,1.0,0.1,25.2,23.1,47.0,21.58,0
1294 | 4980,1.0,1.0,0.1,25.2,23.1,47.0,21.58,0
1295 | 5001,1.0,2.0,0.1,25.2,23.1,47.0,21.58,0
1296 | 5003,1.0,2.0,0.1,25.2,23.1,47.0,21.58,-1
1297 | 5004,2.0,2.0,0.1,25.2,23.1,47.0,21.58,0
1298 | 5010,1.0,1.0,0.0,20.6,23.1,53.3,10.3,-2
1299 | 5011,1.0,1.0,0.0,20.6,23.1,53.3,10.3,-1
1300 | 5017,2.0,1.0,0.0,20.6,23.1,53.3,10.3,-1
1301 | 5031,1.0,1.0,0.0,20.6,23.1,53.3,10.3,0
1302 | 5047,1.0,2.0,0.0,20.6,23.1,53.3,10.3,0
1303 | 5049,2.0,2.0,0.0,20.6,23.1,53.3,10.3,0
1304 | 5050,1.0,2.0,0.0,20.6,23.1,53.3,10.3,-2
1305 | 5053,1.0,2.0,0.0,20.6,23.1,53.3,10.3,-1
1306 | 5056,2.0,1.0,0.0,20.6,23.1,53.3,10.3,-2
1307 | 5057,2.0,2.0,0.0,20.6,23.1,53.3,10.3,-1
1308 | 5065,2.0,2.0,0.0,20.6,23.1,53.3,10.3,-2
1309 | 5071,1.0,2.0,0.0,26.6333333333333,27.0,32.6,19.4,0
1310 | 5072,1.0,2.0,0.0,26.6333333333333,27.0,32.6,19.4,-1
1311 | 5076,2.0,2.0,0.0,26.6333333333333,27.0,32.6,19.4,-1
1312 | 5081,1.0,2.0,0.0,26.6333333333333,27.0,32.6,19.4,1
1313 | 5100,1.0,2.0,0.0,21.5,21.5,31.0,12.0,0
1314 | 5101,1.0,2.0,0.0,21.5,21.5,31.0,12.0,-1
1315 | 5111,1.0,2.0,0.0,21.5,21.5,31.0,12.0,1
1316 | 5128,2.0,1.0,0.0,21.5,21.5,31.0,12.0,1
1317 | 5129,2.0,1.0,0.0,21.5,21.5,31.0,12.0,0
1318 | 5147,2.0,2.0,0.0,21.5,21.5,31.0,12.0,0
1319 | 5154,2.0,2.0,0.0,21.5,21.5,31.0,12.0,1
1320 | 5155,2.0,2.0,0.0,21.5,21.5,31.0,12.0,-1
1321 | 5162,2.0,1.0,0.0,21.5,21.5,31.0,12.0,-1
1322 | 5180,1.0,1.0,0.1,23.0,23.1,30.1,18.1,-1
1323 | 5182,1.0,1.0,0.1,23.0,23.1,30.1,18.1,0
1324 | 5202,3.0,1.0,0.1,23.0,23.1,30.1,18.1,0
1325 | 5203,1.0,1.0,0.0,22.4,23.1,35.0,14.5,0
1326 | 5204,1.0,1.0,0.0,22.4,23.1,35.0,14.5,-1
1327 | 5206,2.0,1.0,0.0,22.4,23.1,35.0,14.5,-1
1328 | 5207,2.0,1.0,0.0,22.4,23.1,35.0,14.5,0
1329 | 5216,2.0,2.0,0.0,22.4,23.1,35.0,14.5,-1
1330 | 5218,1.0,2.0,0.0,22.4,23.1,35.0,14.5,0
1331 | 5221,2.0,2.0,0.0,22.4,23.1,35.0,14.5,0
1332 | 5226,1.0,2.0,0.0,22.4,23.1,35.0,14.5,-1
1333 | 5233,3.0,2.0,0.1,25.0,23.1,25.0,33.1,0
1334 | 5234,2.0,2.0,0.1,25.0,23.1,25.0,33.1,-1
1335 | 5235,2.0,2.0,0.1,25.0,23.1,25.0,33.1,0
1336 | 5238,3.0,2.0,0.1,25.0,23.1,25.0,33.1,-1
1337 | 5239,1.0,2.0,0.1,25.0,23.1,25.0,33.1,-1
1338 | 5242,1.0,1.0,0.1,25.0,23.1,25.0,33.1,-2
1339 | 5243,2.0,1.0,0.1,25.0,23.1,25.0,33.1,-1
1340 | 5244,1.0,2.0,0.1,25.0,23.1,25.0,33.1,0
1341 | 5246,1.0,1.0,0.1,25.0,23.1,25.0,33.1,0
1342 | 5250,3.0,2.0,0.1,33.3,23.1,61.0,30.5,2
1343 | 5251,4.0,2.0,0.1,33.3,23.1,61.0,30.5,2
1344 | 5252,2.0,2.0,0.1,33.3,23.1,61.0,30.5,1
1345 | 5255,2.0,2.0,0.1,33.3,23.1,61.0,30.5,2
1346 | 5257,1.0,2.0,0.1,33.3,23.1,61.0,30.5,1
1347 | 5262,2.0,2.0,0.1,33.3,23.1,61.0,30.5,0
1348 | 5265,1.0,2.0,0.1,33.3,23.1,61.0,30.5,0
1349 | 5271,3.0,1.0,0.1,33.3,23.1,61.0,30.5,2
1350 | 5272,1.0,2.0,0.1,33.3,23.1,61.0,30.5,2
1351 | 5273,2.0,2.0,0.1,23.7,23.1,24.0,36.7,0
1352 | 5274,1.0,2.0,0.1,23.7,23.1,24.0,36.7,0
1353 | 5277,2.0,2.0,0.1,23.7,23.1,24.0,36.7,1
1354 | 5279,1.0,1.0,0.1,23.7,23.1,24.0,36.7,0
1355 | 5282,2.0,2.0,0.1,23.7,23.1,24.0,36.7,2
1356 | 5283,2.0,1.0,0.1,23.7,23.1,24.0,36.7,1
1357 | 5284,2.0,2.0,0.1,23.5,23.1,54.0,20.4,0
1358 | 5285,1.0,2.0,0.1,23.5,23.1,54.0,20.4,-1
1359 | 5288,1.0,2.0,0.1,23.5,23.1,54.0,20.4,0
1360 | 5289,2.0,2.0,0.1,23.5,23.1,54.0,20.4,-1
1361 | 5291,1.0,1.0,0.1,23.5,23.1,54.0,20.4,-1
1362 | 5293,1.0,1.0,0.1,23.5,23.1,54.0,20.4,0
1363 | 5297,1.0,1.0,0.1,20.5,23.1,91.0,10.7,0
1364 | 5298,1.0,1.0,0.1,20.5,23.1,91.0,10.7,-1
1365 | 5300,3.0,1.0,0.1,20.5,23.1,91.0,10.7,-1
1366 | 5304,1.0,2.0,0.1,20.5,23.1,91.0,10.7,0
1367 | 5305,1.0,2.0,0.1,20.5,23.1,91.0,10.7,-1
1368 | 5316,2.0,2.0,0.1,20.5,23.1,91.0,10.7,0
1369 | 5317,1.0,1.0,0.1,23.7,23.1,50.0,8.0,-1
1370 | 5318,2.0,2.0,0.1,23.7,23.1,50.0,8.0,-1
1371 | 5319,1.0,1.0,0.1,23.7,23.1,50.0,8.0,1
1372 | 5320,1.0,2.0,0.1,23.7,23.1,50.0,8.0,0
1373 | 5321,1.0,1.0,0.1,23.7,23.1,50.0,8.0,0
1374 | 5326,2.0,2.0,0.1,23.7,23.1,50.0,8.0,0
1375 | 5328,3.0,2.0,0.1,23.7,23.1,50.0,8.0,-1
1376 | 5330,2.0,2.0,0.1,21.0,23.1,42.0,15.0,-1
1377 | 5331,1.0,2.0,0.1,21.0,23.1,42.0,15.0,-1
1378 | 5333,1.0,2.0,0.1,21.0,23.1,42.0,15.0,0
1379 | 5335,2.0,2.0,0.1,21.0,23.1,42.0,15.0,0
1380 | 5353,3.0,2.0,0.1,21.0,23.1,42.0,15.0,0
1381 | 5359,2.0,1.0,0.1,23.7,23.1,37.0,31.0,2
1382 | 5360,1.0,1.0,0.1,23.7,23.1,37.0,31.0,2
1383 | 5361,1.0,1.0,0.1,23.7,23.1,37.0,31.0,-1
1384 | 5362,2.0,2.0,0.1,23.7,23.1,37.0,31.0,0
1385 | 5363,1.0,2.0,0.1,23.7,23.1,37.0,31.0,1
1386 | 5364,4.0,2.0,0.1,23.7,23.1,37.0,31.0,0
1387 | 5365,2.0,1.0,0.1,23.7,23.1,37.0,31.0,1
1388 | 5368,2.0,1.0,0.1,23.7,23.1,37.0,31.0,0
1389 | 5369,1.0,2.0,0.1,23.7,23.1,37.0,31.0,-1
1390 | 5371,1.0,1.0,0.1,23.7,23.1,37.0,31.0,0
1391 | 5373,2.0,2.0,0.1,23.7,23.1,37.0,31.0,2
1392 | 5377,2.0,2.0,0.1,23.7,23.1,37.0,31.0,-1
1393 | 5378,1.0,2.0,0.1,23.7,23.1,37.0,31.0,-2
1394 | 5385,2.0,2.0,0.1,23.7,23.1,58.0,26.5,0
1395 | 5387,1.0,2.0,0.1,23.7,23.1,58.0,26.5,0
1396 | 5391,1.0,1.0,0.1,23.7,23.1,58.0,26.5,0
1397 | 5403,1.0,2.0,0.1,23.7,23.1,58.0,26.5,-1
1398 | 5404,2.0,2.0,0.1,23.7,23.1,58.0,26.5,1
1399 | 5408,1.0,1.0,0.1,23.7,23.1,26.0,32.0,-1
1400 | 5409,1.0,2.0,0.1,23.7,23.1,26.0,32.0,0
1401 | 5412,1.0,1.0,0.1,23.7,23.1,26.0,32.0,0
1402 | 5414,1.0,2.0,0.1,23.7,23.1,26.0,32.0,-1
1403 | 5420,2.0,2.0,0.1,23.7,23.1,26.0,32.0,-1
1404 | 5421,1.0,2.0,0.1,23.7,23.1,26.0,32.0,1
1405 | 5423,1.0,2.0,0.1,23.7,23.1,24.0,30.0,0
1406 | 5424,2.0,1.0,0.1,23.7,23.1,24.0,30.0,0
1407 | 5426,2.0,2.0,0.1,23.7,23.1,24.0,30.0,1
1408 | 5428,1.0,1.0,0.1,23.7,23.1,24.0,30.0,0
1409 | 5434,1.0,1.0,0.1,23.7,23.1,24.0,30.0,1
1410 | 5435,1.0,2.0,0.1,23.7,23.1,24.0,30.0,1
1411 | 5441,1.0,2.0,0.1,23.7,23.1,24.0,30.0,2
1412 | 5442,2.0,2.0,0.1,23.7,23.1,19.0,36.0,1
1413 | 5444,1.0,2.0,0.1,23.7,23.1,19.0,36.0,0
1414 | 5446,2.0,2.0,0.1,23.7,23.1,19.0,36.0,-2
1415 | 5447,2.0,2.0,0.1,23.7,23.1,19.0,36.0,-1
1416 | 5448,2.0,2.0,0.1,23.7,23.1,19.0,36.0,0
1417 | 5451,1.0,1.0,0.1,23.7,23.1,19.0,36.0,0
1418 | 5452,1.0,2.0,0.1,23.7,23.1,19.0,36.0,1
1419 | 5455,1.0,2.0,0.1,23.7,23.1,19.0,36.0,-1
1420 | 5456,2.0,2.0,0.1,23.7,23.1,19.0,35.0,0
1421 | 5458,2.0,2.0,0.1,23.7,23.1,19.0,35.0,-1
1422 | 5459,2.0,2.0,0.1,23.7,23.1,19.0,35.0,1
1423 | 5465,1.0,2.0,0.1,23.7,23.1,19.0,35.0,0
1424 | 5467,1.0,2.0,0.1,23.7,23.1,19.0,35.0,1
1425 | 5475,2.0,2.0,0.1,23.7,23.1,28.0,35.0,0
1426 | 5476,2.0,2.0,0.1,23.7,23.1,28.0,35.0,1
1427 | 5477,1.0,2.0,0.1,23.7,23.1,28.0,35.0,0
1428 | 5480,2.0,2.0,0.1,23.7,23.1,28.0,35.0,2
1429 | 5486,3.0,2.0,0.1,23.7,23.1,28.0,35.0,0
1430 | 5488,2.0,1.0,0.1,23.7,23.1,28.0,35.0,1
1431 | 5489,2.0,1.0,0.1,23.7,23.1,24.0,34.0,0
1432 | 5491,2.0,2.0,0.1,23.7,23.1,24.0,34.0,0
1433 | 5494,2.0,1.0,0.1,23.7,23.1,24.0,34.0,-1
1434 | 5496,2.0,2.0,0.1,23.7,23.1,24.0,34.0,1
1435 | 5505,2.0,1.0,0.1,23.7,23.1,30.0,18.0,0
1436 | 5506,2.0,2.0,0.1,23.7,23.1,30.0,18.0,-1
1437 | 5508,1.0,1.0,0.1,23.7,23.1,30.0,18.0,-1
1438 | 5512,2.0,1.0,0.1,23.7,23.1,30.0,18.0,-1
1439 | 5513,1.0,1.0,0.1,23.7,23.1,30.0,18.0,0
1440 | 5514,2.0,2.0,0.1,23.7,23.1,30.0,18.0,0
1441 | 5518,2.0,2.0,0.1,23.7,23.1,51.0,12.0,-1
1442 | 5519,2.0,2.0,0.1,23.7,23.1,51.0,12.0,1
1443 | 5520,2.0,2.0,0.1,23.7,23.1,51.0,12.0,0
1444 | 5532,2.0,2.0,0.1,23.7,23.1,77.0,12.0,1
1445 | 5533,2.0,2.0,0.1,23.7,23.1,77.0,12.0,0
1446 | 5535,1.0,2.0,0.1,23.7,23.1,77.0,12.0,1
1447 | 5536,1.0,2.0,0.1,23.7,23.1,77.0,12.0,-2
1448 | 5537,2.0,2.0,0.1,23.7,23.1,77.0,12.0,-1
1449 | 5543,1.0,2.0,0.1,23.7,23.1,77.0,12.0,0
1450 | 5547,2.0,2.0,0.1,23.7,23.1,77.0,12.0,2
1451 | 5551,1.0,1.0,0.1,23.7,23.1,77.0,12.0,1
1452 | 5552,2.0,1.0,0.1,23.7,23.1,77.0,12.0,0
1453 | 5553,1.0,1.0,0.1,23.7,23.1,77.0,12.0,0
1454 | 5554,1.0,1.0,0.2,28.3,23.1,69.0,31.0,0
1455 | 5556,1.0,1.0,0.2,28.3,23.1,69.0,31.0,-1
1456 | 5558,1.0,2.0,0.2,28.3,23.1,69.0,31.0,0
1457 | 5560,1.0,1.0,0.2,28.3,23.1,69.0,31.0,-2
1458 | 5566,1.0,1.0,0.2,28.3,23.1,69.0,31.0,2
1459 | 5569,2.0,1.0,0.2,28.3,23.1,69.0,31.0,0
1460 | 5570,1.0,2.0,0.2,28.3,23.1,69.0,31.0,-1
1461 | 5618,2.0,2.0,0.2,28.3,23.1,69.0,31.0,0
1462 | 5648,2.0,1.0,0.1,22.0,23.1,64.0,7.0,0
1463 | 5649,1.0,1.0,0.1,22.0,23.1,64.0,7.0,0
1464 | 5650,1.0,2.0,0.1,22.0,23.1,64.0,7.0,0
1465 | 5652,1.0,1.0,0.1,22.0,23.1,64.0,7.0,-1
1466 | 5654,2.0,2.0,0.1,22.0,23.1,64.0,7.0,-1
1467 | 5667,3.0,1.0,0.1,22.0,23.1,64.0,7.0,0
1468 | 5679,2.0,1.0,0.1,22.0,23.1,64.0,7.0,-1
1469 | 5681,1.0,1.0,0.1,22.0,23.1,64.0,7.0,2
1470 | 5691,2.0,2.0,0.1,22.0,23.1,64.0,7.0,0
1471 | 5711,6.0,1.0,0.1,22.0,23.1,64.0,7.0,0
1472 | 5717,3.0,1.0,0.02,26.3,23.1,54.0,22.8,-1
1473 | 5718,2.0,1.0,0.02,26.3,23.1,54.0,22.8,0
1474 | 5719,3.0,2.0,0.02,26.3,23.1,54.0,22.8,2
1475 | 5721,3.0,2.0,0.02,26.3,23.1,54.0,22.8,-1
1476 | 5722,3.0,1.0,0.02,26.3,23.1,54.0,22.8,2
1477 | 5723,3.0,1.0,0.02,26.3,23.1,54.0,22.8,0
1478 | 5728,4.0,2.0,0.02,26.3,23.1,54.0,22.8,1
1479 | 5730,4.0,2.0,0.02,26.3,23.1,54.0,22.8,0
1480 | 5732,3.0,1.0,0.02,26.3,23.1,54.0,22.8,1
1481 | 5733,4.0,1.0,0.02,26.3,23.1,54.0,22.8,0
1482 | 5734,3.0,1.0,0.03,26.45,23.1,54.0,24.0,0
1483 | 5735,3.0,2.0,0.03,26.45,23.1,54.0,24.0,0
1484 | 5736,2.0,1.0,0.03,26.45,23.1,54.0,24.0,0
1485 | 5737,2.0,1.0,0.03,26.45,23.1,54.0,24.0,-1
1486 | 5739,2.0,1.0,0.01,26.2,23.1,54.0,23.6,0
1487 | 5740,3.0,2.0,0.01,26.2,23.1,54.0,23.6,0
1488 | 5741,2.0,1.0,0.01,26.2,23.1,54.0,23.6,-1
1489 | 5743,2.0,1.0,0.01,26.2,23.1,54.0,23.6,1
1490 | 5752,1.0,1.0,0.01,26.2,23.1,54.0,23.6,-1
1491 | 5755,2.0,2.0,0.01,26.2,23.1,54.0,23.6,0
1492 | 5760,2.0,1.0,0.0,25.75,23.1,54.0,18.5,0
1493 | 5761,3.0,1.0,0.0,25.75,23.1,54.0,18.5,0
1494 | 5762,3.0,1.0,0.0,25.75,23.1,54.0,18.5,2
1495 | 5764,2.0,1.0,0.0,25.75,23.1,54.0,18.5,-1
1496 | 5770,4.0,1.0,0.0,25.75,23.1,54.0,18.5,0
1497 | 5772,5.0,1.0,0.0,25.75,23.1,54.0,18.5,0
1498 | 5779,1.0,1.0,0.1,25.6,23.1,54.0,24.7,-1
1499 | 5780,2.0,2.0,0.1,25.6,23.1,54.0,24.7,-1
1500 | 5781,2.0,1.0,0.1,25.6,23.1,54.0,24.7,-1
1501 | 5782,2.0,1.0,0.1,25.6,23.1,54.0,24.7,0
1502 | 5791,1.0,1.0,0.1,25.6,23.1,54.0,24.7,0
1503 | 5793,1.0,2.0,0.1,25.6,23.1,54.0,24.7,-1
1504 | 5803,2.0,1.0,0.1,25.6,23.1,54.0,24.7,1
1505 | 5808,3.0,2.0,0.1,25.2,23.1,54.0,23.9,-1
1506 | 5809,4.0,1.0,0.1,25.2,23.1,54.0,23.9,-1
1507 | 5810,3.0,1.0,0.1,25.2,23.1,54.0,23.9,1
1508 | 5811,3.0,1.0,0.1,25.2,23.1,54.0,23.9,0
1509 | 5816,3.0,1.0,0.1,25.2,23.1,54.0,23.9,-1
1510 | 5817,3.0,2.0,0.1,25.2,23.1,54.0,23.9,0
1511 | 5823,6.0,1.0,0.1,25.2,23.1,54.0,23.9,0
1512 | 5824,4.0,1.0,0.1,25.2,23.1,54.0,23.9,0
1513 | 5825,2.0,1.0,0.05,22.4,23.1,38.0,13.7,-1
1514 | 5826,2.0,1.0,0.05,22.4,23.1,38.0,13.7,0
1515 | 5831,1.0,1.0,0.05,22.4,23.1,38.0,13.7,-1
1516 | 5832,2.0,2.0,0.05,22.4,23.1,38.0,13.7,-1
1517 | 5843,2.0,2.0,0.05,22.4,23.1,38.0,13.7,0
1518 | 5844,3.0,1.0,0.05,22.4,23.1,38.0,13.7,-1
1519 | 5849,3.0,1.0,0.06,23.7,23.1,44.0,20.9,-1
1520 | 5850,2.0,1.0,0.06,23.7,23.1,44.0,20.9,-2
1521 | 5851,4.0,1.0,0.06,23.7,23.1,44.0,20.9,-1
1522 | 5852,2.0,1.0,0.06,23.7,23.1,44.0,20.9,-1
1523 | 5854,3.0,1.0,0.06,23.7,23.1,44.0,20.9,-2
1524 | 5856,3.0,2.0,0.06,23.7,23.1,44.0,20.9,0
1525 | 5858,2.0,1.0,0.09,23.3,23.1,39.0,21.7,0
1526 | 5859,2.0,1.0,0.09,23.3,23.1,39.0,21.7,-2
1527 | 5860,2.0,2.0,0.09,23.3,23.1,39.0,21.7,-1
1528 | 5862,2.0,1.0,0.09,23.3,23.1,39.0,21.7,-1
1529 | 5863,3.0,1.0,0.09,23.3,23.1,39.0,21.7,0
1530 | 5866,3.0,2.0,0.09,23.3,23.1,39.0,21.7,-1
1531 | 5873,1.0,2.0,0.09,23.3,23.1,39.0,21.7,-1
1532 | 5881,2.0,2.0,0.06,22.2,23.1,97.0,11.6,0
1533 | 5882,2.0,1.0,0.06,22.2,23.1,97.0,11.6,-1
1534 | 5883,2.0,2.0,0.06,22.2,23.1,97.0,11.6,-1
1535 | 5885,3.0,1.0,0.06,22.2,23.1,97.0,11.6,0
1536 | 5887,2.0,1.0,0.06,22.2,23.1,97.0,11.6,0
1537 | 5890,3.0,2.0,0.06,22.2,23.1,97.0,11.6,-1
1538 | 5893,2.0,1.0,0.09,22.6,23.1,98.0,10.4,-1
1539 | 5894,1.0,1.0,0.09,22.6,23.1,98.0,10.4,-1
1540 | 5897,2.0,1.0,0.02,22.85,23.1,39.0,6.5,-1
1541 | 5898,2.0,1.0,0.02,22.85,23.1,39.0,6.5,0
1542 | 5900,3.0,2.0,0.02,22.85,23.1,39.0,6.5,-1
1543 | 5903,3.0,1.0,0.02,22.85,23.1,39.0,6.5,0
1544 | 5905,2.0,2.0,0.02,22.85,23.1,39.0,6.5,-1
1545 | 5906,1.0,1.0,0.02,22.85,23.1,39.0,6.5,-1
1546 | 5909,3.0,1.0,0.02,22.85,23.1,39.0,6.5,-1
1547 | 5918,2.0,2.0,0.02,22.85,23.1,39.0,6.5,0
1548 | 5919,2.0,1.0,0.1,30.01,23.1,73.4,30.17,1
1549 | 5920,2.0,2.0,0.1,30.01,23.1,73.4,30.17,0
1550 | 5922,2.0,2.0,0.1,30.01,23.1,73.4,30.17,-1
1551 | 5923,2.0,1.0,0.1,30.01,23.1,73.4,30.17,-1
1552 | 5925,2.0,1.0,0.1,30.01,23.1,73.4,30.17,0
1553 | 5926,1.0,1.0,0.1,30.01,23.1,73.4,30.17,0
1554 | 5928,2.0,2.0,0.1,30.01,23.1,73.4,30.17,2
1555 | 5929,1.0,2.0,0.1,30.01,23.1,73.4,30.17,0
1556 | 5942,1.0,1.0,0.1,30.01,23.1,73.4,30.17,-1
1557 | 5946,1.0,1.0,0.1,30.01,23.1,73.4,30.17,1
1558 | 5949,2.0,2.0,0.1,30.01,23.1,73.4,30.17,1
1559 | 5952,1.0,1.0,0.1,30.01,23.1,73.4,30.17,2
1560 | 5964,2.0,1.0,0.1,30.01,23.1,73.4,30.17,2
1561 | 5973,1.0,1.0,0.27,30.01625,30.0,77.0,29.72,0
1562 | 5974,2.0,1.0,0.27,30.01625,30.0,77.0,29.72,0
1563 | 5976,2.0,1.0,0.27,30.01625,30.0,77.0,29.72,1
1564 | 5977,2.0,2.0,0.27,30.01625,30.0,77.0,29.72,0
1565 | 5980,2.0,2.0,0.27,30.01625,30.0,77.0,29.72,-1
1566 | 5990,2.0,2.0,0.27,30.01625,30.0,77.0,29.72,1
1567 | 5994,2.0,1.0,0.27,30.01625,30.0,77.0,29.72,-1
1568 | 5998,1.0,1.0,0.27,30.01625,30.0,77.0,29.72,1
1569 | 6010,1.0,2.0,0.27,30.01625,30.0,77.0,29.72,1
1570 | 6023,2.0,2.0,0.1,26.8,23.1,69.0,29.4,-2
1571 | 6024,1.0,1.0,0.1,26.8,23.1,69.0,29.4,-1
1572 | 6025,2.0,1.0,0.1,26.8,23.1,69.0,29.4,-1
1573 | 6028,1.0,1.0,0.1,26.8,23.1,69.0,29.4,0
1574 | 6031,2.0,2.0,0.1,26.8,23.1,69.0,29.4,-1
1575 | 6032,1.0,2.0,0.1,26.8,23.1,69.0,29.4,0
1576 | 6037,2.0,2.0,0.1,26.8,23.1,69.0,29.4,0
1577 | 6039,1.0,2.0,0.1,26.8,23.1,69.0,29.4,-1
1578 | 6051,1.0,2.0,0.1,26.8,23.1,69.0,29.4,-2
1579 | 6069,1.0,2.0,0.1,23.7,23.1,79.0,27.8,0
1580 | 6070,3.0,2.0,0.1,23.7,23.1,79.0,27.8,-1
1581 | 6072,1.0,2.0,0.1,23.7,23.1,79.0,27.8,-1
1582 | 6073,4.0,1.0,0.1,23.7,23.1,79.0,27.8,-1
1583 | 6074,1.0,1.0,0.1,23.7,23.1,79.0,27.8,-1
1584 | 6088,1.0,1.0,0.1,23.7,23.1,79.0,27.8,0
1585 | 6090,2.0,2.0,0.1,23.7,23.1,79.0,27.8,-1
1586 | 6097,2.0,2.0,0.1,23.7,23.1,79.0,27.8,0
1587 | 6098,2.0,1.0,0.1,23.7,23.1,79.0,27.8,0
1588 | 6107,2.0,1.0,0.1,23.7,23.1,79.0,27.8,-1
1589 | 6115,2.0,2.0,0.1,28.5,23.1,86.5,27.5,0
1590 | 6119,3.0,2.0,0.1,28.5,23.1,86.5,27.5,-1
1591 | 6120,2.0,2.0,0.1,28.5,23.1,86.5,27.5,-1
1592 | 6121,2.0,2.0,0.1,28.5,23.1,86.5,27.5,1
1593 | 6122,2.0,1.0,0.1,28.5,23.1,86.5,27.5,0
1594 | 6125,3.0,1.0,0.1,28.5,23.1,86.5,27.5,1
1595 | 6127,3.0,1.0,0.1,28.5,23.1,86.5,27.5,0
1596 | 6129,5.0,1.0,0.1,28.5,23.1,86.5,27.5,1
1597 | 6135,3.0,2.0,0.1,28.5,23.1,86.5,27.5,0
1598 | 6138,2.0,1.0,0.1,28.5,23.1,86.5,27.5,2
1599 | 6151,2.0,2.0,0.15,25.7,23.1,81.5,27.8,-1
1600 | 6155,2.0,1.0,0.15,25.7,23.1,81.5,27.8,-1
1601 | 6156,2.0,1.0,0.15,25.7,23.1,81.5,27.8,0
1602 | 6158,3.0,1.0,0.15,25.7,23.1,81.5,27.8,0
1603 | 6160,1.0,1.0,0.15,25.7,23.1,81.5,27.8,-1
1604 | 6161,3.0,2.0,0.15,25.7,23.1,81.5,27.8,-1
1605 | 6164,2.0,2.0,0.15,25.7,23.1,81.5,27.8,0
1606 | 6168,3.0,2.0,0.15,25.7,23.1,81.5,27.8,0
1607 | 6185,2.0,1.0,0.15,25.7,23.1,81.5,27.8,1
1608 | 6188,2.0,1.0,0.1,27.4,23.1,68.0,31.7,-1
1609 | 6189,1.0,1.0,0.1,27.4,23.1,68.0,31.7,-1
1610 | 6193,1.0,2.0,0.1,27.4,23.1,68.0,31.7,-1
1611 | 6195,1.0,1.0,0.1,27.4,23.1,68.0,31.7,0
1612 | 6203,2.0,2.0,0.1,27.4,23.1,68.0,31.7,-1
1613 | 6209,3.0,1.0,0.1,27.4,23.1,68.0,31.7,0
1614 | 6210,1.0,1.0,0.1,27.4,23.1,68.0,31.7,-2
1615 | 6214,1.0,2.0,0.1,27.4,23.1,68.0,31.7,0
1616 | 6237,2.0,1.0,0.1,27.4,23.1,68.0,31.7,0
1617 | 6240,2.0,2.0,0.1,23.7,23.1,32.0,33.0,0
1618 | 6241,2.0,1.0,0.1,23.7,23.1,32.0,33.0,1
1619 | 6242,2.0,1.0,0.1,23.7,23.1,32.0,33.0,0
1620 | 6246,3.0,2.0,0.1,23.7,23.1,32.0,33.0,0
1621 | 6250,1.0,2.0,0.1,23.7,23.1,32.0,33.0,2
1622 | 6252,4.0,2.0,0.1,23.7,23.1,32.0,33.0,0
1623 | 6254,5.0,2.0,0.1,23.7,23.1,32.0,33.0,0
1624 | 6255,3.0,2.0,0.1,23.7,23.1,32.0,33.0,1
1625 | 6259,2.0,1.0,0.1,23.7,23.1,32.0,33.0,-1
1626 | 6262,2.0,2.0,0.1,23.7,23.1,32.0,33.0,-1
1627 | 6265,1.0,2.0,0.1,23.7,23.1,32.0,33.0,1
1628 | 6266,2.0,2.0,0.1,23.7,23.1,32.0,33.0,1
1629 | 6270,2.0,1.0,0.1,23.7,23.1,32.0,33.0,2
1630 | 6275,3.0,1.0,0.1,23.7,23.1,32.0,33.0,0
1631 | 6277,2.0,2.0,0.1,23.7,23.1,32.0,33.0,2
1632 | 6280,3.0,2.0,0.1,23.7,23.1,32.0,33.0,-1
1633 | 6296,4.0,2.0,0.1,23.7,23.1,32.0,33.0,2
1634 | 6318,2.0,2.0,0.1,23.7,23.1,22.0,36.9,2
1635 | 6320,2.0,1.0,0.1,23.7,23.1,22.0,36.9,0
1636 | 6321,2.0,2.0,0.1,23.7,23.1,22.0,36.9,1
1637 | 6323,4.0,2.0,0.1,23.7,23.1,22.0,36.9,0
1638 | 6325,2.0,2.0,0.1,23.7,23.1,22.0,36.9,0
1639 | 6326,3.0,2.0,0.1,23.7,23.1,22.0,36.9,-2
1640 | 6327,1.0,2.0,0.1,23.7,23.1,22.0,36.9,1
1641 | 6328,3.0,2.0,0.1,23.7,23.1,22.0,36.9,0
1642 | 6329,3.0,2.0,0.1,23.7,23.1,22.0,36.9,-1
1643 | 6343,3.0,2.0,0.1,23.7,23.1,90.0,24.5,0
1644 | 6344,1.0,1.0,0.1,23.7,23.1,90.0,24.5,-1
1645 | 6345,2.0,1.0,0.1,23.7,23.1,90.0,24.5,-1
1646 | 6348,1.0,2.0,0.1,23.7,23.1,90.0,24.5,0
1647 | 6349,1.0,2.0,0.1,23.7,23.1,90.0,24.5,-1
1648 | 6351,1.0,1.0,0.1,23.7,23.1,90.0,24.5,0
1649 | 6353,2.0,2.0,0.1,23.7,23.1,90.0,24.5,-1
1650 | 6356,2.0,2.0,0.1,23.7,23.1,90.0,24.5,0
1651 | 6357,2.0,1.0,0.1,23.7,23.1,90.0,24.5,0
1652 | 6358,1.0,1.0,0.1,23.7,23.1,90.0,24.5,-2
1653 | 6361,1.0,2.0,0.1,23.7,23.1,90.0,24.5,1
1654 | 6372,5.0,2.0,0.1,23.7,23.1,90.0,24.5,-1
1655 | 6391,2.0,1.0,0.1,23.7,23.1,62.5,19.94,0
1656 | 6393,1.0,1.0,0.1,23.7,23.1,62.5,19.94,0
1657 | 6394,1.0,1.0,0.1,23.7,23.1,62.5,19.94,-1
1658 | 6395,1.0,2.0,0.1,23.7,23.1,62.5,19.94,0
1659 | 6397,4.0,1.0,0.1,23.7,23.1,62.5,19.94,0
1660 | 6398,1.0,2.0,0.1,23.7,23.1,62.5,19.94,1
1661 | 6399,1.0,2.0,0.1,23.7,23.1,62.5,19.94,-1
1662 | 6400,2.0,2.0,0.1,23.7,23.1,62.5,19.94,0
1663 | 6401,2.0,2.0,0.1,23.7,23.1,62.5,19.94,1
1664 | 6404,2.0,2.0,0.1,23.7,23.1,62.5,19.94,-1
1665 | 6406,3.0,2.0,0.1,23.7,23.1,62.5,19.94,0
1666 | 6408,3.0,2.0,0.1,23.7,23.1,62.5,19.94,-1
1667 | 6425,2.0,2.0,0.1,23.7,23.1,62.5,19.94,2
1668 | 6427,1.0,2.0,0.1,23.7,23.1,72.0,31.0,0
1669 | 6429,1.0,2.0,0.1,23.7,23.1,72.0,31.0,-1
1670 | 6431,1.0,1.0,0.1,23.7,23.1,72.0,31.0,-1
1671 | 6432,2.0,2.0,0.1,23.7,23.1,72.0,31.0,-1
1672 | 6433,2.0,2.0,0.1,23.7,23.1,72.0,31.0,0
1673 | 6434,2.0,2.0,0.1,23.7,23.1,72.0,31.0,1
1674 | 6445,4.0,1.0,0.1,23.7,23.1,72.0,31.0,-1
1675 | 6446,2.0,1.0,0.1,23.7,23.1,72.0,31.0,-2
1676 | 6448,3.0,2.0,0.1,23.7,23.1,72.0,31.0,1
1677 | 6449,3.0,2.0,0.1,23.7,23.1,72.0,31.0,2
1678 | 6450,3.0,2.0,0.1,23.7,23.1,72.0,31.0,0
1679 | 6453,2.0,1.0,0.1,23.7,23.1,72.0,31.0,0
1680 | 6456,2.0,1.0,0.1,23.7,23.1,72.0,31.0,-1
1681 | 6460,3.0,2.0,0.1,23.7,23.1,72.0,31.0,-1
1682 | 6465,2.0,2.0,0.1,23.7,23.1,72.0,31.0,-2
1683 | 6471,2.0,2.0,0.1,23.7,23.1,83.0,24.0,0
1684 | 6472,2.0,2.0,0.1,23.7,23.1,83.0,24.0,-1
1685 | 6473,3.0,2.0,0.1,23.7,23.1,83.0,24.0,0
1686 | 6476,3.0,2.0,0.1,23.7,23.1,83.0,24.0,-1
1687 | 6483,2.0,2.0,0.1,23.7,23.1,83.0,24.0,2
1688 | 6485,1.0,2.0,0.1,23.7,23.1,83.0,24.0,2
1689 | 6490,2.0,1.0,0.1,23.7,23.1,83.0,24.0,-1
1690 | 6492,3.0,1.0,0.1,23.7,23.1,83.0,24.0,0
1691 | 6495,2.0,1.0,0.1,23.7,23.1,83.0,24.0,0
1692 | 6500,2.0,2.0,0.1,23.7,23.1,83.0,24.0,1
1693 | 6516,3.0,2.0,0.1,23.7,23.1,83.0,24.0,1
1694 | 6521,1.0,1.0,0.1,23.7,23.1,100.0,7.0,0
1695 | 6525,1.0,2.0,0.1,23.7,23.1,100.0,7.0,-1
1696 | 6526,2.0,1.0,0.1,23.7,23.1,100.0,7.0,0
1697 | 6529,3.0,2.0,0.1,23.7,23.1,100.0,7.0,-1
1698 | 6530,2.0,2.0,0.1,23.7,23.1,100.0,7.0,0
1699 | 6532,1.0,2.0,0.1,23.7,23.1,100.0,7.0,0
1700 | 6541,1.0,1.0,0.1,23.7,23.1,100.0,7.0,-1
1701 | 6546,2.0,1.0,0.1,23.7,23.1,100.0,7.0,-1
1702 | 6579,1.0,1.0,0.1,23.7,23.1,100.0,7.0,-2
1703 | 6587,3.0,2.0,0.1,23.7,23.1,100.0,7.0,0
1704 | 6588,2.0,2.0,0.1,23.7,23.1,100.0,7.0,-1
1705 | 6592,2.0,2.0,0.1,23.7,23.1,100.0,7.0,1
1706 | 6596,6.0,2.0,0.1,23.7,23.1,100.0,7.0,0
1707 | 6630,1.0,1.0,0.1,23.7,23.1,100.0,7.0,1
1708 | 6632,1.0,2.0,0.1,23.7,23.1,100.0,7.0,1
1709 | 6633,4.0,2.0,0.1,23.7,23.1,100.0,7.0,0
1710 | 6670,1.0,1.0,0.1,23.7,23.1,69.0,22.0,-1
1711 | 6671,2.0,1.0,0.1,23.7,23.1,69.0,22.0,-1
1712 | 6672,1.0,1.0,0.1,23.7,23.1,69.0,22.0,0
1713 | 6674,2.0,2.0,0.1,23.7,23.1,69.0,22.0,-1
1714 | 6688,2.0,2.0,0.1,23.7,23.1,69.0,22.0,-2
1715 | 6695,2.0,1.0,0.1,23.7,23.1,69.0,22.0,0
1716 | 6696,1.0,2.0,0.1,23.7,23.1,69.0,22.0,0
1717 | 6706,1.0,2.0,0.1,23.7,23.1,69.0,22.0,-1
1718 | 6710,2.0,2.0,0.1,23.7,23.1,69.0,22.0,0
1719 | 6711,1.0,1.0,0.1,23.7,23.1,69.0,14.0,0
1720 | 6712,1.0,1.0,0.1,23.7,23.1,69.0,14.0,-1
1721 | 6714,1.0,2.0,0.1,23.7,23.1,69.0,14.0,-1
1722 | 6716,2.0,2.0,0.1,23.7,23.1,69.0,14.0,-1
1723 | 6718,2.0,1.0,0.1,23.7,23.1,69.0,14.0,-1
1724 | 6720,2.0,1.0,0.1,23.7,23.1,69.0,14.0,-2
1725 | 6722,2.0,1.0,0.1,23.7,23.1,69.0,14.0,0
1726 | 6725,2.0,1.0,0.1,23.7,23.1,75.0,32.0,-1
1727 | 6726,2.0,1.0,0.1,23.7,23.1,75.0,32.0,0
1728 | 6729,2.0,2.0,0.1,23.7,23.1,75.0,32.0,-1
1729 | 6733,2.0,1.0,0.1,23.7,23.1,75.0,32.0,-2
1730 | 6736,1.0,1.0,0.1,23.7,23.1,75.0,32.0,-1
1731 | 6738,2.0,1.0,0.1,23.7,23.1,75.0,32.0,2
1732 | 6747,2.0,1.0,0.1,23.7,23.1,63.0,32.8,0
1733 | 6748,2.0,1.0,0.1,23.7,23.1,63.0,32.8,-1
1734 | 6751,2.0,2.0,0.1,23.7,23.1,63.0,32.8,-1
1735 | 6760,2.0,1.0,0.1,23.7,23.1,63.0,32.8,2
1736 | 6769,2.0,2.0,0.1,23.7,23.1,63.0,32.8,0
1737 | 6771,2.0,2.0,0.1,23.7,23.1,20.0,35.0,0
1738 | 6772,2.0,1.0,0.1,23.7,23.1,20.0,35.0,0
1739 | 6777,1.0,1.0,0.1,23.7,23.1,20.0,35.0,0
1740 | 6778,2.0,1.0,0.1,23.7,23.1,20.0,35.0,-1
1741 | 6784,1.0,1.0,0.1,23.7,23.1,20.0,35.0,2
1742 | 6786,2.0,2.0,0.1,23.7,23.1,20.0,35.0,-1
1743 | 6797,2.0,2.0,0.1,23.7,23.1,89.0,27.8,-1
1744 | 6798,2.0,1.0,0.1,23.7,23.1,89.0,27.8,2
1745 | 6800,2.0,2.0,0.1,23.7,23.1,89.0,27.8,0
1746 | 6804,1.0,1.0,0.1,23.7,23.1,89.0,27.8,-1
1747 | 6806,2.0,1.0,0.1,23.7,23.1,89.0,27.8,1
1748 | 6807,2.0,1.0,0.1,23.7,23.1,89.0,27.8,0
1749 | 6808,2.0,2.0,0.1,23.7,23.1,61.0,27.2,0
1750 | 6811,2.0,2.0,0.1,23.7,23.1,61.0,27.2,-1
1751 | 6812,1.0,1.0,0.1,23.7,23.1,61.0,27.2,0
1752 | 6815,2.0,1.0,0.1,23.7,23.1,61.0,27.2,0
1753 | 6816,1.0,1.0,0.1,23.7,23.1,61.0,27.2,-1
1754 | 6824,2.0,1.0,0.1,23.7,23.1,61.0,27.2,2
1755 | 6825,2.0,1.0,0.1,23.7,23.1,61.0,27.2,-1
1756 | 6834,1.0,1.0,0.1,23.7,23.1,61.0,27.2,2
1757 | 6863,1.0,1.0,0.02,26.725,26.6,58.3,19.8,0
1758 | 6864,1.0,1.0,0.02,26.725,26.6,58.3,19.8,2
1759 | 6865,1.0,2.0,0.02,26.725,26.6,58.3,19.8,0
1760 | 6866,1.0,1.0,0.02,26.725,26.6,58.3,19.8,1
1761 | 6867,2.0,2.0,0.02,26.725,26.6,58.3,19.8,0
1762 | 6868,2.0,1.0,0.02,26.725,26.6,58.3,19.8,0
1763 | 6873,2.0,2.0,0.11,25.095,24.9,47.2,22.7,-1
1764 | 6874,1.0,1.0,0.11,25.095,24.9,47.2,22.7,-1
1765 | 6875,2.0,2.0,0.11,25.095,24.9,47.2,22.7,0
1766 | 6876,1.0,1.0,0.11,25.095,24.9,47.2,22.7,0
1767 | 6877,2.0,1.0,0.11,25.095,24.9,47.2,22.7,-2
1768 | 6879,1.0,1.0,0.13,24.75,24.7,39.5,25.8,1
1769 | 6880,1.0,1.0,0.13,24.75,24.7,39.5,25.8,0
1770 | 6881,1.0,2.0,0.13,24.75,24.7,39.5,25.8,-1
1771 | 6882,2.0,2.0,0.13,24.75,24.7,39.5,25.8,0
1772 | 6884,1.0,2.0,0.13,24.75,24.7,39.5,25.8,0
1773 | 6886,1.0,2.0,0.29,28.15,27.7,35.1,30.6,1
1774 | 6887,1.0,1.0,0.29,28.15,27.7,35.1,30.6,0
1775 | 6888,2.0,2.0,0.29,28.15,27.7,35.1,30.6,0
1776 | 6889,1.0,1.0,0.29,28.15,27.7,35.1,30.6,1
1777 | 6891,1.0,1.0,0.29,28.15,27.7,35.1,30.6,2
1778 | 6892,2.0,2.0,0.29,28.15,27.7,35.1,30.6,1
1779 | 6898,2.0,1.0,0.29,28.15,27.7,35.1,30.6,1
1780 | 6904,1.0,2.0,0.29,28.15,27.7,35.1,30.6,0
1781 | 6914,1.0,2.0,0.29,28.15,27.7,35.1,30.6,2
1782 | 6923,1.0,1.0,0.1,24.72,23.1,50.4,22.5,1
1783 | 6924,2.0,2.0,0.1,24.72,23.1,50.4,22.5,0
1784 | 6925,1.0,2.0,0.1,24.72,23.1,50.4,22.5,0
1785 | 6928,1.0,1.0,0.1,24.72,23.1,50.4,22.5,0
1786 | 6929,1.0,2.0,0.1,24.72,23.1,50.4,22.5,1
1787 | 6945,1.0,1.0,0.1,24.72,23.1,50.4,22.5,-1
1788 | 6947,2.0,2.0,0.1,24.72,23.1,50.4,22.5,-1
1789 | 6948,1.0,2.0,0.1,24.72,23.1,50.4,22.5,-1
1790 | 6950,1.0,2.0,0.1,25.11,23.1,54.2,22.2,1
1791 | 6951,1.0,2.0,0.1,25.11,23.1,54.2,22.2,0
1792 | 6952,2.0,2.0,0.1,25.11,23.1,54.2,22.2,0
1793 | 6961,1.0,1.0,0.1,25.11,23.1,54.2,22.2,0
1794 | 6967,1.0,2.0,0.1,25.11,23.1,54.2,22.2,-1
1795 | 6973,2.0,1.0,0.1,25.11,23.1,54.2,22.2,0
1796 | 6977,2.0,2.0,0.1,25.11,23.1,54.2,22.2,1
1797 | 6978,1.0,1.0,0.1,25.11,23.1,54.2,22.2,1
1798 | 6986,1.0,1.0,0.1,25.11,23.1,54.2,22.2,2
1799 | 7012,1.0,1.0,0.1,25.11,23.1,54.2,22.2,-1
1800 | 7030,3.0,2.0,0.1,25.11,23.1,54.2,22.2,0
1801 | 7058,1.0,1.0,0.1,23.7,23.1,34.0,16.0,0
1802 | 7059,2.0,2.0,0.1,23.7,23.1,34.0,16.0,0
1803 | 7061,1.0,1.0,0.1,23.7,23.1,34.0,16.0,1
1804 | 7062,2.0,2.0,0.1,23.7,23.1,34.0,16.0,-1
1805 | 7063,1.0,2.0,0.1,23.7,23.1,34.0,16.0,0
1806 | 7065,2.0,1.0,0.1,23.7,23.1,34.0,16.0,0
1807 | 7072,3.0,1.0,0.1,23.7,23.1,34.0,16.0,-1
1808 | 7075,2.0,1.0,0.1,23.7,23.1,34.0,16.0,-1
1809 | 7076,2.0,2.0,0.1,23.7,23.1,34.0,16.0,1
1810 | 7078,1.0,1.0,0.1,23.7,23.1,34.0,16.0,-1
1811 | 7098,3.0,2.0,0.1,23.7,23.1,34.0,16.0,0
1812 | 7100,3.0,2.0,0.1,23.7,23.1,34.0,16.0,1
1813 | 7102,1.0,2.0,0.1,23.7,23.1,63.0,14.0,-1
1814 | 7103,2.0,2.0,0.1,23.7,23.1,63.0,14.0,0
1815 | 7104,1.0,2.0,0.1,23.7,23.1,63.0,14.0,0
1816 | 7105,2.0,2.0,0.1,23.7,23.1,63.0,14.0,-1
1817 | 7108,1.0,1.0,0.1,23.7,23.1,63.0,14.0,0
1818 | 7109,5.0,2.0,0.1,23.7,23.1,63.0,14.0,0
1819 | 7110,2.0,1.0,0.1,23.7,23.1,63.0,14.0,0
1820 | 7118,1.0,1.0,0.1,23.7,23.1,63.0,14.0,-1
1821 | 7120,2.0,2.0,0.1,23.7,23.1,93.0,-2.0,-1
1822 | 7121,1.0,1.0,0.1,23.7,23.1,93.0,-2.0,-1
1823 | 7122,4.0,2.0,0.1,23.7,23.1,93.0,-2.0,0
1824 | 7124,1.0,2.0,0.1,23.7,23.1,93.0,-2.0,-1
1825 | 7126,2.0,1.0,0.1,23.7,23.1,93.0,-2.0,-1
1826 | 7132,2.0,2.0,0.1,23.7,23.1,93.0,-2.0,0
1827 | 7139,6.0,2.0,0.1,23.7,23.1,93.0,-2.0,-1
1828 | 7140,1.0,2.0,0.1,23.7,23.1,93.0,-2.0,0
1829 | 7143,3.0,2.0,0.1,23.7,23.1,93.0,-2.0,0
1830 | 7152,2.0,1.0,0.1,23.7,23.1,93.0,-2.0,0
1831 | 7154,1.0,2.0,0.1,23.7,23.1,67.0,-10.0,0
1832 | 7155,2.0,2.0,0.1,23.7,23.1,67.0,-10.0,0
1833 | 7156,4.0,1.0,0.1,23.7,23.1,67.0,-10.0,0
1834 | 7159,3.0,2.0,0.1,23.7,23.1,67.0,-10.0,0
1835 | 7161,1.0,2.0,0.1,23.7,23.1,67.0,-10.0,1
1836 | 7162,2.0,2.0,0.1,23.7,23.1,67.0,-10.0,-1
1837 | 7164,4.0,2.0,0.1,23.7,23.1,67.0,-10.0,0
1838 | 7178,5.0,2.0,0.1,23.7,23.1,67.0,-10.0,0
1839 | 7181,3.0,1.0,0.1,23.7,23.1,67.0,-10.0,-1
1840 | 7185,1.0,2.0,0.1,23.7,23.1,67.0,-10.0,-1
1841 | 7195,3.0,2.0,0.1,23.7,23.1,67.0,-10.0,-1
1842 | 7208,2.0,1.0,0.1,23.7,23.1,67.0,-10.0,0
1843 | 7217,2.0,1.0,0.1,23.7,23.1,67.0,-10.0,-1
1844 | 7223,3.0,1.0,0.1,23.0,23.1,67.0,15.0,-1
1845 | 7224,3.0,2.0,0.1,23.0,23.1,67.0,15.0,-1
1846 | 7225,2.0,1.0,0.1,23.0,23.1,67.0,15.0,0
1847 | 7226,2.0,2.0,0.1,23.0,23.1,67.0,15.0,0
1848 | 7229,1.0,1.0,0.1,23.0,23.1,67.0,15.0,-1
1849 | 7230,1.0,2.0,0.1,23.0,23.1,67.0,15.0,0
1850 | 7231,6.0,1.0,0.1,23.0,23.1,67.0,15.0,0
1851 | 7233,2.0,2.0,0.1,23.0,23.1,67.0,15.0,-1
1852 | 7234,3.0,1.0,0.1,23.0,23.1,67.0,15.0,0
1853 | 7235,4.0,2.0,0.1,23.0,23.1,67.0,15.0,-1
1854 | 7238,3.0,2.0,0.1,23.0,23.1,67.0,15.0,0
1855 | 7242,2.0,2.0,0.1,23.0,23.1,67.0,15.0,1
1856 | 7243,2.0,1.0,0.1,23.0,23.1,67.0,15.0,-1
1857 | 7258,3.0,1.0,0.1,23.0,23.1,67.0,15.0,1
1858 | 7274,2.0,1.0,0.1,19.0,23.1,78.0,8.0,-1
1859 | 7275,6.0,2.0,0.1,19.0,23.1,78.0,8.0,-1
1860 | 7276,3.0,1.0,0.1,19.0,23.1,78.0,8.0,-1
1861 | 7277,1.0,1.0,0.1,19.0,23.1,78.0,8.0,-1
1862 | 7279,1.0,1.0,0.1,19.0,23.1,78.0,8.0,0
1863 | 7280,2.0,2.0,0.1,19.0,23.1,78.0,8.0,0
1864 | 7282,1.0,2.0,0.1,19.0,23.1,78.0,8.0,-1
1865 | 7285,2.0,1.0,0.1,19.0,23.1,78.0,8.0,0
1866 | 7288,2.0,2.0,0.1,19.0,23.1,78.0,8.0,-1
1867 | 7295,4.0,1.0,0.1,19.0,23.1,78.0,8.0,-1
1868 | 7297,3.0,1.0,0.1,19.0,23.1,78.0,8.0,0
1869 | 7300,3.0,2.0,0.1,19.0,23.1,78.0,8.0,-1
1870 | 7304,1.0,2.0,0.1,19.0,23.1,78.0,8.0,0
1871 | 7308,2.0,1.0,0.1,19.0,23.1,78.0,8.0,-2
1872 | 7331,6.0,1.0,0.1,19.0,23.1,78.0,8.0,-2
1873 | 7338,3.0,2.0,0.1,19.0,23.1,78.0,8.0,0
1874 | 7341,4.0,2.0,0.1,19.0,23.1,78.0,8.0,-1
1875 | 7343,1.0,1.0,0.3,27.36,27.55,60.912,27.65,-1
1876 | 7345,1.0,1.0,0.3,27.36,27.55,60.912,27.65,0
1877 | 7346,1.0,2.0,0.3,27.36,27.55,60.912,27.65,-1
1878 | 7348,2.0,1.0,0.3,27.36,27.55,60.912,27.65,-1
1879 | 7351,1.0,1.0,0.3,27.36,27.55,60.912,27.65,-2
1880 | 7353,1.0,1.0,0.3,27.36,27.55,60.912,27.65,1
1881 | 7355,6.0,1.0,0.3,27.36,27.55,60.912,27.65,-1
1882 | 7358,3.0,1.0,0.3,27.36,27.55,60.912,27.65,-1
1883 | 7360,2.0,2.0,0.3,27.36,27.55,60.912,27.65,-1
1884 | 7363,2.0,1.0,0.3,27.36,27.55,60.912,27.65,-2
1885 | 7371,2.0,1.0,0.7,27.267,27.41,59.4,26.27,-1
1886 | 7372,2.0,1.0,0.7,27.267,27.41,59.4,26.27,0
1887 | 7374,1.0,1.0,0.7,27.267,27.41,59.4,26.27,0
1888 | 7379,2.0,1.0,0.7,27.267,27.41,59.4,26.27,-2
1889 | 7380,1.0,2.0,0.7,27.267,27.41,59.4,26.27,-1
1890 | 7383,2.0,2.0,0.7,27.267,27.41,59.4,26.27,-1
1891 | 7390,2.0,1.0,0.7,27.267,27.41,59.4,26.27,1
1892 | 7393,1.0,1.0,0.7,27.267,27.41,59.4,26.27,-1
1893 | 7394,1.0,2.0,0.7,27.267,27.41,59.4,26.27,0
1894 | 7399,2.0,2.0,0.7,27.267,27.41,59.4,26.27,0
1895 | 7401,2.0,2.0,0.7,27.267,27.41,59.4,26.27,1
1896 | 7405,1.0,1.0,0.7,27.267,27.41,59.4,26.27,1
1897 | 7410,1.0,1.0,0.6,25.9,26.72,60.7,25.08,0
1898 | 7412,1.0,1.0,0.6,25.9,26.72,60.7,25.08,-2
1899 | 7413,1.0,1.0,0.6,25.9,26.72,60.7,25.08,-1
1900 | 7414,2.0,2.0,0.6,25.9,26.72,60.7,25.08,-1
1901 | 7416,2.0,1.0,0.6,25.9,26.72,60.7,25.08,0
1902 | 7418,2.0,2.0,0.6,25.9,26.72,60.7,25.08,0
1903 | 7426,1.0,2.0,0.6,25.9,26.72,60.7,25.08,0
1904 | 7430,2.0,1.0,0.6,25.9,26.72,60.7,25.08,-1
1905 | 7431,1.0,2.0,0.6,25.9,26.72,60.7,25.08,-2
1906 | 7432,2.0,1.0,0.6,25.9,26.72,60.7,25.08,-2
1907 | 7444,1.0,2.0,0.6,25.9,26.72,60.7,25.08,-1
1908 | 7452,2.0,1.0,0.4,19.29,25.4,49.0,13.18,0
1909 | 7453,2.0,2.0,0.4,19.29,25.4,49.0,13.18,-1
1910 | 7454,2.0,1.0,0.4,19.29,25.4,49.0,13.18,-1
1911 | 7465,2.0,2.0,0.4,19.29,25.4,49.0,13.18,0
1912 | 7472,4.0,1.0,0.4,19.29,25.4,49.0,13.18,0
1913 | 7473,3.0,1.0,0.4,19.29,25.4,49.0,13.18,-1
1914 | 7487,2.0,1.0,0.4,19.29,25.4,49.0,13.18,-2
1915 | 7489,6.0,1.0,0.4,19.29,25.4,49.0,13.18,-1
1916 | 7490,2.0,2.0,0.4,19.29,25.4,49.0,13.18,-2
1917 | 7497,1.0,1.0,0.04,29.85,23.1,75.0,24.0,-1
1918 | 7499,2.0,2.0,0.04,29.85,23.1,75.0,24.0,0
1919 | 7503,2.0,1.0,0.04,29.85,23.1,75.0,24.0,-1
1920 | 7504,2.0,2.0,0.04,29.85,23.1,75.0,24.0,2
1921 | 7505,3.0,1.0,0.04,29.85,23.1,75.0,24.0,2
1922 | 7506,1.0,1.0,0.04,29.85,23.1,75.0,24.0,0
1923 | 7510,1.0,2.0,0.04,29.85,23.1,75.0,24.0,2
1924 | 7512,1.0,2.0,0.04,29.85,23.1,75.0,24.0,0
1925 | 7514,1.0,2.0,0.04,29.85,23.1,75.0,24.0,-1
1926 | 7524,1.0,1.0,0.04,29.85,23.1,75.0,24.0,2
1927 | 7526,2.0,1.0,0.04,29.85,23.1,75.0,24.0,0
1928 | 7528,3.0,1.0,0.04,29.85,23.1,75.0,24.0,0
1929 | 7538,2.0,2.0,0.04,29.85,23.1,75.0,24.0,1
1930 | 7545,1.0,2.0,0.04,29.85,23.1,75.0,24.0,1
1931 | 7548,2.0,2.0,0.04,25.65,23.1,78.0,24.0,-1
1932 | 7549,2.0,1.0,0.04,25.65,23.1,78.0,24.0,-2
1933 | 7550,2.0,1.0,0.04,25.65,23.1,78.0,24.0,-1
1934 | 7552,2.0,2.0,0.04,25.65,23.1,78.0,24.0,0
1935 | 7558,2.0,2.0,0.04,25.65,23.1,78.0,24.0,-2
1936 | 7561,1.0,1.0,0.04,25.65,23.1,78.0,24.0,-1
1937 | 7568,4.0,2.0,0.04,25.65,23.1,78.0,24.0,0
1938 | 7569,3.0,1.0,0.04,25.65,23.1,78.0,24.0,0
1939 | 7572,3.0,2.0,0.04,25.65,23.1,78.0,24.0,1
1940 | 7574,1.0,1.0,0.04,25.65,23.1,78.0,24.0,-2
1941 | 7575,1.0,2.0,0.04,25.65,23.1,78.0,24.0,-1
1942 | 7579,2.0,1.0,0.04,25.65,23.1,78.0,24.0,0
1943 | 7580,2.0,2.0,0.04,25.65,23.1,78.0,24.0,1
1944 | 7597,1.0,1.0,0.04,25.65,23.1,78.0,24.0,0
1945 | 7598,3.0,2.0,0.1,23.7,23.1,72.0,20.4,0
1946 | 7599,2.0,1.0,0.1,23.7,23.1,72.0,20.4,1
1947 | 7600,1.0,1.0,0.1,23.7,23.1,72.0,20.4,0
1948 | 7601,1.0,1.0,0.1,23.7,23.1,72.0,20.4,-1
1949 | 7602,2.0,1.0,0.1,23.7,23.1,72.0,20.4,-1
1950 | 7604,2.0,1.0,0.1,23.7,23.1,72.0,20.4,0
1951 | 7605,1.0,2.0,0.1,23.7,23.1,72.0,20.4,-1
1952 | 7606,2.0,2.0,0.1,23.7,23.1,72.0,20.4,-1
1953 | 7609,2.0,2.0,0.1,23.7,23.1,72.0,20.4,0
1954 | 7610,3.0,2.0,0.1,23.7,23.1,72.0,20.4,-1
1955 | 7614,4.0,2.0,0.1,23.7,23.1,72.0,20.4,-1
1956 | 7627,2.0,1.0,0.1,23.7,23.1,72.0,20.4,-2
1957 | 7629,3.0,1.0,0.1,23.7,23.1,72.0,20.4,0
1958 | 7634,4.0,2.0,0.1,23.0,23.1,48.0,7.1,0
1959 | 7635,3.0,1.0,0.1,23.0,23.1,48.0,7.1,0
1960 | 7636,4.0,1.0,0.1,23.0,23.1,48.0,7.1,-1
1961 | 7637,5.0,2.0,0.1,23.0,23.1,48.0,7.1,-1
1962 | 7638,3.0,2.0,0.1,23.0,23.1,48.0,7.1,-1
1963 | 7639,2.0,1.0,0.1,23.0,23.1,48.0,7.1,0
1964 | 7640,2.0,1.0,0.1,23.0,23.1,48.0,7.1,-1
1965 | 7641,2.0,2.0,0.1,23.0,23.1,48.0,7.1,-2
1966 | 7644,3.0,2.0,0.1,23.0,23.1,48.0,7.1,0
1967 | 7648,2.0,2.0,0.1,23.0,23.1,48.0,7.1,-1
1968 | 7651,3.0,1.0,0.1,23.0,23.1,48.0,7.1,-1
1969 | 7664,2.0,2.0,0.1,23.0,23.1,48.0,7.1,0
1970 | 7667,2.0,2.0,0.1,23.0,23.1,48.0,7.1,1
1971 | 7670,4.0,2.0,0.1,24.5,23.1,76.0,23.0,0
1972 | 7671,1.0,1.0,0.1,24.5,23.1,76.0,23.0,-1
1973 | 7672,1.0,2.0,0.1,24.5,23.1,76.0,23.0,-1
1974 | 7673,3.0,1.0,0.1,24.5,23.1,76.0,23.0,0
1975 | 7675,2.0,1.0,0.1,24.5,23.1,76.0,23.0,0
1976 | 7676,2.0,2.0,0.1,24.5,23.1,76.0,23.0,-1
1977 | 7680,2.0,1.0,0.1,24.5,23.1,76.0,23.0,-1
1978 | 7681,2.0,2.0,0.1,24.5,23.1,76.0,23.0,0
1979 | 7683,3.0,2.0,0.1,24.5,23.1,76.0,23.0,-1
1980 | 7692,3.0,1.0,0.1,24.5,23.1,76.0,23.0,-1
1981 | 7699,3.0,2.0,0.1,24.5,23.1,76.0,23.0,0
1982 | 7712,1.0,2.0,0.1,28.3,23.1,62.5,26.4,2
1983 | 7713,2.0,2.0,0.1,28.3,23.1,62.5,26.4,2
1984 | 7714,1.0,1.0,0.1,28.3,23.1,62.5,26.4,0
1985 | 7715,2.0,1.0,0.1,28.3,23.1,62.5,26.4,0
1986 | 7716,1.0,2.0,0.1,28.3,23.1,62.5,26.4,0
1987 | 7717,2.0,2.0,0.1,28.3,23.1,62.5,26.4,1
1988 | 7720,1.0,1.0,0.1,28.3,23.1,62.5,26.4,1
1989 | 7721,2.0,1.0,0.1,28.3,23.1,62.5,26.4,1
1990 | 7722,1.0,1.0,0.1,28.3,23.1,62.5,26.4,2
1991 | 7728,1.0,2.0,0.1,28.3,23.1,62.5,26.4,1
1992 | 7730,4.0,1.0,0.1,28.3,23.1,62.5,26.4,2
1993 | 7737,2.0,1.0,0.1,28.3,23.1,62.5,26.4,2
1994 | 7743,3.0,2.0,0.1,28.3,23.1,62.5,26.4,1
1995 | 7753,3.0,2.0,0.1,28.3,23.1,62.5,26.4,2
1996 | 7759,6.0,2.0,0.1,28.3,23.1,62.5,26.4,0
1997 | 7787,3.0,1.0,0.1,23.7,23.1,80.0,16.0,-1
1998 | 7788,2.0,1.0,0.1,23.7,23.1,80.0,16.0,0
1999 | 7789,2.0,2.0,0.1,23.7,23.1,80.0,16.0,-1
2000 | 7790,1.0,1.0,0.1,23.7,23.1,80.0,16.0,-1
2001 | 7792,4.0,2.0,0.1,23.7,23.1,80.0,16.0,0
2002 | 7793,2.0,1.0,0.1,23.7,23.1,80.0,16.0,-1
2003 | 7794,2.0,2.0,0.1,23.7,23.1,80.0,16.0,0
2004 | 7800,1.0,2.0,0.1,23.7,23.1,80.0,16.0,-1
2005 | 7802,1.0,1.0,0.1,23.7,23.1,80.0,16.0,0
2006 | 7811,2.0,2.0,0.1,23.7,23.1,80.0,16.0,-2
2007 | 7813,2.0,1.0,0.1,23.7,23.1,80.0,16.0,-2
2008 | 7823,1.0,2.0,0.1,23.7,23.1,80.0,16.0,0
2009 | 7855,4.0,1.0,0.1,23.7,23.1,80.0,16.0,-1
2010 | 7860,3.0,2.0,0.1,23.7,23.1,80.0,16.0,-1
2011 | 7885,4.0,2.0,0.1,23.7,23.1,80.0,16.0,-2
2012 | 7899,3.0,2.0,0.1,23.7,23.1,80.0,16.0,0
2013 | 7908,3.0,1.0,0.1,23.7,23.1,80.0,16.0,0
2014 | 7916,1.0,1.0,0.1,23.7,23.1,80.0,16.0,-2
2015 | 7944,4.0,2.0,0.1,23.7,23.1,80.0,16.0,-1
2016 | 7945,3.0,2.0,0.1,21.315,20.3,99.0,7.6,-1
2017 | 7946,2.0,2.0,0.1,21.315,20.3,99.0,7.6,-1
2018 | 7947,2.0,2.0,0.1,21.315,20.3,99.0,7.6,0
2019 | 7948,4.0,1.0,0.1,21.315,20.3,99.0,7.6,1
2020 | 7952,3.0,2.0,0.1,21.315,20.3,99.0,7.6,0
2021 | 7953,1.0,1.0,0.1,21.315,20.3,99.0,7.6,0
2022 | 7954,1.0,1.0,0.1,21.315,20.3,99.0,7.6,-1
2023 | 7957,2.0,1.0,0.1,21.315,20.3,99.0,7.6,-1
2024 | 7960,1.0,2.0,0.1,21.315,20.3,99.0,7.6,0
2025 | 7966,2.0,1.0,0.1,21.315,20.3,99.0,7.6,0
2026 | 7971,4.0,1.0,0.1,21.315,20.3,99.0,7.6,-2
2027 | 7974,4.0,2.0,0.1,21.315,20.3,99.0,7.6,-1
2028 | 7975,1.0,2.0,0.1,21.315,20.3,99.0,7.6,-1
2029 | 7977,2.0,2.0,0.1,21.315,20.3,99.0,7.6,-2
2030 | 7978,4.0,2.0,0.1,21.315,20.3,99.0,7.6,0
2031 | 7991,2.0,1.0,0.1,21.315,20.3,99.0,7.6,-2
2032 | 8002,3.0,1.0,0.1,21.315,20.3,99.0,7.6,0
2033 | 8005,6.0,1.0,0.1,21.315,20.3,99.0,7.6,-1
2034 | 8010,5.0,2.0,0.1,21.315,20.3,99.0,7.6,0
2035 | 8015,6.0,2.0,0.1,21.315,20.3,99.0,7.6,-1
2036 | 8024,2.0,2.0,0.1,21.8,20.5,83.0,12.8,-1
2037 | 8025,3.0,2.0,0.1,21.8,20.5,83.0,12.8,-1
2038 | 8026,3.0,1.0,0.1,21.8,20.5,83.0,12.8,-1
2039 | 8027,2.0,2.0,0.1,21.8,20.5,83.0,12.8,0
2040 | 8028,1.0,2.0,0.1,21.8,20.5,83.0,12.8,0
2041 | 8029,6.0,2.0,0.1,21.8,20.5,83.0,12.8,-1
2042 | 8030,3.0,1.0,0.1,21.8,20.5,83.0,12.8,0
2043 | 8031,1.0,1.0,0.1,21.8,20.5,83.0,12.8,1
2044 | 8032,6.0,1.0,0.1,21.8,20.5,83.0,12.8,-1
2045 | 8035,3.0,2.0,0.1,21.8,20.5,83.0,12.8,0
2046 | 8038,2.0,1.0,0.1,21.8,20.5,83.0,12.8,-1
2047 | 8042,2.0,1.0,0.1,21.8,20.5,83.0,12.8,0
2048 | 8043,1.0,2.0,0.1,21.8,20.5,83.0,12.8,1
2049 | 8045,1.0,1.0,0.1,21.8,20.5,83.0,12.8,0
2050 | 8047,1.0,1.0,0.1,21.8,20.5,83.0,12.8,-1
2051 | 8050,4.0,1.0,0.1,21.8,20.5,83.0,12.8,-1
2052 | 8051,1.0,2.0,0.1,21.8,20.5,83.0,12.8,-1
2053 | 8057,1.0,2.0,0.1,21.8,20.5,83.0,12.8,2
2054 | 8103,6.0,2.0,0.1,21.8,20.5,83.0,12.8,0
2055 | 8109,4.0,1.0,0.1,21.8,20.5,83.0,12.8,0
2056 | 8120,3.0,1.0,0.1,23.7,23.1,50.0,29.2,0
2057 | 8121,2.0,1.0,0.1,23.7,23.1,50.0,29.2,0
2058 | 8122,2.0,2.0,0.1,23.7,23.1,50.0,29.2,0
2059 | 8124,4.0,1.0,0.1,23.7,23.1,50.0,29.2,0
2060 | 8129,2.0,1.0,0.1,23.7,23.1,50.0,29.2,-1
2061 | 8131,2.0,2.0,0.1,23.7,23.1,50.0,29.2,-1
2062 | 8134,6.0,2.0,0.1,23.7,23.1,50.0,29.2,-1
2063 | 8139,4.0,2.0,0.1,23.7,23.1,50.0,29.2,-1
2064 | 8140,2.0,1.0,0.1,23.7,23.1,50.0,29.2,-2
2065 | 8143,3.0,2.0,0.1,23.7,23.1,50.0,29.2,-1
2066 | 8144,6.0,1.0,0.1,23.7,23.1,50.0,29.2,-1
2067 | 8145,4.0,2.0,0.1,23.7,23.1,50.0,29.2,-2
2068 | 8150,3.0,1.0,0.1,23.7,23.1,50.0,29.2,-1
2069 | 8157,2.0,2.0,0.1,23.7,23.1,83.0,17.2,-1
2070 | 8158,4.0,1.0,0.1,23.7,23.1,83.0,17.2,0
2071 | 8159,2.0,2.0,0.1,23.7,23.1,83.0,17.2,0
2072 | 8160,2.0,1.0,0.1,23.7,23.1,83.0,17.2,0
2073 | 8162,4.0,1.0,0.1,23.7,23.1,83.0,17.2,-2
2074 | 8164,3.0,2.0,0.1,23.7,23.1,83.0,17.2,-1
2075 | 8166,3.0,1.0,0.1,23.7,23.1,83.0,17.2,-1
2076 | 8167,2.0,1.0,0.1,23.7,23.1,83.0,17.2,-1
2077 | 8168,3.0,2.0,0.1,23.7,23.1,83.0,17.2,0
2078 | 8169,3.0,1.0,0.1,23.7,23.1,83.0,17.2,0
2079 | 8178,2.0,2.0,0.1,23.7,23.1,83.0,17.2,1
2080 | 8179,4.0,2.0,0.1,23.7,23.1,83.0,17.2,-1
2081 | 8186,2.0,1.0,0.1,23.7,23.1,83.0,17.2,-2
2082 | 8204,3.0,2.0,0.1,23.7,23.1,83.0,17.2,-2
2083 | 8206,2.0,1.0,0.1,23.7,23.1,54.0,18.0,-1
2084 | 8207,4.0,1.0,0.1,23.7,23.1,54.0,18.0,0
2085 | 8208,4.0,2.0,0.1,23.7,23.1,54.0,18.0,-1
2086 | 8209,2.0,1.0,0.1,23.7,23.1,54.0,18.0,0
2087 | 8210,2.0,2.0,0.1,23.7,23.1,54.0,18.0,-1
2088 | 8211,2.0,2.0,0.1,23.7,23.1,54.0,18.0,0
2089 | 8212,1.0,1.0,0.1,23.7,23.1,54.0,18.0,0
2090 | 8221,3.0,2.0,0.1,23.7,23.1,54.0,18.0,-1
2091 | 8222,4.0,2.0,0.1,23.7,23.1,54.0,18.0,0
2092 | 8224,3.0,1.0,0.1,23.7,23.1,54.0,18.0,-1
2093 |
--------------------------------------------------------------------------------