├── .gitignore
├── Basic-DeepFM-model
│   ├── .idea
│   │   ├── Basic-DeepFM-model.iml
│   │   ├── misc.xml
│   │   ├── modules.xml
│   │   └── workspace.xml
│   ├── .ipynb_checkpoints
│   │   └── DeepFM-StepByStep-checkpoint.ipynb
│   ├── DataReader.py
│   ├── DeepFM-StepByStep.ipynb
│   ├── DeepFM.py
│   ├── __pycache__
│   │   ├── DataReader.cpython-36.pyc
│   │   ├── DeepFM.cpython-36.pyc
│   │   ├── config.cpython-36.pyc
│   │   └── metrics.cpython-36.pyc
│   ├── config.py
│   ├── data
│   │   ├── test.csv
│   │   └── train.csv
│   ├── fig
│   │   ├── DNN.png
│   │   ├── DeepFM.png
│   │   └── FM.png
│   ├── main.py
│   ├── metrics.py
│   └── output
│       ├── DNN_Mean-0.31183_Std0.29369.csv
│       ├── DNN_Mean0.13436_Std0.06001.csv
│       ├── DNN_Mean0.13817_Std0.06173.csv
│       ├── DeepFM_Mean-0.11470_Std0.37335.csv
│       ├── DeepFM_Mean0.01434_Std0.10176.csv
│       ├── DeepFM_Mean0.05735_Std0.20027.csv
│       ├── DeepFM_Mean0.06921_Std0.06395.csv
│       ├── DeepFM_Mean0.17892_Std0.01572.csv
│       ├── DeepFM_Mean0.26137_Std0.00210.csv
│       ├── FM_Mean0.15581_Std0.02785.csv
│       ├── FM_Mean0.19988_Std0.03441.csv
│       └── FM_Mean0.23297_Std0.05576.csv
├── README.md
├── SVD_jia_jia_demo
│   ├── .idea
│   │   ├── SVD_jia_jia_demo.iml
│   │   ├── misc.xml
│   │   ├── modules.xml
│   │   └── workspace.xml
│   └── main.py
├── SVD_recom_demo
│   └── main.py
├── recommendation-FFM-Demo
│   ├── .idea
│   │   ├── misc.xml
│   │   ├── modules.xml
│   │   ├── recommendation-FFM-Demo.iml
│   │   └── workspace.xml
│   ├── FFM_model.py
│   └── TFModel
│       ├── FFM-0.data-00000-of-00001
│       ├── FFM-0.index
│       ├── FFM-0.meta
│       └── checkpoint
└── recommendation-FM-demo
    ├── .idea
    │   ├── misc.xml
    │   ├── modules.xml
    │   ├── recommendation-FM-demo.iml
    │   └── workspace.xml
    ├── FM_data.rar
    ├── FM_model.py
    └── data
        ├── ua.base
        └── ua.test
/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/.gitignore
--------------------------------------------------------------------------------
/Basic-DeepFM-model/.idea/Basic-DeepFM-model.iml:
--------------------------------------------------------------------------------
[IntelliJ IDEA module file; XML content not preserved in this export]
--------------------------------------------------------------------------------
/Basic-DeepFM-model/.idea/misc.xml:
--------------------------------------------------------------------------------
[IntelliJ IDEA settings file; XML content not preserved in this export]
--------------------------------------------------------------------------------
/Basic-DeepFM-model/.idea/modules.xml:
--------------------------------------------------------------------------------
[IntelliJ IDEA modules file; XML content not preserved in this export]
--------------------------------------------------------------------------------
/Basic-DeepFM-model/.idea/workspace.xml:
--------------------------------------------------------------------------------
[IntelliJ IDEA workspace file; XML content not preserved in this export — only a few stray fragments (feat_value, predict, feature_bias, timestamps) survived]
--------------------------------------------------------------------------------
/Basic-DeepFM-model/DataReader.py:
--------------------------------------------------------------------------------
1 | # -*- encoding:utf-8 -*-
2 | import pandas as pd
3 |
4 | class FeatureDictionary(object):
5 |
6 | '''
7 |
8 | Builds the feature dictionary.
9 |
10 | First, create a dictionary for feature handling. The constructor takes the training and test sets read in the first step, then builds the dictionary:
11 | iterate over every feature column; if the current feature is numeric, use the feature name as the key and the current running index as the value.
12 | If the current feature is categorical, count how many distinct values it takes; its entry in the dictionary is then
13 | not a plain index but another dictionary, with each distinct value as a key and its index as the value.
14 | In short, this computes the feature dimensionality: a numeric feature takes one slot, and a categorical feature takes as many slots as it has distinct values.
15 |
16 |
17 | This is where the one-hot distinction shows up: for categorical features, every distinct value is treated as its own feature (key).
18 | '''
19 | def __init__(self,trainfile=None,testfile=None,
20 | dfTrain=None,dfTest=None,numeric_cols=[],
21 | ignore_cols=[]):
22 | assert not ((trainfile is None) and (dfTrain is None)), "trainfile or dfTrain at least one is set"
23 | assert not ((trainfile is not None) and (dfTrain is not None)), "only one can be set"
24 | assert not ((testfile is None) and (dfTest is None)), "testfile or dfTest at least one is set"
25 | assert not ((testfile is not None) and (dfTest is not None)), "only one can be set"
26 |
27 | self.trainfile = trainfile
28 | self.testfile = testfile
29 | self.dfTrain = dfTrain
30 | self.dfTest = dfTest
31 | self.numeric_cols = numeric_cols
32 | self.ignore_cols = ignore_cols
33 | self.gen_feat_dict()
34 |
35 |
36 |
37 |
38 | def gen_feat_dict(self):
39 | '''
40 | Build the feature dictionary and compute the feature dimensionality: a numeric feature takes one slot, and a categorical feature takes one slot per distinct value
41 | '''
42 | if self.dfTrain is None:
43 | dfTrain = pd.read_csv(self.trainfile)
44 |
45 | else:
46 | dfTrain = self.dfTrain
47 |
48 | if self.dfTest is None:
49 | dfTest = pd.read_csv(self.testfile)
50 |
51 | else:
52 | dfTest = self.dfTest
53 |
54 | df = pd.concat([dfTrain,dfTest])
55 |
56 | self.feat_dict = {}
57 | tc = 0
58 | for col in df.columns:
59 | if col in self.ignore_cols:
60 | continue
61 | if col in self.numeric_cols:
62 | self.feat_dict[col] = tc
63 | tc += 1
64 |
65 | else:
66 | # Special handling for categorical features: every one-hot encoded value is treated as a feature of its own
67 | us = df[col].unique()
68 | print(us)
69 | self.feat_dict[col] = dict(zip(us,range(tc,len(us)+tc)))
70 | tc += len(us)
71 | print('feat_dict:',self.feat_dict)
72 | self.feat_dim = tc
73 |
74 |
75 | class DataParser(object):
76 |
77 | '''
78 | The easiest way to understand this class is to look at its output, e.g. the feature indices and feature values generated for the validation set:
79 |
80 | Feature indices (after one-hot encoding)
81 | [180, 186, 200, 202, 205, 213, 215, 217, 219, 221, 223, 225, 227, 229, 235, 248, 250, 251, 253, 254, 255, 3, 14, 16, 19, 30, 40, 50, 54, 56, 61, 147, 66, 172, 173, 175, 176, 0, 174]
82 | [181, 185, 190, 202, 205, 213, 216, 217, 220, 221, 223, 225, 227, 229, 242, 248, 250, 251, 253, 254, 255, 7, 14, 16, 19, 31, 33, 50, 54, 55, 61, 79, 66, 172, 173, 175, 176, 0, 174]
83 |
84 | Feature values at the corresponding index positions (each entry is either a one-hot indicator 1 or a raw numeric value; one-hot positions that are 0 are simply not stored, unlike the usual LightGBM-style layout where every column is kept):
85 | [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5, 0.3, 0.6103277807999999, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.316227766, 0.6695564092, 0.3521363372, 3.4641016150999997, 2.0, 0.4086488773474527],
86 | [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.9, 0.5, 0.7713624309999999, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.316227766, 0.6063200202000001, 0.3583294573, 2.8284271247, 1.0, 0.4676924847454411]
87 | '''
88 | def __init__(self,feat_dict):
89 | self.feat_dict = feat_dict
90 |
91 | def parse(self,infile=None,df=None,has_label=False):
92 | assert not ((infile is None) and (df is None)), "infile or df at least one is set"
93 | assert not ((infile is not None) and (df is not None)), "only one can be set"
94 |
95 |
96 | if infile is None:
97 | dfi = df.copy()
98 | else:
99 | dfi = pd.read_csv(infile)
100 |
101 | if has_label:
102 | y = dfi['target'].values.tolist()
103 | dfi.drop(['id','target'],axis=1,inplace=True)
104 | else:
105 | ids = dfi['id'].values.tolist()
106 | dfi.drop(['id'],axis=1,inplace=True)
107 | # dfi: feature indices
108 | # dfv: feature values, which can be either binary (1/0) or float (e.g., 10.24)
109 | dfv = dfi.copy()
110 | for col in dfi.columns:
111 | if col in self.feat_dict.ignore_cols:
112 | dfi.drop(col,axis=1,inplace=True)
113 | dfv.drop(col,axis=1,inplace=True)
114 | continue
115 | if col in self.feat_dict.numeric_cols:
116 | dfi[col] = self.feat_dict.feat_dict[col]
117 | else:
118 | dfi[col] = dfi[col].map(self.feat_dict.feat_dict[col])
119 | dfv[col] = 1.
120 |
121 | xi = dfi.values.tolist()
122 | xv = dfv.values.tolist()
123 |
124 | if has_label:
125 | return xi,xv,y
126 | else:
127 | return xi,xv,ids
128 |
129 |
130 |
--------------------------------------------------------------------------------
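Note: to make the index/value encoding above concrete, here is a minimal usage sketch (not part of the repository; the toy columns 'price' and 'city' are made up). A numeric column keeps one index and its raw value; each distinct value of a categorical column gets its own index with a stored value of 1.0.

# Minimal sketch of FeatureDictionary / DataParser on toy data (hypothetical example).
import pandas as pd
from DataReader import FeatureDictionary, DataParser

dfTrain = pd.DataFrame({"id": [0, 1], "target": [0, 1],
                        "price": [0.5, 0.9], "city": ["a", "b"]})
dfTest = pd.DataFrame({"id": [2], "price": [0.7], "city": ["a"]})

fd = FeatureDictionary(dfTrain=dfTrain, dfTest=dfTest,
                       numeric_cols=["price"], ignore_cols=["id", "target"])
parser = DataParser(feat_dict=fd)
Xi, Xv, y = parser.parse(df=dfTrain, has_label=True)
# fd.feat_dim -> 3                         (1 slot for 'price' + 2 slots for 'city')
# Xi          -> [[0, 1], [0, 2]]          feature indices per sample
# Xv          -> [[0.5, 1.0], [0.9, 1.0]]  raw value for numeric, 1.0 for categorical
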
/Basic-DeepFM-model/DeepFM.py:
--------------------------------------------------------------------------------
1 | # -*- encoding:utf-8 -*-
2 | import numpy as np
3 | import tensorflow as tf
4 |
5 | from time import time
6 | from sklearn.base import BaseEstimator, TransformerMixin
7 | from sklearn.metrics import roc_auc_score
8 |
9 | class DeepFM(BaseEstimator, TransformerMixin):
10 | '''
11 |
12 | The idea is simply Deep + FM combined. (Fields are not modelled here; fields and per-field latent vectors only appear in the DeepFFM model, so don't mix the two up. This one is deep + FM.)
13 | The FM part extracts low-order feature interactions,
14 | and the Deep part extracts high-order ones.
15 |
16 | Both parts share the same embedding-layer input: each field is embedded, and the embedding result feeds both the FM part and the DNN part.
17 |
18 |
19 | So the very first step is the embedding operation, which is in essence a single fully connected layer that turns features into vectors.
20 |
21 | A fairly detailed walk-through of this code: https://blog.csdn.net/qq_15111861/article/details/94194240
22 |
23 | '''
24 | def __init__(self, feature_size, field_size,
25 | embedding_size=8, dropout_fm=[1.0, 1.0],
26 | deep_layers=[32, 32], dropout_deep=[0.5, 0.5, 0.5],
27 | deep_layer_activation=tf.nn.relu,
28 | epoch=10, batch_size=256,
29 | learning_rate=0.001, optimizer="adam",
30 | batch_norm=0, batch_norm_decay=0.995,
31 | verbose=False, random_seed=2016,
32 | use_fm=True, use_deep=True,
33 | loss_type="logloss", eval_metric=roc_auc_score,
34 | l2_reg=0.0, greater_is_better=True):
35 | assert (use_fm or use_deep)
36 | assert loss_type in ["logloss", "mse"], \
37 | "loss_type can be either 'logloss' for classification task or 'mse' for regression task"
38 |
39 | self.feature_size = feature_size
40 | self.field_size = field_size
41 | self.embedding_size = embedding_size
42 |
43 | self.dropout_fm = dropout_fm
44 | self.deep_layers = deep_layers
45 | self.dropout_dep = dropout_deep
46 | self.deep_layers_activation = deep_layer_activation
47 | self.use_fm = use_fm
48 | self.use_deep = use_deep
49 | self.l2_reg = l2_reg
50 |
51 | self.epoch = epoch
52 | self.batch_size = batch_size
53 | self.learning_rate = learning_rate
54 | self.optimizer_type = optimizer
55 |
56 | self.batch_norm = batch_norm
57 | self.batch_norm_decay = batch_norm_decay
58 |
59 | self.verbose = verbose
60 | self.random_seed = random_seed
61 | self.loss_type = loss_type
62 | self.eval_metric = eval_metric
63 | self.greater_is_better = greater_is_better
64 | self.train_result,self.valid_result = [],[]
65 |
66 | self._init_graph()
67 | print('feature_size:', self.feature_size)
68 | print('field_size:',self.field_size)
69 | def _init_graph(self):
70 | self.graph = tf.Graph()
71 | with self.graph.as_default():
72 | '''
73 | The main graph-definition part; it is easiest to read alongside the architecture diagram.
74 |
75 | feat_index: the index of each feature, used with embedding_lookup to select its embedding (a plain feature-to-embedding mapping).
76 | feat_value: the corresponding feature value; 1 for categorical features, the original value otherwise.
77 | label: the ground-truth value. Two dropout placeholders are also defined to reduce overfitting.
78 |
79 |
80 | One question about the overall setup was how different fields are handled; in the figure the deep and FFM parts seem to take differently organised inputs... in fact DeepFM does not model fields separately.
81 |
82 |
83 | After this, the weight-initialization method is called and all weights are kept in one dict. feature_embeddings is essentially the latent vectors of FM:
84 | one latent vector per feature. feature_bias holds the first-order weights w of FM. Then the deep graph is built; its input size is
85 | (number of fields) * (length of each latent vector), and the weights of each layer are created from the layer configuration. For the output layer,
86 | the projection size depends on the configuration: FM only, deep only, or both.
87 |
88 |
89 |
90 | '''
91 |
92 | tf.set_random_seed(self.random_seed)
93 |
94 |
95 | # Placeholders for the feature indices into the feature dictionary and the corresponding feature values
96 | self.feat_index = tf.placeholder(tf.int32,
97 | shape=[None,None],
98 | name='feat_index')
99 | self.feat_value = tf.placeholder(tf.float32,
100 | shape=[None,None],
101 | name='feat_value')
102 |
103 | self.label = tf.placeholder(tf.float32,shape=[None,1],name='label')
104 | self.dropout_keep_fm = tf.placeholder(tf.float32,shape=[None],name='dropout_keep_fm')
105 | self.dropout_keep_deep = tf.placeholder(tf.float32,shape=[None],name='dropout_deep_deep')
106 | self.train_phase = tf.placeholder(tf.bool,name='train_phase')
107 |
108 | self.weights = self._initialize_weights()
109 |
110 |
111 | print('------- Embedding layer: fetch the embedding of each feature by its feature index ---------')
112 | # The first part of the model is the embedding step; this is the shared embedding part
113 | # Select the embedding rows of weights['feature_embeddings'] according to feat_index, then multiply them by the corresponding feat_value
114 | # This corresponds to the latent vector v in the formula; see https://www.jianshu.com/p/cf796fca244c
115 | '''
116 | For each input feature index, take the corresponding latent vector from the embedding table, then multiply it by that feature's value.
117 | For numerical features the raw value multiplies the latent vector; for categorical features the value is 1, so the latent vector stays unchanged.
118 | In the end self.embeddings holds the product of feature values and latent vectors, with shape batch_size * field_size * embedding_size.
119 | '''
120 | ########### Here N * F * K means batch size * field_size * embedding size
121 | '''
122 | What we need is, for each sample, the embedding of every field:
123 | (1) Using the feature indices, pull the latent vectors of all features this sample contains out of weights['feature_embeddings'].
124 | (2) Reshape the sample's feature values to shape [-1, field_size, 1]: -1 leaves the batch dimension open, then field_size rows and one column; three-dimensional.
125 | (3) Multiply the two results. The product, embeddings, is each feature's latent vector scaled by that feature's value.
126 |
127 | Per the explanation of the embedding layer at https://www.jianshu.com/p/e7b2d53ec42b, fields of different lengths all map to vectors of length embedding_size after embedding.
128 |
129 | The big subtlety is why feat_value has length field_size: after one-hot encoding, only the active column of each categorical field is stored
130 | (the zero entries are simply not kept), so the effective length of feat_value is exactly field_size.
131 | Step (1) therefore gives a field_size * embedding_size tensor, step (2) extracts each field's value, and step (3) multiplies them,
132 | which amounts to embedding every field's value. The embedding maps each feature from the feature_size one-hot space down to embedding_size.
133 |
134 |
135 | The hard part is keeping field_size, embedding_size and feature_size straight: indices are drawn from a space of size feature_size, but each sample's stored lists have length field_size,
136 | and the embedding weight matrix is indexed by feature index, so it has feature_size rows. The final result has shape field_size * embedding_size, i.e. the features multiplied by v.
137 | '''
138 | self.embeddings = tf.nn.embedding_lookup(self.weights['feature_embeddings'],self.feat_index) # N * F * K
139 | print('self.embeddings.shape:',self.embeddings.shape)
140 | feat_value = tf.reshape(self.feat_value,shape=[-1,self.field_size,1])
141 | print('feat_value.shape:', feat_value.shape)
142 | self.embeddings = tf.multiply(self.embeddings,feat_value)
143 | print('after self.embeddings.shape:', self.embeddings.shape)
144 |
145 |
146 |
147 | print('--------- Part 1 of the network: the FM first-order term (parts 1 and 2 together form the FM component) -----------')
148 |
149 | '''
150 | First-order term: look up the corresponding w in self.weights["feature_bias"]; the result has shape batch_size * field_size.
151 | Second-order term: the FM computation, using the sum-square-minus-square-sum trick. First sum the embeddings over the field_size dimension to obtain the highlighted term of the formula.
152 | '''
153 |
154 | # first order term: the first-order part of the FM formula; feat_index is used here to fetch the corresponding weights
155 | # Only a simple weight lookup happens here: a w*x product for each individual feature value
156 | self.y_first_order = tf.nn.embedding_lookup(self.weights['feature_bias'],self.feat_index)
157 | self.y_first_order = tf.reduce_sum(tf.multiply(self.y_first_order,feat_value),2)
158 | self.y_first_order = tf.nn.dropout(self.y_first_order,self.dropout_keep_fm[0])
159 |
160 | print('--------- FM second-order term (the latter part of the overall formula) -----------')
161 | # second order term: this whole block computes the second-order part of the FM formula
162 | # sum-square part: the first operand of the subtraction in the formula; sum first, then square. Axis 1 sums within each sample, i.e. summing all the v*x terms
163 | self.summed_features_emb = tf.reduce_sum(self.embeddings,1) # None * k
164 | self.summed_features_emb_square = tf.square(self.summed_features_emb) # None * K
165 |
166 | # square-sum part: the second operand of the subtraction in the formula; square first, then sum
167 | self.squared_features_emb = tf.square(self.embeddings)
168 | self.squared_sum_features_emb = tf.reduce_sum(self.squared_features_emb, 1) # None * K
169 |
170 | #second order
171 | self.y_second_order = 0.5 * tf.subtract(self.summed_features_emb_square,self.squared_sum_features_emb)
172 | self.y_second_order = tf.nn.dropout(self.y_second_order,self.dropout_keep_fm[1])
173 |
174 | print('--------- Part 2 of the network: the deep component -----------')
175 | '''
176 | Deep term: reshape self.embeddings (shape batch_size * self.field_size * self.embedding_size)
177 | into shape batch_size * (self.field_size * self.embedding_size), then feed it through the network.
178 | '''
179 | # Deep component: pass the output of the embedding part through a few fully connected layers
180 | self.y_deep = tf.reshape(self.embeddings,shape=[-1,self.field_size * self.embedding_size])
181 | self.y_deep = tf.nn.dropout(self.y_deep,self.dropout_keep_deep[0])
182 |
183 | for i in range(0,len(self.deep_layers)):
184 | self.y_deep = tf.add(tf.matmul(self.y_deep,self.weights["layer_%d" %i]), self.weights["bias_%d"%i])
185 | self.y_deep = self.deep_layers_activation(self.y_deep)
186 | self.y_deep = tf.nn.dropout(self.y_deep,self.dropout_keep_deep[i+1])
187 |
188 |
189 |
190 | ######################## A nice property of this code: everything was defined above, and here we decide which structures to use, composing different networks.
191 | #----DeepFM--------- We can use logloss (classification) or mse (regression), and try a variety of optimizers
192 | '''
193 | Finally all terms are concatenated and projected to a single value. With FM only (no deep part), the projection input has size field_size + embedding_size.
194 | If the deep part is included, the size of the last deep layer is added on top. The final fully connected layer maps the features to a scalar.
195 | '''
196 | if self.use_fm and self.use_deep:
197 | concat_input = tf.concat([self.y_first_order, self.y_second_order, self.y_deep], axis=1)
198 | elif self.use_fm:
199 | concat_input = tf.concat([self.y_first_order, self.y_second_order], axis=1)
200 | elif self.use_deep:
201 | concat_input = self.y_deep
202 |
203 | self.out = tf.add(tf.matmul(concat_input,self.weights['concat_projection']),self.weights['concat_bias'])
204 |
205 | # loss
206 | if self.loss_type == "logloss":
207 | self.out = tf.nn.sigmoid(self.out)
208 | self.loss = tf.losses.log_loss(self.label, self.out)
209 | elif self.loss_type == "mse":
210 | self.loss = tf.nn.l2_loss(tf.subtract(self.label, self.out))
211 | # l2 regularization on weights
212 | if self.l2_reg > 0:
213 | self.loss += tf.contrib.layers.l2_regularizer(
214 | self.l2_reg)(self.weights["concat_projection"])
215 | if self.use_deep:
216 | for i in range(len(self.deep_layers)):
217 | self.loss += tf.contrib.layers.l2_regularizer(
218 | self.l2_reg)(self.weights["layer_%d" % i])
219 |
220 |
221 | if self.optimizer_type == "adam":
222 | self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate, beta1=0.9, beta2=0.999,
223 | epsilon=1e-8).minimize(self.loss)
224 | elif self.optimizer_type == "adagrad":
225 | self.optimizer = tf.train.AdagradOptimizer(learning_rate=self.learning_rate,
226 | initial_accumulator_value=1e-8).minimize(self.loss)
227 | elif self.optimizer_type == "gd":
228 | self.optimizer = tf.train.GradientDescentOptimizer(learning_rate=self.learning_rate).minimize(self.loss)
229 | elif self.optimizer_type == "momentum":
230 | self.optimizer = tf.train.MomentumOptimizer(learning_rate=self.learning_rate, momentum=0.95).minimize(
231 | self.loss)
232 |
233 |
234 | #init
235 | self.saver = tf.train.Saver()
236 | init = tf.global_variables_initializer()
237 | self.sess = tf.Session()
238 | self.sess.run(init)
239 |
240 | # number of params
241 | total_parameters = 0
242 | for variable in self.weights.values():
243 | shape = variable.get_shape()
244 | variable_parameters = 1
245 | for dim in shape:
246 | variable_parameters *= dim.value
247 | total_parameters += variable_parameters
248 | if self.verbose > 0:
249 | print("#params: %d" % total_parameters)
250 |
251 |
252 |
253 |
254 |
255 | def _initialize_weights(self):
256 | '''
257 | Initial definition of the network parameters; feature_embeddings holds the latent vectors.
258 |
259 |
260 | weights['feature_embeddings']: each entry is the v_ik of FM, so the matrix is F * K, where F is the feature count (the total after one-hot encoding the categorical features) and K is the size of the dense vector.
261 | weights['feature_bias']: the first-order weights of FM
262 |
263 | '''
264 | weights = dict()
265 |
266 | #embeddings
267 | weights['feature_embeddings'] = tf.Variable(
268 | tf.random_normal([self.feature_size,self.embedding_size],0.0,0.01),
269 | name='feature_embeddings')
270 | weights['feature_bias'] = tf.Variable(tf.random_normal([self.feature_size,1],0.0,1.0),name='feature_bias')
271 |
272 | #deep layers
273 | num_layer = len(self.deep_layers)
274 | input_size = self.field_size * self.embedding_size
275 | glorot = np.sqrt(2.0/(input_size + self.deep_layers[0]))
276 |
277 | weights['layer_0'] = tf.Variable(
278 | np.random.normal(loc=0,scale=glorot,size=(input_size,self.deep_layers[0])),dtype=np.float32
279 | )
280 | weights['bias_0'] = tf.Variable(
281 | np.random.normal(loc=0,scale=glorot,size=(1,self.deep_layers[0])),dtype=np.float32
282 | )
283 |
284 |
285 | for i in range(1,num_layer):
286 | glorot = np.sqrt(2.0 / (self.deep_layers[i - 1] + self.deep_layers[i]))
287 | weights["layer_%d" % i] = tf.Variable(
288 | np.random.normal(loc=0, scale=glorot, size=(self.deep_layers[i - 1], self.deep_layers[i])),
289 | dtype=np.float32) # layers[i-1] * layers[i]
290 | weights["bias_%d" % i] = tf.Variable(
291 | np.random.normal(loc=0, scale=glorot, size=(1, self.deep_layers[i])),
292 | dtype=np.float32) # 1 * layer[i]
293 |
294 |
295 | # final concat projection layer
296 | # Why is self.field_size + self.embedding_size the size of the FM output? The first-order term gives one value per field (field_size) and the second-order term one value per embedding dimension (embedding_size).
297 |
298 | if self.use_fm and self.use_deep:
299 | input_size = self.field_size + self.embedding_size + self.deep_layers[-1]
300 | elif self.use_fm:
301 | input_size = self.field_size + self.embedding_size
302 | elif self.use_deep:
303 | input_size = self.deep_layers[-1]
304 |
305 | glorot = np.sqrt(2.0/(input_size + 1))
306 | weights['concat_projection'] = tf.Variable(np.random.normal(loc=0,scale=glorot,size=(input_size,1)),dtype=np.float32)
307 | weights['concat_bias'] = tf.Variable(tf.constant(0.01),dtype=np.float32)
308 |
309 |
310 | return weights
311 |
312 |
313 | def get_batch(self,Xi,Xv,y,batch_size,index):
314 | start = index * batch_size
315 | end = (index + 1) * batch_size
316 | end = end if end < len(y) else len(y)
317 | return Xi[start:end],Xv[start:end],[[y_] for y_ in y[start:end]]
318 |
319 | # shuffle three lists simultaneously
320 | def shuffle_in_unison_scary(self, a, b, c):
321 | # By restoring the same random state before each shuffle, np.random.shuffle permutes all lists in the same order, so the element-wise correspondence between them is preserved
322 | # This keeps the feature indices and their corresponding values aligned after shuffling
323 | rng_state = np.random.get_state()
324 | np.random.shuffle(a)
325 | np.random.set_state(rng_state)
326 | np.random.shuffle(b)
327 | np.random.set_state(rng_state)
328 | np.random.shuffle(c)
329 |
330 |
331 | def evaluate(self, Xi, Xv, y):
332 | """
333 | :param Xi: list of list of feature indices of each sample in the dataset
334 | :param Xv: list of list of feature values of each sample in the dataset
335 | :param y: label of each sample in the dataset
336 | :return: metric of the evaluation
337 | """
338 | y_pred = self.predict(Xi, Xv)
339 | return self.eval_metric(y, y_pred)
340 |
341 | def predict(self, Xi, Xv):
342 | """
343 | :param Xi: list of list of feature indices of each sample in the dataset
344 | :param Xv: list of list of feature values of each sample in the dataset
345 | :return: predicted probability of each sample
346 |
347 |
348 | Use the model to produce predictions from the given input features.
349 | """
350 | # dummy y
351 | dummy_y = [1] * len(Xi)
352 | batch_index = 0
353 | Xi_batch, Xv_batch, y_batch = self.get_batch(Xi, Xv, dummy_y, self.batch_size, batch_index)
354 | y_pred = None
355 | while len(Xi_batch) > 0:
356 | num_batch = len(y_batch)
357 | feed_dict = {self.feat_index: Xi_batch,
358 | self.feat_value: Xv_batch,
359 | self.label: y_batch,
360 | self.dropout_keep_fm: [1.0] * len(self.dropout_fm),
361 | self.dropout_keep_deep: [1.0] * len(self.dropout_dep),
362 | self.train_phase: False}
363 | batch_out = self.sess.run(self.out, feed_dict=feed_dict)
364 |
365 | if batch_index == 0:
366 | y_pred = np.reshape(batch_out, (num_batch,))
367 | else:
368 | y_pred = np.concatenate((y_pred, np.reshape(batch_out, (num_batch,))))
369 |
370 | batch_index += 1
371 | Xi_batch, Xv_batch, y_batch = self.get_batch(Xi, Xv, dummy_y, self.batch_size, batch_index)
372 |
373 | return y_pred
374 |
375 |
376 | def fit_on_batch(self,Xi,Xv,y):
377 | # One training step on a single mini-batch
378 | feed_dict = {self.feat_index:Xi,
379 | self.feat_value:Xv,
380 | self.label:y,
381 | self.dropout_keep_fm:self.dropout_fm,
382 | self.dropout_keep_deep:self.dropout_dep,
383 | self.train_phase:True}
384 |
385 | loss,opt = self.sess.run([self.loss,self.optimizer],feed_dict=feed_dict)
386 |
387 | return loss
388 |
389 | def fit(self, Xi_train, Xv_train, y_train,
390 | Xi_valid=None, Xv_valid=None, y_valid=None,
391 | early_stopping=False, refit=False):
392 | """
393 | :param Xi_train: [[ind1_1, ind1_2, ...], [ind2_1, ind2_2, ...], ..., [indi_1, indi_2, ..., indi_j, ...], ...]
394 | indi_j is the feature index of feature field j of sample i in the training set
395 | :param Xv_train: [[val1_1, val1_2, ...], [val2_1, val2_2, ...], ..., [vali_1, vali_2, ..., vali_j, ...], ...]
396 | vali_j is the feature value of feature field j of sample i in the training set
397 | vali_j can be either binary (1/0, for binary/categorical features) or float (e.g., 10.24, for numerical features)
398 | :param y_train: label of each sample in the training set
399 | :param Xi_valid: list of list of feature indices of each sample in the validation set
400 | :param Xv_valid: list of list of feature values of each sample in the validation set
401 | :param y_valid: label of each sample in the validation set
402 | :param early_stopping: perform early stopping or not
403 | :param refit: refit the model on the train+valid dataset or not
404 | :return: None
405 |
406 |
407 | Main training loop: train for self.epoch epochs, calling fit_on_batch to run the network on every mini-batch.
408 | """
409 | has_valid = Xv_valid is not None
410 | for epoch in range(self.epoch):
411 | t1 = time()
412 | self.shuffle_in_unison_scary(Xi_train, Xv_train, y_train)
413 | total_batch = int(len(y_train) / self.batch_size)
414 | # Iterate over total_batch mini-batches
415 | for i in range(total_batch):
416 | Xi_batch, Xv_batch, y_batch = self.get_batch(Xi_train, Xv_train, y_train, self.batch_size, i)
417 | self.fit_on_batch(Xi_batch, Xv_batch, y_batch)
418 |
419 |
420 | # evaluate training and validation datasets
421 | train_result = self.evaluate(Xi_train, Xv_train, y_train)
422 | self.train_result.append(train_result)
423 | if has_valid:
424 | valid_result = self.evaluate(Xi_valid, Xv_valid, y_valid)
425 | self.valid_result.append(valid_result)
426 | if self.verbose > 0 and epoch % self.verbose == 0:
427 | if has_valid:
428 | print("[%d] train-result=%.4f, valid-result=%.4f [%.1f s]"
429 | % (epoch + 1, train_result, valid_result, time() - t1))
430 | else:
431 | print("[%d] train-result=%.4f [%.1f s]"
432 | % (epoch + 1, train_result, time() - t1))
433 | if has_valid and early_stopping and self.training_termination(self.valid_result):
434 | break
435 |
436 | # fit a few more epochs on train+valid until the result reaches best_train_score
437 | if has_valid and refit:
438 | if self.greater_is_better:
439 | best_valid_score = max(self.valid_result)
440 | else:
441 | best_valid_score = min(self.valid_result)
442 | best_epoch = self.valid_result.index(best_valid_score)
443 | best_train_score = self.train_result[best_epoch]
444 | Xi_train = Xi_train + Xi_valid
445 | Xv_train = Xv_train + Xv_valid
446 | y_train = y_train + y_valid
447 | for epoch in range(100):
448 | self.shuffle_in_unison_scary(Xi_train, Xv_train, y_train)
449 | total_batch = int(len(y_train) / self.batch_size)
450 | for i in range(total_batch):
451 | Xi_batch, Xv_batch, y_batch = self.get_batch(Xi_train, Xv_train, y_train,
452 | self.batch_size, i)
453 | self.fit_on_batch(Xi_batch, Xv_batch, y_batch)
454 | # check
455 | train_result = self.evaluate(Xi_train, Xv_train, y_train)
456 | if abs(train_result - best_train_score) < 0.001 or \
457 | (self.greater_is_better and train_result > best_train_score) or \
458 | ((not self.greater_is_better) and train_result < best_train_score):
459 | break
460 |
461 |
462 | def training_termination(self, valid_result):
463 | if len(valid_result) > 5:
464 | if self.greater_is_better:
465 | if valid_result[-1] < valid_result[-2] and \
466 | valid_result[-2] < valid_result[-3] and \
467 | valid_result[-3] < valid_result[-4] and \
468 | valid_result[-4] < valid_result[-5]:
469 | return True
470 | else:
471 | if valid_result[-1] > valid_result[-2] and \
472 | valid_result[-2] > valid_result[-3] and \
473 | valid_result[-3] > valid_result[-4] and \
474 | valid_result[-4] > valid_result[-5]:
475 | return True
476 | return False
477 |
478 |
479 |
480 |
481 |
482 |
483 |
484 |
485 |
486 |
487 |
488 |
489 |
490 |
--------------------------------------------------------------------------------
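Note: the "sum-square minus square-sum" step used for y_second_order above is the standard FM identity sum_{i<j} <v_i, v_j> x_i x_j = 0.5 * ((sum_i v_i x_i)^2 - sum_i (v_i x_i)^2), taken per embedding dimension. A quick NumPy check of that identity (independent of the repository code; names chosen for illustration):

# Verify the FM second-order identity that y_second_order implements (toy sketch, not repo code).
import numpy as np

rng = np.random.RandomState(0)
field_size, embedding_size = 5, 3
v = rng.randn(field_size, embedding_size)   # latent vectors, like rows of weights['feature_embeddings']
x = rng.rand(field_size, 1)                 # one sample's feature values
emb = v * x                                 # same role as tf.multiply(self.embeddings, feat_value)

# direct pairwise form
pairwise = sum(np.dot(emb[i], emb[j])
               for i in range(field_size) for j in range(i + 1, field_size))

# sum-square minus square-sum form, as built in the graph
fast = 0.5 * (np.square(emb.sum(axis=0)) - np.square(emb).sum(axis=0)).sum()

assert np.allclose(pairwise, fast)
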
/Basic-DeepFM-model/__pycache__/DataReader.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/Basic-DeepFM-model/__pycache__/DataReader.cpython-36.pyc
--------------------------------------------------------------------------------
/Basic-DeepFM-model/__pycache__/DeepFM.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/Basic-DeepFM-model/__pycache__/DeepFM.cpython-36.pyc
--------------------------------------------------------------------------------
/Basic-DeepFM-model/__pycache__/config.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/Basic-DeepFM-model/__pycache__/config.cpython-36.pyc
--------------------------------------------------------------------------------
/Basic-DeepFM-model/__pycache__/metrics.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/Basic-DeepFM-model/__pycache__/metrics.cpython-36.pyc
--------------------------------------------------------------------------------
/Basic-DeepFM-model/config.py:
--------------------------------------------------------------------------------
1 | # -*- encoding:utf-8 -*-
2 | TRAIN_FILE = "data/train.csv"
3 | TEST_FILE = "data/test.csv"
4 |
5 | SUB_DIR = "output"
6 |
7 | '''
8 | Configuration of the different types of feature columns.
9 | '''
10 | NUM_SPLITS = 3
11 | RANDOM_SEED = 2017
12 |
13 | # types of columns of the dataset dataframe
14 | CATEGORICAL_COLS = [
15 | # 'ps_ind_02_cat', 'ps_ind_04_cat', 'ps_ind_05_cat',
16 | # 'ps_car_01_cat', 'ps_car_02_cat', 'ps_car_03_cat',
17 | # 'ps_car_04_cat', 'ps_car_05_cat', 'ps_car_06_cat',
18 | # 'ps_car_07_cat', 'ps_car_08_cat', 'ps_car_09_cat',
19 | # 'ps_car_10_cat', 'ps_car_11_cat',
20 | ]
21 |
22 | NUMERIC_COLS = [
23 | # # binary
24 | # "ps_ind_06_bin", "ps_ind_07_bin", "ps_ind_08_bin",
25 | # "ps_ind_09_bin", "ps_ind_10_bin", "ps_ind_11_bin",
26 | # "ps_ind_12_bin", "ps_ind_13_bin", "ps_ind_16_bin",
27 | # "ps_ind_17_bin", "ps_ind_18_bin",
28 | # "ps_calc_15_bin", "ps_calc_16_bin", "ps_calc_17_bin",
29 | # "ps_calc_18_bin", "ps_calc_19_bin", "ps_calc_20_bin",
30 | # numeric
31 | "ps_reg_01", "ps_reg_02", "ps_reg_03",
32 | "ps_car_12", "ps_car_13", "ps_car_14", "ps_car_15",
33 |
34 | # feature engineering
35 | "missing_feat", "ps_car_13_x_ps_reg_03",
36 | ]
37 |
38 | IGNORE_COLS = [
39 | "id", "target",
40 | "ps_calc_01", "ps_calc_02", "ps_calc_03", "ps_calc_04",
41 | "ps_calc_05", "ps_calc_06", "ps_calc_07", "ps_calc_08",
42 | "ps_calc_09", "ps_calc_10", "ps_calc_11", "ps_calc_12",
43 | "ps_calc_13", "ps_calc_14",
44 | "ps_calc_15_bin", "ps_calc_16_bin", "ps_calc_17_bin",
45 | "ps_calc_18_bin", "ps_calc_19_bin", "ps_calc_20_bin"
46 | ]
47 |
--------------------------------------------------------------------------------
/Basic-DeepFM-model/fig/DNN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/Basic-DeepFM-model/fig/DNN.png
--------------------------------------------------------------------------------
/Basic-DeepFM-model/fig/DeepFM.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/Basic-DeepFM-model/fig/DeepFM.png
--------------------------------------------------------------------------------
/Basic-DeepFM-model/fig/FM.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/Basic-DeepFM-model/fig/FM.png
--------------------------------------------------------------------------------
/Basic-DeepFM-model/main.py:
--------------------------------------------------------------------------------
1 | # -*- encoding:utf-8 -*-
2 | import os
3 | import numpy as np
4 | import pandas as pd
5 | import tensorflow as tf
6 | from sklearn.metrics import make_scorer
7 | from sklearn.model_selection import StratifiedKFold
8 | from DataReader import FeatureDictionary, DataParser
9 | from matplotlib import pyplot as plt
10 |
11 | import config
12 | from metrics import gini_norm
13 | from DeepFM import DeepFM
14 | '''
15 | Advantages:
16 | (1) No need to pre-train an FM to obtain the latent vectors;
17 | (2) No manual feature engineering required;
18 | (3) Learns low-order and high-order feature combinations at the same time;
19 | (4) The FM module and the Deep module share the Feature Embedding part, which gives faster and more accurate training.
20 |
21 | The only real difficulty is understanding the network structure inside DeepFM; everything else is straightforward:
22 | a deep part plus an FM part, sharing the same feature embedding, with no notion of fields, doing low-order extraction and high-order interaction respectively.
23 |
24 | This is a well-organised reference implementation with a lot of knobs: it can run as an FM, a pure deep model, or a DeepFM.
25 |
26 |
27 | The overall flow is just these steps:
28 | (1) Data loading: preprocessing and splitting of the training and test sets.
29 | (2) Model construction: for DeepFM this means parameter initialization, the up-front feature embedding, the FM first-order part, the FM second-order part, the high-order Deep part,
30 | and summing the three outputs followed by a sigmoid.
31 | (3) Loss computation and optimization.
32 |
33 |
34 | One point worth spelling out is how the embedding layer actually works.
35 | feature_size: 256, the number of features after one-hot encoding; the embedding is applied over this dimension.
36 |
37 | It is mainly used when the variables are declared.
38 | field_size: 39. What does field_size mean; does DeepFM also have a concept of fields? Here it is the number of features before one-hot encoding.
39 | That is probably the essential meaning of "field": it describes the features before one-hot encoding, so
40 | height and gender are different fields, while gender=male and gender=female belong to the same field.
41 | It is used heavily; the features are first converted into this form.
42 |
43 |
44 | For the FM part, the first-order term takes an input of length 39 and the second-order term an input of length 256.
45 |
46 |
47 | Hmm... this source is fairly involved and there is a lot to work through: how Xv_train_ is obtained, how field_size is used and why it is needed,
48 | why field_size and feature_size are kept separate, and how FeatureDictionary packs and parses everything.
49 |
50 |
51 | After reading it slowly it makes sense: the idea of handling different fields separately really is used here.
52 |
53 |
54 | OK, everything is clear now. Once you understand the meanings of, and differences between, field_size, embedding_size and feature_size, the rest follows.
55 |
56 |
57 | '''
58 | def load_data():
59 | '''
60 | Load the data; this step is important too.
61 |
62 | Note the missing-value count computed here, plus the filtering by config.IGNORE_COLS and config.CATEGORICAL_COLS.
63 |
64 |
65 | Read the data with pandas, store the features and the target in their variables, and record which columns are categorical.
66 | '''
67 | dfTrain = pd.read_csv(config.TRAIN_FILE)
68 | dfTest = pd.read_csv(config.TEST_FILE)
69 |
70 | def preprocess(df):
71 | cols = [c for c in df.columns if c not in ['id','target']]
72 | #df['missing_feat'] = np.sum(df[df[cols]==-1].values,axis=1)
73 | df["missing_feat"] = np.sum((df[cols] == -1).values, axis=1)
74 | df['ps_car_13_x_ps_reg_03'] = df['ps_car_13'] * df['ps_reg_03']
75 | return df
76 |
77 | dfTrain = preprocess(dfTrain)
78 | dfTest = preprocess(dfTest)
79 |
80 | cols = [c for c in dfTrain.columns if c not in ['id','target']]
81 | cols = [c for c in cols if (not c in config.IGNORE_COLS)]
82 |
83 | X_train = dfTrain[cols].values
84 | y_train = dfTrain['target'].values
85 |
86 | X_test = dfTest[cols].values
87 | ids_test = dfTest['id'].values
88 |
89 | cat_features_indices = [i for i,c in enumerate(cols) if c in config.CATEGORICAL_COLS]
90 |
91 | return dfTrain,dfTest,X_train,y_train,X_test,ids_test,cat_features_indices
92 |
93 | def run_base_model_dfm(dfTrain,dfTest,folds,dfm_params):
94 | '''
95 | Runs the model. As the code below shows, this one function can be configured to train FM, Deep, or DeepFM.
96 | '''
97 |
98 | # Don't overlook FeatureDictionary: a lot of information packaging and conversion happens inside it (how parsing and dictionary building work, and how v is obtained and used).
99 | fd = FeatureDictionary(dfTrain=dfTrain,
100 | dfTest=dfTest,
101 | numeric_cols=config.NUMERIC_COLS,
102 | ignore_cols = config.IGNORE_COLS)
103 | # During parsing, every row is handled one by one. dfi records each feature's index within the overall input features; dfv records the concrete values:
104 | # for numerical features the raw value is stored, for categorical ones a 1 is stored. This amounts to a one-hot encoding,
105 | # with dfi holding the index of each feature. The feature length fed into the network is (number of numerical features + one-hot length of the categorical features).
106 | # In the end, Xi and Xv are 2-D lists in which every inner list is one row of data; Xi holds the feature indices, Xv the concrete feature values.
107 | data_parser = DataParser(feat_dict= fd)
108 |
109 | # Xi_train: the feature (column) indices
110 | # Xv_train: the corresponding values
111 |
112 | # Parse the data: Xi_train holds the feature indices, Xv_train holds the concrete feature values
113 | Xi_train,Xv_train,y_train = data_parser.parse(df=dfTrain,has_label=True)
114 | Xi_test,Xv_test,ids_test = data_parser.parse(df=dfTest)
115 |
116 |
117 | # These are 2-D: the outer list runs over samples, each inner list holds the values at that sample's feature indices. Every inner list has length field_size, since only the active one-hot entry of each field is stored.
118 | print('Xi_train:',Xi_train) # the feature indices
119 | print('Xv_train:', Xv_train) # the raw feature values
120 | print('y_train:', y_train)
121 | print('Xi_test:', Xi_test)
122 | print('Xv_test:', Xv_test)
123 |
124 | print('Xi_train shape:', len(Xi_train)) # number of samples (index lists)
125 | print('Xv_train shape:', len(Xv_train)) # number of samples (value lists)
126 | print('y_train shape:', len(y_train))
127 | print('Xi_test shape:', len(Xi_test))
128 | print('Xv_test shape:', len(Xv_test))
129 | #print('ids_test:', ids_test)
130 | print(dfTrain.dtypes)
131 |
132 | # field_size is the original number of features; feature_size is the number of features after one-hot encoding the categorical columns
133 | dfm_params['feature_size'] = fd.feat_dim
134 | dfm_params['field_size'] = len(Xi_train[0])
135 |
136 | y_train_meta = np.zeros((dfTrain.shape[0],1),dtype=float)
137 | y_test_meta = np.zeros((dfTest.shape[0],1),dtype=float)
138 |
139 | _get = lambda x,l:[x[i] for i in l]
140 |
141 | gini_results_cv = np.zeros(len(folds),dtype=float)
142 | gini_results_epoch_train = np.zeros((len(folds),dfm_params['epoch']),dtype=float)
143 | gini_results_epoch_valid = np.zeros((len(folds),dfm_params['epoch']),dtype=float)
144 |
145 | for i, (train_idx, valid_idx) in enumerate(folds):
146 |
147 | # Xi_train_, Xv_train_, y_train_ are, for this fold, the feature indices within the overall input features, the concrete feature values, and the corresponding labels
148 | Xi_train_, Xv_train_, y_train_ = _get(Xi_train, train_idx), _get(Xv_train, train_idx), _get(y_train, train_idx)
149 | Xi_valid_, Xv_valid_, y_valid_ = _get(Xi_train, valid_idx), _get(Xv_train, valid_idx), _get(y_train, valid_idx)
150 |
151 | # Train the model and make predictions
152 | dfm = DeepFM(**dfm_params)
153 |
154 | print('before fit Xi_train_:', Xi_train_[0:3])
155 | print('before fit Xv_train_:', Xv_train_[0:3])
156 | print('before fit y_train_:', y_train_[0:3])
157 | dfm.fit(Xi_train_, Xv_train_, y_train_, Xi_valid_, Xv_valid_, y_valid_)
158 |
159 | y_train_meta[valid_idx,0] = dfm.predict(Xi_valid_, Xv_valid_)
160 | y_test_meta[:,0] += dfm.predict(Xi_test, Xv_test)
161 |
162 | gini_results_cv[i] = gini_norm(y_valid_, y_train_meta[valid_idx])
163 | gini_results_epoch_train[i] = dfm.train_result
164 | gini_results_epoch_valid[i] = dfm.valid_result
165 |
166 | y_test_meta /= float(len(folds))
167 |
168 | # save result
169 | if dfm_params["use_fm"] and dfm_params["use_deep"]:
170 | clf_str = "DeepFM"
171 | elif dfm_params["use_fm"]:
172 | clf_str = "FM"
173 | elif dfm_params["use_deep"]:
174 | clf_str = "DNN"
175 | print("%s: %.5f (%.5f)"%(clf_str, gini_results_cv.mean(), gini_results_cv.std()))
176 | filename = "%s_Mean%.5f_Std%.5f.csv"%(clf_str, gini_results_cv.mean(), gini_results_cv.std())
177 | _make_submission(ids_test, y_test_meta, filename)
178 |
179 | _plot_fig(gini_results_epoch_train, gini_results_epoch_valid, clf_str)
180 |
181 | return y_train_meta, y_test_meta
182 |
183 | def _make_submission(ids, y_pred, filename="submission.csv"):
184 | pd.DataFrame({"id": ids, "target": y_pred.flatten()}).to_csv(
185 | os.path.join(config.SUB_DIR, filename), index=False, float_format="%.5f")
186 |
187 |
188 | def _plot_fig(train_results, valid_results, model_name):
189 | colors = ["red", "blue", "green"]
190 | xs = np.arange(1, train_results.shape[1]+1)
191 | plt.figure()
192 | legends = []
193 | for i in range(train_results.shape[0]):
194 | plt.plot(xs, train_results[i], color=colors[i], linestyle="solid", marker="o")
195 | plt.plot(xs, valid_results[i], color=colors[i], linestyle="dashed", marker="o")
196 | legends.append("train-%d"%(i+1))
197 | legends.append("valid-%d"%(i+1))
198 | plt.xlabel("Epoch")
199 | plt.ylabel("Normalized Gini")
200 | plt.title("%s"%model_name)
201 | plt.legend(legends)
202 | plt.savefig("fig/%s.png"%model_name)
203 | plt.close()
204 |
205 |
206 |
207 |
208 | print('--------- Everything starts here -----------')
209 | dfm_params = {
210 | "use_fm":True,
211 | "use_deep":True,
212 | "embedding_size":8, # 对特征embedding的大小, 估计是隐向量的长度
213 | "dropout_fm":[1.0,1.0],
214 | "deep_layers":[32,32],
215 | "dropout_deep":[0.5,0.5,0.5],
216 | "deep_layer_activation":tf.nn.relu,
217 | "epoch":30,
218 | "batch_size":1024,
219 | "learning_rate":0.001,
220 | "optimizer":"adam",
221 | "batch_norm":1,
222 | "batch_norm_decay":0.995,
223 | "l2_reg":0.01,
224 | "verbose":True,
225 | "eval_metric":gini_norm,
226 | "random_seed":config.RANDOM_SEED
227 | }
228 |
229 | # load data
230 | dfTrain, dfTest, X_train, y_train, X_test, ids_test, cat_features_indices = load_data()
231 |
232 | # folds
233 | folds = list(StratifiedKFold(n_splits=config.NUM_SPLITS, shuffle=True,
234 | random_state=config.RANDOM_SEED).split(X_train, y_train))
235 |
236 | #y_train_dfm,y_test_dfm = run_base_model_dfm(dfTrain,dfTest,folds,dfm_params)
237 | y_train_dfm, y_test_dfm = run_base_model_dfm(dfTrain, dfTest, folds, dfm_params)
238 |
239 |
240 | # ------------------ FM Model ------------------
241 | fm_params = dfm_params.copy()
242 | fm_params["use_deep"] = False
243 | y_train_fm, y_test_fm = run_base_model_dfm(dfTrain, dfTest, folds, fm_params)
244 |
245 |
246 | # ------------------ DNN Model ------------------
247 | dnn_params = dfm_params.copy()
248 | dnn_params["use_fm"] = False
249 | y_train_dnn, y_test_dnn = run_base_model_dfm(dfTrain, dfTest, folds, dnn_params)
--------------------------------------------------------------------------------
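Note: the fold loop in run_base_model_dfm above follows the usual out-of-fold pattern: each fold's validation predictions fill the matching rows of y_train_meta, and each fold's test predictions are summed into y_test_meta and averaged at the end. A minimal, repository-independent sketch of that pattern (the random "predictions" stand in for dfm.predict):

# Out-of-fold / test-averaging pattern, sketched with stand-in predictions (not repo code).
import numpy as np
from sklearn.model_selection import StratifiedKFold

X = np.random.rand(100, 4)
y = np.random.randint(0, 2, 100)
X_test = np.random.rand(20, 4)

folds = list(StratifiedKFold(n_splits=3, shuffle=True, random_state=2017).split(X, y))
y_train_meta = np.zeros((len(y), 1))
y_test_meta = np.zeros((len(X_test), 1))

for train_idx, valid_idx in folds:
    valid_pred = np.random.rand(len(valid_idx))   # stand-in for dfm.predict(Xi_valid_, Xv_valid_)
    test_pred = np.random.rand(len(X_test))       # stand-in for dfm.predict(Xi_test, Xv_test)
    y_train_meta[valid_idx, 0] = valid_pred       # out-of-fold predictions, one row per sample
    y_test_meta[:, 0] += test_pred                # accumulate test predictions

y_test_meta /= float(len(folds))                  # average over folds, as in main.py
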
/Basic-DeepFM-model/metrics.py:
--------------------------------------------------------------------------------
1 | # -*- encoding:utf-8 -*-
2 | import numpy as np
3 | '''
4 | Utility functions: compute the Gini coefficient used to evaluate the predictions.
5 | '''
6 | def gini(actual, pred):
7 | assert (len(actual) == len(pred))
8 | all = np.asarray(np.c_[actual, pred, np.arange(len(actual))], dtype=float)
9 | all = all[np.lexsort((all[:, 2], -1 * all[:, 1]))]
10 | totalLosses = all[:, 0].sum()
11 | giniSum = all[:, 0].cumsum().sum() / totalLosses
12 |
13 | giniSum -= (len(actual) + 1) / 2.
14 | return giniSum / len(actual)
15 |
16 | def gini_norm(actual, pred):
17 | return gini(actual, pred) / gini(actual, actual)
18 |
--------------------------------------------------------------------------------
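Note: a tiny worked example of the normalized Gini metric (values chosen only for illustration): a prediction that ranks all positives above all negatives scores 1.0, and reversing that ranking gives -1.0.

# Toy illustration of gini_norm (hypothetical values, not repo data).
from metrics import gini_norm

actual = [0, 0, 1, 1]
good_pred = [0.1, 0.2, 0.8, 0.9]    # positives ranked above negatives
bad_pred = [0.9, 0.8, 0.2, 0.1]     # ranking reversed

print(gini_norm(actual, good_pred))  # -> 1.0
print(gini_norm(actual, bad_pred))   # -> -1.0
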
/Basic-DeepFM-model/output/DNN_Mean-0.31183_Std0.29369.csv:
--------------------------------------------------------------------------------
1 | id,target
2 | 0,0.54321
3 | 1,0.54492
4 | 2,0.54194
5 | 3,0.54175
6 | 4,0.54266
7 | 5,0.54154
8 | 6,0.54395
9 | 8,0.54214
10 | 10,0.54383
11 | 11,0.54348
12 | 12,0.54175
13 | 14,0.54253
14 | 15,0.54449
15 | 18,0.54221
16 | 21,0.54521
17 | 23,0.54488
18 | 24,0.54286
19 | 25,0.54416
20 | 27,0.54511
21 | 29,0.54365
22 | 30,0.54272
23 | 31,0.54500
24 | 32,0.54485
25 | 33,0.54332
26 | 37,0.54277
27 | 38,0.54376
28 | 39,0.54478
29 | 40,0.54178
30 | 41,0.54429
31 | 42,0.54348
32 | 44,0.54377
33 | 45,0.54288
34 | 47,0.54235
35 | 49,0.54258
36 | 51,0.54283
37 | 52,0.54266
38 | 53,0.54156
39 | 54,0.54426
40 | 55,0.54256
41 | 56,0.54520
42 | 57,0.54370
43 | 59,0.54359
44 | 60,0.54405
45 | 62,0.54316
46 | 63,0.54492
47 | 67,0.54511
48 | 68,0.54221
49 | 69,0.54548
50 | 70,0.54249
51 | 71,0.54415
52 | 73,0.54462
53 | 75,0.54333
54 | 76,0.54298
55 | 81,0.54271
56 | 82,0.54458
57 | 83,0.54240
58 | 86,0.54130
59 | 87,0.54291
60 | 88,0.54318
61 | 91,0.54448
62 | 92,0.54372
63 | 94,0.54307
64 | 97,0.54280
65 | 100,0.54605
66 | 102,0.54389
67 | 103,0.54320
68 | 105,0.54275
69 | 106,0.54410
70 | 108,0.54228
71 | 113,0.54418
72 | 114,0.54378
73 | 115,0.54324
74 | 118,0.54449
75 | 122,0.54158
76 | 124,0.54290
77 | 126,0.54196
78 | 128,0.54516
79 | 129,0.54435
80 | 130,0.54562
81 | 131,0.54449
82 | 132,0.54567
83 | 133,0.54219
84 | 134,0.54506
85 | 135,0.54226
86 | 136,0.54301
87 | 139,0.54165
88 | 140,0.54528
89 | 141,0.54275
90 | 146,0.54209
91 | 148,0.54334
92 | 151,0.54195
93 | 152,0.54450
94 | 154,0.54214
95 | 157,0.54393
96 | 158,0.54286
97 | 159,0.54520
98 | 161,0.54310
99 | 164,0.54332
100 | 165,0.54339
101 | 167,0.54313
102 |
--------------------------------------------------------------------------------
/Basic-DeepFM-model/output/DeepFM_Mean-0.11470_Std0.37335.csv:
--------------------------------------------------------------------------------
1 | id,target
2 | 0,0.46803
3 | 1,0.55377
4 | 2,0.53959
5 | 3,0.48240
6 | 4,0.42640
7 | 5,0.53783
8 | 6,0.43397
9 | 8,0.54862
10 | 10,0.39992
11 | 11,0.48496
12 | 12,0.56211
13 | 14,0.48791
14 | 15,0.40421
15 | 18,0.38874
16 | 21,0.48275
17 | 23,0.36596
18 | 24,0.54895
19 | 25,0.55286
20 | 27,0.46398
21 | 29,0.25796
22 | 30,0.52880
23 | 31,0.53623
24 | 32,0.38785
25 | 33,0.49019
26 | 37,0.53059
27 | 38,0.32213
28 | 39,0.48938
29 | 40,0.44188
30 | 41,0.39470
31 | 42,0.43526
32 | 44,0.38037
33 | 45,0.44053
34 | 47,0.47693
35 | 49,0.43951
36 | 51,0.52558
37 | 52,0.56112
38 | 53,0.63015
39 | 54,0.28074
40 | 55,0.50253
41 | 56,0.36943
42 | 57,0.41124
43 | 59,0.47449
44 | 60,0.41512
45 | 62,0.45376
46 | 63,0.56464
47 | 67,0.48383
48 | 68,0.44448
49 | 69,0.43281
50 | 70,0.41257
51 | 71,0.36101
52 | 73,0.24134
53 | 75,0.48104
54 | 76,0.41155
55 | 81,0.52558
56 | 82,0.40699
57 | 83,0.35711
58 | 86,0.36253
59 | 87,0.42458
60 | 88,0.57573
61 | 91,0.50545
62 | 92,0.57203
63 | 94,0.53472
64 | 97,0.47725
65 | 100,0.42449
66 | 102,0.49121
67 | 103,0.48863
68 | 105,0.59440
69 | 106,0.40794
70 | 108,0.49273
71 | 113,0.33953
72 | 114,0.50476
73 | 115,0.53934
74 | 118,0.48991
75 | 122,0.50319
76 | 124,0.41910
77 | 126,0.41064
78 | 128,0.36258
79 | 129,0.31102
80 | 130,0.45700
81 | 131,0.55222
82 | 132,0.47241
83 | 133,0.47101
84 | 134,0.45344
85 | 135,0.55308
86 | 136,0.50106
87 | 139,0.42091
88 | 140,0.44550
89 | 141,0.42207
90 | 146,0.46423
91 | 148,0.52868
92 | 151,0.44960
93 | 152,0.26475
94 | 154,0.56421
95 | 157,0.58842
96 | 158,0.42789
97 | 159,0.43978
98 | 161,0.62290
99 | 164,0.54502
100 | 165,0.38185
101 | 167,0.53922
102 |
--------------------------------------------------------------------------------
/Basic-DeepFM-model/output/DeepFM_Mean0.01434_Std0.10176.csv:
--------------------------------------------------------------------------------
1 | id,target
2 | 0,0.32278
3 | 1,0.41663
4 | 2,0.44417
5 | 3,0.47512
6 | 4,0.52361
7 | 5,0.33677
8 | 6,0.44370
9 | 8,0.30100
10 | 10,0.48097
11 | 11,0.52027
12 | 12,0.26543
13 | 14,0.40398
14 | 15,0.46376
15 | 18,0.38902
16 | 21,0.35526
17 | 23,0.41269
18 | 24,0.37623
19 | 25,0.30560
20 | 27,0.41068
21 | 29,0.49968
22 | 30,0.48046
23 | 31,0.53911
24 | 32,0.37760
25 | 33,0.42462
26 | 37,0.43910
27 | 38,0.43226
28 | 39,0.40951
29 | 40,0.42573
30 | 41,0.38593
31 | 42,0.45659
32 | 44,0.42400
33 | 45,0.46563
34 | 47,0.41856
35 | 49,0.43669
36 | 51,0.39470
37 | 52,0.35710
38 | 53,0.35468
39 | 54,0.58721
40 | 55,0.34572
41 | 56,0.49496
42 | 57,0.52123
43 | 59,0.43579
44 | 60,0.37308
45 | 62,0.36949
46 | 63,0.36458
47 | 67,0.40002
48 | 68,0.40630
49 | 69,0.51984
50 | 70,0.43685
51 | 71,0.34467
52 | 73,0.49609
53 | 75,0.42494
54 | 76,0.36640
55 | 81,0.41558
56 | 82,0.49456
57 | 83,0.41528
58 | 86,0.38741
59 | 87,0.42377
60 | 88,0.49288
61 | 91,0.43845
62 | 92,0.50188
63 | 94,0.38807
64 | 97,0.43247
65 | 100,0.37401
66 | 102,0.36822
67 | 103,0.39734
68 | 105,0.38886
69 | 106,0.40349
70 | 108,0.29820
71 | 113,0.38590
72 | 114,0.53072
73 | 115,0.37515
74 | 118,0.34776
75 | 122,0.35378
76 | 124,0.35501
77 | 126,0.36031
78 | 128,0.36464
79 | 129,0.48796
80 | 130,0.40816
81 | 131,0.44641
82 | 132,0.40488
83 | 133,0.39336
84 | 134,0.51089
85 | 135,0.49477
86 | 136,0.35754
87 | 139,0.46074
88 | 140,0.38236
89 | 141,0.37077
90 | 146,0.29805
91 | 148,0.43685
92 | 151,0.45538
93 | 152,0.35027
94 | 154,0.35761
95 | 157,0.36037
96 | 158,0.39519
97 | 159,0.33552
98 | 161,0.41159
99 | 164,0.42803
100 | 165,0.44548
101 | 167,0.39931
102 |
--------------------------------------------------------------------------------
/Basic-DeepFM-model/output/DeepFM_Mean0.05735_Std0.20027.csv:
--------------------------------------------------------------------------------
1 | id,target
2 | 0,0.72139
3 | 1,0.51760
4 | 2,0.59032
5 | 3,0.63660
6 | 4,0.50603
7 | 5,0.57058
8 | 6,0.72299
9 | 8,0.62921
10 | 10,0.64393
11 | 11,0.62246
12 | 12,0.64539
13 | 14,0.62271
14 | 15,0.63971
15 | 18,0.74351
16 | 21,0.56603
17 | 23,0.65027
18 | 24,0.62978
19 | 25,0.56364
20 | 27,0.55366
21 | 29,0.64651
22 | 30,0.63995
23 | 31,0.51301
24 | 32,0.65243
25 | 33,0.62960
26 | 37,0.61379
27 | 38,0.62845
28 | 39,0.56194
29 | 40,0.55361
30 | 41,0.65380
31 | 42,0.56262
32 | 44,0.52620
33 | 45,0.56058
34 | 47,0.67995
35 | 49,0.58040
36 | 51,0.57256
37 | 52,0.57186
38 | 53,0.74692
39 | 54,0.63829
40 | 55,0.61376
41 | 56,0.57716
42 | 57,0.66004
43 | 59,0.60760
44 | 60,0.68578
45 | 62,0.68983
46 | 63,0.62641
47 | 67,0.59588
48 | 68,0.59095
49 | 69,0.56658
50 | 70,0.60620
51 | 71,0.53494
52 | 73,0.73047
53 | 75,0.56699
54 | 76,0.68507
55 | 81,0.59263
56 | 82,0.45351
57 | 83,0.65228
58 | 86,0.67729
59 | 87,0.63932
60 | 88,0.62208
61 | 91,0.50822
62 | 92,0.60571
63 | 94,0.61354
64 | 97,0.62548
65 | 100,0.69225
66 | 102,0.50505
67 | 103,0.61700
68 | 105,0.65031
69 | 106,0.66246
70 | 108,0.67469
71 | 113,0.66512
72 | 114,0.53249
73 | 115,0.55344
74 | 118,0.68072
75 | 122,0.53538
76 | 124,0.65328
77 | 126,0.64717
78 | 128,0.73029
79 | 129,0.63653
80 | 130,0.63030
81 | 131,0.50802
82 | 132,0.58770
83 | 133,0.62624
84 | 134,0.44326
85 | 135,0.63895
86 | 136,0.56856
87 | 139,0.53739
88 | 140,0.63811
89 | 141,0.70656
90 | 146,0.57495
91 | 148,0.62791
92 | 151,0.60073
93 | 152,0.73494
94 | 154,0.60894
95 | 157,0.60582
96 | 158,0.54721
97 | 159,0.70589
98 | 161,0.63762
99 | 164,0.53981
100 | 165,0.65285
101 | 167,0.52954
102 |
--------------------------------------------------------------------------------
/Basic-DeepFM-model/output/DeepFM_Mean0.26137_Std0.00210.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/Basic-DeepFM-model/output/DeepFM_Mean0.26137_Std0.00210.csv
--------------------------------------------------------------------------------
/Basic-DeepFM-model/output/FM_Mean0.15581_Std0.02785.csv:
--------------------------------------------------------------------------------
1 | id,target
2 | 0,0.02137
3 | 1,0.04144
4 | 2,0.04471
5 | 3,0.01212
6 | 4,0.01288
7 | 5,0.02232
8 | 6,0.04143
9 | 8,0.03222
10 | 10,0.08007
11 | 11,0.05541
12 | 12,0.04291
13 | 14,0.01621
14 | 15,0.04741
15 | 18,0.04954
16 | 21,0.03595
17 | 23,0.02186
18 | 24,0.03885
19 | 25,0.04164
20 | 27,0.02831
21 | 29,0.06579
22 | 30,0.01897
23 | 31,0.06697
24 | 32,0.03181
25 | 33,0.03514
26 | 37,0.03447
27 | 38,0.05280
28 | 39,0.08885
29 | 40,0.02838
30 | 41,0.03931
31 | 42,0.03544
32 | 44,0.02313
33 | 45,0.02591
34 | 47,0.03439
35 | 49,0.02282
36 | 51,0.05606
37 | 52,0.02866
38 | 53,0.04512
39 | 54,0.08186
40 | 55,0.01681
41 | 56,0.02742
42 | 57,0.04719
43 | 59,0.06081
44 | 60,0.03631
45 | 62,0.03341
46 | 63,0.06130
47 | 67,0.02999
48 | 68,0.01031
49 | 69,0.08032
50 | 70,0.02475
51 | 71,0.04025
52 | 73,0.04612
53 | 75,0.03133
54 | 76,0.02484
55 | 81,0.01701
56 | 82,0.02330
57 | 83,0.02341
58 | 86,0.03231
59 | 87,0.05000
60 | 88,0.02218
61 | 91,0.06655
62 | 92,0.02490
63 | 94,0.02113
64 | 97,0.03231
65 | 100,0.02458
66 | 102,0.03453
67 | 103,0.02473
68 | 105,0.04644
69 | 106,0.04308
70 | 108,0.04596
71 | 113,0.06040
72 | 114,0.03574
73 | 115,0.01760
74 | 118,0.03519
75 | 122,0.02640
76 | 124,0.04379
77 | 126,0.02763
78 | 128,0.02363
79 | 129,0.05051
80 | 130,0.04009
81 | 131,0.02278
82 | 132,0.02751
83 | 133,0.01800
84 | 134,0.04405
85 | 135,0.05400
86 | 136,0.02337
87 | 139,0.01717
88 | 140,0.02429
89 | 141,0.05788
90 | 146,0.03688
91 | 148,0.03516
92 | 151,0.01776
93 | 152,0.04876
94 | 154,0.02949
95 | 157,0.03676
96 | 158,0.01532
97 | 159,0.05604
98 | 161,0.02131
99 | 164,0.00924
100 | 165,0.05796
101 | 167,0.03488
102 | 168,0.04918
103 | 169,0.04153
104 | 171,0.05334
105 | 172,0.06337
106 | 173,0.03265
107 | 174,0.01935
108 | 175,0.03873
109 | 176,0.02806
110 | 178,0.03723
111 | 179,0.02317
112 | 180,0.02497
113 | 181,0.02111
114 | 182,0.05606
115 | 183,0.06036
116 | 184,0.05321
117 | 185,0.03847
118 | 187,0.04269
119 | 188,0.02551
120 | 190,0.04144
121 | 191,0.03344
122 | 192,0.05640
123 | 195,0.04065
124 | 196,0.02412
125 | 198,0.09440
126 | 201,0.04224
127 | 202,0.03604
128 | 203,0.02854
129 | 204,0.02297
130 | 207,0.03084
131 | 213,0.04167
132 | 219,0.02524
133 | 220,0.02404
134 | 222,0.03262
135 | 226,0.01460
136 | 228,0.07685
137 | 230,0.02559
138 | 231,0.02831
139 | 232,0.03209
140 | 233,0.03212
141 | 234,0.00891
142 | 236,0.05885
143 | 237,0.02689
144 | 238,0.05714
145 | 240,0.07047
146 | 241,0.01755
147 | 242,0.06908
148 | 243,0.03813
149 | 245,0.02041
150 | 246,0.04221
151 | 247,0.02802
152 | 249,0.04872
153 | 250,0.02031
154 | 251,0.01560
155 | 252,0.01588
156 | 253,0.01642
157 | 254,0.05440
158 | 255,0.08624
159 | 256,0.06203
160 | 257,0.03278
161 | 258,0.02504
162 | 259,0.05255
163 | 260,0.01118
164 | 262,0.01544
165 | 267,0.02741
166 | 268,0.03889
167 | 270,0.02099
168 | 272,0.02666
169 | 273,0.03326
170 | 274,0.01495
171 | 275,0.02311
172 | 276,0.04014
173 | 277,0.03958
174 | 278,0.04811
175 | 279,0.04887
176 | 281,0.03564
177 | 282,0.02666
178 | 283,0.05227
179 | 287,0.02920
180 | 288,0.06619
181 | 289,0.03019
182 | 290,0.01322
183 | 291,0.05087
184 | 292,0.10250
185 | 293,0.02951
186 | 294,0.02099
187 | 295,0.02181
188 | 296,0.07827
189 | 297,0.04296
190 | 298,0.03006
191 | 299,0.04876
192 | 300,0.05420
193 | 303,0.02225
194 | 304,0.04885
195 | 308,0.01451
196 | 314,0.13191
197 | 315,0.01927
198 | 317,0.04414
199 | 318,0.04896
200 | 320,0.04172
201 | 322,0.03623
202 | 323,0.01593
203 | 327,0.02656
204 | 329,0.08434
205 | 331,0.02709
206 | 332,0.11561
207 | 333,0.04616
208 | 334,0.02970
209 | 335,0.02529
210 | 336,0.04372
211 | 337,0.03587
212 | 341,0.03323
213 | 343,0.03423
214 | 345,0.04228
215 | 347,0.02640
216 | 348,0.02743
217 | 349,0.03604
218 | 352,0.02360
219 | 353,0.02738
220 | 354,0.06913
221 | 355,0.03274
222 | 356,0.02489
223 | 357,0.01605
224 | 358,0.02752
225 | 359,0.02768
226 | 361,0.06056
227 | 362,0.03260
228 | 365,0.03320
229 | 366,0.06213
230 | 368,0.03351
231 | 369,0.02525
232 | 371,0.01706
233 | 373,0.02669
234 | 376,0.08058
235 | 377,0.02993
236 | 379,0.03749
237 | 381,0.03386
238 | 382,0.02151
239 | 383,0.02524
240 | 384,0.05078
241 | 385,0.02165
242 | 386,0.04097
243 | 387,0.02219
244 | 388,0.08156
245 | 390,0.02022
246 | 391,0.02696
247 | 392,0.10585
248 | 393,0.02853
249 | 394,0.02931
250 | 396,0.03891
251 | 400,0.04818
252 | 402,0.02452
253 | 403,0.02123
254 | 405,0.03230
255 | 407,0.03592
256 | 409,0.03035
257 | 411,0.04085
258 | 412,0.02008
259 | 414,0.07313
260 | 415,0.03157
261 | 416,0.01998
262 | 418,0.01459
263 | 419,0.02777
264 | 421,0.02699
265 | 422,0.04841
266 | 423,0.03348
267 | 424,0.04760
268 | 426,0.01993
269 | 427,0.09358
270 | 428,0.03221
271 | 436,0.04739
272 | 438,0.05444
273 | 442,0.02409
274 | 443,0.02416
275 | 445,0.04393
276 | 446,0.03265
277 | 447,0.02344
278 | 450,0.07262
279 | 452,0.01743
280 | 453,0.03313
281 | 455,0.03944
282 | 458,0.05844
283 | 459,0.01610
284 | 461,0.05545
285 | 463,0.02435
286 | 467,0.04372
287 | 470,0.03145
288 | 471,0.02805
289 | 472,0.02517
290 | 476,0.06958
291 | 477,0.03871
292 | 481,0.05014
293 | 482,0.05815
294 | 483,0.10528
295 | 485,0.02912
296 | 487,0.01170
297 | 488,0.01771
298 | 490,0.01575
299 | 492,0.02053
300 | 494,0.06358
301 | 497,0.07635
302 | 499,0.02453
303 | 500,0.03010
304 | 501,0.03638
305 | 503,0.02387
306 | 505,0.02676
307 | 506,0.05130
308 | 507,0.02977
309 | 509,0.04277
310 | 510,0.04814
311 | 511,0.04070
312 | 512,0.01808
313 | 514,0.02048
314 | 515,0.02285
315 | 516,0.02087
316 | 517,0.10278
317 | 519,0.01688
318 | 520,0.03563
319 | 521,0.02999
320 | 522,0.07002
321 | 523,0.03837
322 | 527,0.03881
323 | 530,0.05502
324 | 532,0.04515
325 | 534,0.02031
326 | 536,0.05627
327 | 539,0.01929
328 | 543,0.01691
329 | 547,0.02397
330 | 550,0.03934
331 | 551,0.04820
332 | 552,0.03862
333 | 553,0.04219
334 | 555,0.04360
335 | 556,0.03110
336 | 557,0.03231
337 | 558,0.03386
338 | 559,0.03723
339 | 560,0.03201
340 | 561,0.02760
341 | 565,0.02744
342 | 566,0.04780
343 | 567,0.03678
344 | 571,0.03345
345 | 572,0.05078
346 | 574,0.01192
347 | 575,0.01854
348 | 576,0.05544
349 | 577,0.04590
350 | 578,0.01267
351 | 579,0.05386
352 | 581,0.01175
353 | 587,0.02291
354 | 589,0.02641
355 | 590,0.07509
356 | 591,0.01643
357 | 592,0.04024
358 | 593,0.02998
359 | 594,0.07079
360 | 595,0.06429
361 | 596,0.01532
362 | 597,0.04174
363 | 598,0.10215
364 | 600,0.02056
365 | 602,0.01213
366 | 605,0.02695
367 | 606,0.04343
368 | 607,0.02268
369 | 612,0.02245
370 | 613,0.01806
371 | 614,0.01223
372 | 615,0.02749
373 | 616,0.01249
374 | 617,0.02477
375 | 618,0.06806
376 | 620,0.01832
377 | 621,0.04815
378 | 622,0.01625
379 | 623,0.02761
380 | 628,0.03954
381 | 631,0.04637
382 | 632,0.02584
383 | 633,0.02652
384 | 634,0.04527
385 | 635,0.06343
386 | 637,0.04134
387 | 638,0.03402
388 | 640,0.02958
389 | 643,0.01955
390 | 644,0.05771
391 | 645,0.06161
392 | 646,0.03607
393 | 649,0.02411
394 | 651,0.07278
395 | 653,0.01506
396 | 654,0.04318
397 | 655,0.09689
398 | 656,0.03698
399 | 658,0.07992
400 | 659,0.01920
401 | 661,0.07407
402 | 662,0.03948
403 | 663,0.03730
404 | 664,0.05306
405 | 665,0.05745
406 | 667,0.03094
407 | 668,0.05080
408 | 671,0.05052
409 | 673,0.04177
410 | 674,0.05878
411 | 675,0.02724
412 | 676,0.04555
413 | 677,0.02467
414 | 678,0.03649
415 | 679,0.03734
416 | 680,0.05172
417 | 681,0.06061
418 | 682,0.05667
419 | 683,0.02471
420 | 684,0.02122
421 | 686,0.05223
422 | 687,0.05709
423 | 688,0.07305
424 | 690,0.03339
425 | 691,0.03403
426 | 692,0.06742
427 | 693,0.04543
428 | 694,0.04885
429 | 695,0.01604
430 | 696,0.01880
431 | 698,0.04097
432 | 699,0.01696
433 | 702,0.04270
434 | 703,0.01795
435 | 705,0.10553
436 | 706,0.07866
437 | 707,0.02366
438 | 708,0.01600
439 | 709,0.06039
440 | 710,0.07234
441 | 713,0.01886
442 | 714,0.02923
443 | 715,0.04931
444 | 716,0.03935
445 | 717,0.06554
446 | 718,0.02349
447 | 719,0.03435
448 | 720,0.03497
449 | 721,0.03922
450 | 723,0.03463
451 | 724,0.04552
452 | 726,0.02437
453 | 728,0.08724
454 | 730,0.02665
455 | 732,0.02749
456 | 734,0.01993
457 | 735,0.04592
458 | 737,0.02212
459 | 738,0.07010
460 | 741,0.04361
461 | 742,0.03204
462 | 743,0.02097
463 | 746,0.04247
464 | 747,0.03332
465 | 749,0.03827
466 | 750,0.07019
467 | 752,0.03944
468 | 755,0.03002
469 | 757,0.02200
470 | 758,0.04128
471 | 759,0.02672
472 | 765,0.03095
473 | 766,0.02763
474 | 767,0.09597
475 | 768,0.03164
476 | 769,0.02425
477 | 770,0.01013
478 | 771,0.02834
479 | 772,0.01642
480 | 773,0.06211
481 | 775,0.03548
482 | 776,0.03391
483 | 778,0.03863
484 | 779,0.02139
485 | 780,0.02466
486 | 782,0.04150
487 | 783,0.04347
488 | 784,0.02978
489 | 785,0.01618
490 | 786,0.02217
491 | 788,0.02727
492 | 789,0.03284
493 | 792,0.08908
494 | 793,0.02436
495 | 794,0.03590
496 | 797,0.06842
497 | 798,0.02186
498 | 799,0.07458
499 | 800,0.03901
500 | 801,0.06658
501 | 802,0.07883
502 | 805,0.03134
503 | 807,0.04379
504 | 808,0.01580
505 | 811,0.02507
506 | 813,0.01636
507 | 814,0.01622
508 | 815,0.09209
509 | 816,0.05064
510 | 817,0.01943
511 | 819,0.01776
512 | 821,0.04597
513 | 823,0.02325
514 | 825,0.03866
515 | 826,0.02009
516 | 827,0.02922
517 | 828,0.02679
518 | 830,0.02659
519 | 831,0.04254
520 | 833,0.01555
521 | 834,0.03503
522 | 835,0.02005
523 | 836,0.03511
524 | 837,0.06837
525 | 838,0.02781
526 | 840,0.03809
527 | 842,0.01387
528 | 844,0.02166
529 | 845,0.03885
530 | 847,0.04782
531 | 848,0.07800
532 | 850,0.01724
533 | 851,0.02992
534 | 852,0.02443
535 | 853,0.07526
536 | 855,0.02112
537 | 856,0.05708
538 | 857,0.03467
539 | 859,0.04996
540 | 860,0.05007
541 | 862,0.07559
542 | 863,0.03387
543 | 864,0.02991
544 | 867,0.02467
545 | 868,0.02546
546 | 869,0.02098
547 | 871,0.01852
548 | 873,0.03233
549 | 875,0.04759
550 | 876,0.05134
551 | 877,0.01954
552 | 879,0.04410
553 | 880,0.08325
554 | 883,0.02379
555 | 884,0.02805
556 | 885,0.02906
557 | 886,0.01317
558 | 887,0.01950
559 | 888,0.01700
560 | 891,0.03976
561 | 893,0.02057
562 | 895,0.04284
563 | 897,0.04789
564 | 898,0.08516
565 | 901,0.02540
566 | 902,0.03362
567 | 904,0.09219
568 | 905,0.02703
569 | 907,0.07311
570 | 908,0.03470
571 | 909,0.05096
572 | 910,0.01889
573 | 911,0.01486
574 | 913,0.04781
575 | 914,0.01938
576 | 915,0.09150
577 | 916,0.02468
578 | 917,0.01214
579 | 919,0.02731
580 | 920,0.02437
581 | 923,0.03158
582 | 924,0.02690
583 | 925,0.05978
584 | 926,0.02650
585 | 928,0.02753
586 | 929,0.01413
587 | 932,0.05838
588 | 935,0.04857
589 | 936,0.04494
590 | 937,0.03494
591 | 938,0.05145
592 | 939,0.02889
593 | 940,0.04280
594 | 941,0.02011
595 | 944,0.05272
596 | 945,0.02647
597 | 946,0.05497
598 | 947,0.04094
599 | 949,0.02412
600 | 950,0.05656
601 | 952,0.02609
602 | 953,0.04982
603 | 955,0.02237
604 | 956,0.03098
605 | 957,0.01159
606 | 960,0.01718
607 | 961,0.03180
608 | 962,0.03164
609 | 963,0.01360
610 | 964,0.04082
611 | 967,0.04039
612 | 975,0.03942
613 | 976,0.02425
614 | 977,0.02446
615 | 978,0.04201
616 | 980,0.01737
617 | 985,0.05860
618 | 986,0.06464
619 | 987,0.01257
620 | 989,0.05181
621 | 992,0.02417
622 | 994,0.06469
623 | 995,0.02592
624 | 996,0.01117
625 | 997,0.07424
626 | 998,0.03120
627 | 999,0.06613
628 | 1000,0.03701
629 | 1004,0.02156
630 | 1005,0.04597
631 | 1006,0.01905
632 | 1007,0.09843
633 | 1010,0.03406
634 | 1011,0.05903
635 | 1012,0.01125
636 | 1013,0.03215
637 | 1014,0.02146
638 | 1016,0.04120
639 | 1018,0.03244
640 | 1019,0.01761
641 | 1020,0.03934
642 | 1022,0.02265
643 | 1023,0.04233
644 | 1024,0.03350
645 | 1025,0.03836
646 | 1029,0.04124
647 | 1031,0.02080
648 | 1032,0.02527
649 | 1033,0.03888
650 | 1034,0.07779
651 | 1035,0.03626
652 | 1036,0.02586
653 | 1037,0.02257
654 | 1038,0.03574
655 | 1039,0.02851
656 | 1041,0.02009
657 | 1043,0.02126
658 | 1049,0.05096
659 | 1050,0.10280
660 | 1051,0.03985
661 | 1052,0.03240
662 | 1053,0.02452
663 | 1054,0.03106
664 | 1056,0.03256
665 | 1057,0.06544
666 | 1060,0.05280
667 | 1061,0.01878
668 | 1062,0.03111
669 | 1063,0.05339
670 | 1064,0.03513
671 | 1065,0.01427
672 | 1066,0.01707
673 | 1068,0.05136
674 | 1073,0.04261
675 | 1075,0.01978
676 | 1079,0.02872
677 | 1081,0.03594
678 | 1083,0.02759
679 | 1085,0.02505
680 | 1086,0.02080
681 | 1087,0.03001
682 | 1088,0.01253
683 | 1089,0.01831
684 | 1090,0.07080
685 | 1091,0.02897
686 | 1092,0.01559
687 | 1095,0.02970
688 | 1098,0.03193
689 | 1099,0.04034
690 | 1100,0.04200
691 | 1101,0.05324
692 | 1103,0.04856
693 | 1104,0.03682
694 | 1106,0.01477
695 | 1107,0.04158
696 | 1108,0.07769
697 | 1109,0.01806
698 | 1113,0.04745
699 | 1114,0.03662
700 | 1116,0.02300
701 | 1117,0.04743
702 | 1118,0.03422
703 | 1119,0.01699
704 | 1120,0.02695
705 | 1121,0.02170
706 | 1122,0.01460
707 | 1123,0.04144
708 | 1125,0.01723
709 | 1126,0.03946
710 | 1127,0.01688
711 | 1128,0.05495
712 | 1129,0.04411
713 | 1130,0.02320
714 | 1132,0.03043
715 | 1133,0.06522
716 | 1136,0.01580
717 | 1137,0.07199
718 | 1140,0.03043
719 | 1142,0.05902
720 | 1144,0.02766
721 | 1145,0.02560
722 | 1146,0.01444
723 | 1148,0.04655
724 | 1150,0.02188
725 | 1151,0.02906
726 | 1153,0.05440
727 | 1154,0.03210
728 | 1155,0.04355
729 | 1156,0.04935
730 | 1157,0.02149
731 | 1158,0.03199
732 | 1160,0.03335
733 | 1161,0.01757
734 | 1167,0.02403
735 | 1168,0.02445
736 | 1170,0.02970
737 | 1171,0.06405
738 | 1173,0.03441
739 | 1174,0.09087
740 | 1175,0.03160
741 | 1176,0.05257
742 | 1177,0.02082
743 | 1178,0.03551
744 | 1179,0.01621
745 | 1180,0.03987
746 | 1181,0.03936
747 | 1182,0.02568
748 | 1183,0.03271
749 | 1185,0.03281
750 | 1188,0.04380
751 | 1189,0.01728
752 | 1190,0.03732
753 | 1191,0.04364
754 | 1192,0.06011
755 | 1194,0.03269
756 | 1197,0.04015
757 | 1198,0.03468
758 | 1199,0.04524
759 | 1200,0.03107
760 | 1203,0.03234
761 | 1204,0.05834
762 | 1207,0.04653
763 | 1208,0.04914
764 | 1209,0.01564
765 | 1210,0.06671
766 | 1211,0.02344
767 | 1213,0.02197
768 | 1214,0.02886
769 | 1215,0.02680
770 | 1216,0.07393
771 | 1217,0.03690
772 | 1218,0.02186
773 | 1219,0.01886
774 | 1223,0.04924
775 | 1225,0.02442
776 | 1226,0.02637
777 | 1228,0.11694
778 | 1229,0.02809
779 | 1231,0.08302
780 | 1232,0.06685
781 | 1233,0.03364
782 | 1235,0.02203
783 | 1238,0.02043
784 | 1239,0.01155
785 | 1240,0.03061
786 | 1241,0.01816
787 | 1242,0.02007
788 | 1243,0.03715
789 | 1244,0.02635
790 | 1246,0.07828
791 | 1249,0.07242
792 | 1250,0.01425
793 | 1252,0.02029
794 | 1254,0.05279
795 | 1255,0.02235
796 | 1256,0.03417
797 | 1257,0.02345
798 | 1258,0.02497
799 | 1259,0.06803
800 | 1260,0.02638
801 | 1262,0.03779
802 | 1263,0.01859
803 | 1264,0.01574
804 | 1265,0.09114
805 | 1266,0.01655
806 | 1269,0.04064
807 | 1270,0.02078
808 | 1271,0.01487
809 | 1272,0.06826
810 | 1273,0.02605
811 | 1274,0.02466
812 | 1277,0.03171
813 | 1278,0.02826
814 | 1281,0.03249
815 | 1282,0.04850
816 | 1283,0.02574
817 | 1284,0.02861
818 | 1285,0.01621
819 | 1286,0.05682
820 | 1289,0.01620
821 | 1291,0.02023
822 | 1292,0.02541
823 | 1293,0.06230
824 | 1294,0.05555
825 | 1295,0.07279
826 | 1297,0.03061
827 | 1298,0.05682
828 | 1301,0.03488
829 | 1304,0.03088
830 | 1305,0.03700
831 | 1307,0.06508
832 | 1308,0.03646
833 | 1309,0.07957
834 | 1313,0.06383
835 | 1314,0.04944
836 | 1316,0.03042
837 | 1317,0.02856
838 | 1318,0.04821
839 | 1320,0.02688
840 | 1321,0.01508
841 | 1322,0.02222
842 | 1323,0.02267
843 | 1325,0.03708
844 | 1330,0.02305
845 | 1331,0.03183
846 | 1334,0.04181
847 | 1337,0.01744
848 | 1339,0.03185
849 | 1340,0.03310
850 | 1341,0.02919
851 | 1343,0.01679
852 | 1344,0.01251
853 | 1347,0.04312
854 | 1348,0.06515
855 | 1350,0.03106
856 | 1351,0.01983
857 | 1355,0.03342
858 | 1356,0.04156
859 | 1357,0.02142
860 | 1358,0.02759
861 | 1360,0.01803
862 | 1363,0.05023
863 | 1364,0.01985
864 | 1366,0.02524
865 | 1367,0.05587
866 | 1368,0.06105
867 | 1369,0.02605
868 | 1370,0.03596
869 | 1375,0.03311
870 | 1376,0.01014
871 | 1378,0.09283
872 | 1379,0.01883
873 | 1380,0.04493
874 | 1384,0.00900
875 | 1385,0.01953
876 | 1386,0.03573
877 | 1388,0.02532
878 | 1389,0.01541
879 | 1391,0.03710
880 | 1392,0.01409
881 | 1393,0.04297
882 | 1395,0.04611
883 | 1396,0.05699
884 | 1399,0.03929
885 | 1400,0.06285
886 | 1401,0.03260
887 | 1404,0.01714
888 | 1405,0.02361
889 | 1406,0.03298
890 | 1407,0.03045
891 | 1409,0.02705
892 | 1411,0.04077
893 | 1414,0.05028
894 | 1415,0.03792
895 | 1416,0.06025
896 | 1418,0.08074
897 | 1419,0.03191
898 | 1420,0.01486
899 | 1422,0.05260
900 | 1424,0.01511
901 | 1426,0.01697
902 | 1427,0.02725
903 | 1428,0.03972
904 | 1429,0.03264
905 | 1431,0.02269
906 | 1432,0.08291
907 | 1434,0.04273
908 | 1435,0.02937
909 | 1439,0.04123
910 | 1440,0.06025
911 | 1441,0.05331
912 | 1442,0.03888
913 | 1446,0.04092
914 | 1447,0.05683
915 | 1451,0.01567
916 | 1452,0.01120
917 | 1454,0.03851
918 | 1457,0.01890
919 | 1458,0.02732
920 | 1459,0.05225
921 | 1461,0.04182
922 | 1462,0.03933
923 | 1465,0.01939
924 | 1468,0.04955
925 | 1470,0.04732
926 | 1471,0.03285
927 | 1472,0.03100
928 | 1476,0.02461
929 | 1482,0.01910
930 | 1485,0.02481
931 | 1486,0.04904
932 | 1487,0.02902
933 | 1489,0.02726
934 | 1492,0.03804
935 | 1494,0.11900
936 | 1495,0.02512
937 | 1497,0.04039
938 | 1499,0.02702
939 | 1500,0.02204
940 | 1501,0.02079
941 | 1505,0.08787
942 | 1507,0.02222
943 | 1509,0.08587
944 | 1511,0.01696
945 | 1513,0.02131
946 | 1514,0.02933
947 | 1515,0.08499
948 | 1517,0.09652
949 | 1520,0.04254
950 | 1522,0.02006
951 | 1523,0.01415
952 | 1524,0.07593
953 | 1526,0.02294
954 | 1527,0.01957
955 | 1528,0.03169
956 | 1531,0.02280
957 | 1533,0.04953
958 | 1534,0.04523
959 | 1535,0.10260
960 | 1538,0.03656
961 | 1541,0.04766
962 | 1542,0.02079
963 | 1543,0.02560
964 | 1546,0.04148
965 | 1548,0.03220
966 | 1549,0.03512
967 | 1550,0.02955
968 | 1551,0.04845
969 | 1553,0.07285
970 | 1558,0.02008
971 | 1560,0.05832
972 | 1561,0.03146
973 | 1562,0.01969
974 | 1565,0.05840
975 | 1566,0.06756
976 | 1567,0.02081
977 | 1568,0.02750
978 | 1569,0.03025
979 | 1570,0.04384
980 | 1573,0.01726
981 | 1576,0.03025
982 | 1577,0.01830
983 | 1579,0.03963
984 | 1580,0.05791
985 | 1581,0.04229
986 | 1584,0.03786
987 | 1586,0.02127
988 | 1587,0.02610
989 | 1590,0.02389
990 | 1591,0.02505
991 | 1594,0.03956
992 | 1595,0.01626
993 | 1596,0.02238
994 | 1601,0.02870
995 | 1605,0.05412
996 | 1607,0.11038
997 | 1608,0.02318
998 | 1610,0.01953
999 | 1611,0.04542
1000 | 1612,0.04120
1001 | 1616,0.04978
1002 | 1617,0.04062
1003 | 1618,0.02456
1004 | 1620,0.02379
1005 | 1621,0.03176
1006 | 1623,0.03354
1007 | 1624,0.02115
1008 | 1625,0.03149
1009 | 1626,0.02625
1010 | 1628,0.02338
1011 | 1630,0.02120
1012 | 1632,0.02577
1013 | 1633,0.03295
1014 | 1635,0.03600
1015 | 1637,0.03157
1016 | 1638,0.08234
1017 | 1639,0.04089
1018 | 1640,0.01638
1019 | 1643,0.07820
1020 | 1644,0.03485
1021 | 1645,0.04847
1022 | 1647,0.01840
1023 | 1649,0.01905
1024 | 1651,0.03170
1025 | 1652,0.02815
1026 | 1654,0.02520
1027 | 1655,0.02012
1028 | 1656,0.05692
1029 | 1658,0.04691
1030 | 1660,0.01527
1031 | 1661,0.01722
1032 | 1662,0.06574
1033 | 1663,0.02248
1034 | 1664,0.02735
1035 | 1665,0.07235
1036 | 1668,0.05515
1037 | 1669,0.02090
1038 | 1673,0.04758
1039 | 1674,0.03142
1040 | 1675,0.01473
1041 | 1676,0.03533
1042 | 1677,0.02292
1043 | 1679,0.06132
1044 | 1682,0.05306
1045 | 1683,0.05762
1046 | 1685,0.08564
1047 | 1687,0.02051
1048 | 1689,0.03007
1049 | 1690,0.02221
1050 | 1694,0.03107
1051 | 1696,0.01888
1052 | 1697,0.03333
1053 | 1699,0.10530
1054 | 1700,0.06503
1055 | 1701,0.01959
1056 | 1702,0.03387
1057 | 1704,0.02651
1058 | 1705,0.04038
1059 | 1708,0.02783
1060 | 1710,0.02366
1061 | 1712,0.03362
1062 | 1719,0.03960
1063 | 1720,0.02764
1064 | 1722,0.04385
1065 | 1723,0.04205
1066 | 1724,0.02739
1067 | 1725,0.04597
1068 | 1726,0.03925
1069 | 1727,0.02979
1070 | 1728,0.03004
1071 | 1732,0.03149
1072 | 1733,0.02263
1073 | 1734,0.07838
1074 | 1735,0.03940
1075 | 1739,0.03538
1076 | 1740,0.02762
1077 | 1741,0.06053
1078 | 1742,0.05751
1079 | 1743,0.02372
1080 | 1744,0.03840
1081 | 1746,0.03539
1082 | 1750,0.03102
1083 | 1753,0.01960
1084 | 1755,0.02124
1085 | 1758,0.04367
1086 | 1760,0.01685
1087 | 1763,0.02492
1088 | 1765,0.02407
1089 | 1766,0.04929
1090 | 1769,0.02547
1091 | 1771,0.02254
1092 | 1772,0.01935
1093 | 1775,0.02444
1094 | 1776,0.01629
1095 | 1778,0.03376
1096 | 1780,0.04069
1097 | 1781,0.02509
1098 | 1783,0.01377
1099 | 1785,0.03729
1100 | 1788,0.04074
1101 | 1791,0.02454
1102 | 1793,0.03332
1103 | 1794,0.04379
1104 | 1796,0.02096
1105 | 1797,0.03326
1106 | 1798,0.04255
1107 | 1800,0.03213
1108 | 1803,0.05979
1109 | 1804,0.03461
1110 | 1810,0.02444
1111 | 1812,0.01889
1112 | 1813,0.04627
1113 | 1816,0.02693
1114 | 1817,0.03644
1115 | 1818,0.02420
1116 | 1819,0.04746
1117 | 1820,0.01894
1118 | 1821,0.03974
1119 | 1822,0.01840
1120 | 1823,0.03803
1121 | 1826,0.02625
1122 | 1827,0.03683
1123 | 1828,0.03990
1124 | 1829,0.02384
1125 | 1830,0.02119
1126 | 1831,0.01907
1127 | 1834,0.01510
1128 | 1836,0.03371
1129 | 1839,0.02866
1130 | 1841,0.01911
1131 | 1846,0.03699
1132 | 1847,0.04394
1133 | 1850,0.08791
1134 | 1852,0.04969
1135 | 1853,0.02031
1136 | 1854,0.18610
1137 | 1855,0.03066
1138 | 1857,0.06965
1139 | 1858,0.02500
1140 | 1860,0.03428
1141 | 1863,0.03577
1142 | 1866,0.04337
1143 | 1868,0.03097
1144 | 1869,0.02622
1145 | 1870,0.01628
1146 | 1874,0.01855
1147 | 1875,0.01976
1148 | 1877,0.04172
1149 | 1878,0.02434
1150 | 1879,0.02540
1151 | 1881,0.03618
1152 | 1882,0.04389
1153 | 1885,0.03281
1154 | 1886,0.06993
1155 | 1887,0.05249
1156 | 1888,0.01084
1157 | 1889,0.04269
1158 | 1890,0.05946
1159 | 1891,0.08268
1160 | 1892,0.02991
1161 | 1894,0.03322
1162 | 1895,0.05913
1163 | 1896,0.05797
1164 | 1897,0.02325
1165 | 1898,0.08161
1166 | 1901,0.03024
1167 | 1904,0.01699
1168 | 1905,0.02333
1169 | 1906,0.01479
1170 | 1907,0.03725
1171 | 1908,0.04444
1172 | 1910,0.03598
1173 | 1911,0.02761
1174 | 1914,0.03610
1175 | 1916,0.01645
1176 | 1919,0.07094
1177 | 1920,0.07703
1178 | 1921,0.02924
1179 | 1922,0.02717
1180 | 1923,0.08017
1181 | 1924,0.04572
1182 | 1926,0.03758
1183 | 1928,0.02600
1184 | 1929,0.03406
1185 | 1931,0.01943
1186 | 1932,0.04594
1187 | 1933,0.01721
1188 | 1934,0.03091
1189 | 1935,0.03034
1190 | 1936,0.02368
1191 | 1937,0.03773
1192 | 1939,0.02337
1193 | 1940,0.05754
1194 | 1942,0.03267
1195 | 1944,0.03301
1196 | 1945,0.05053
1197 | 1946,0.08466
1198 | 1947,0.02095
1199 | 1948,0.09667
1200 | 1954,0.03237
1201 | 1955,0.12327
1202 | 1956,0.01799
1203 | 1958,0.02369
1204 | 1961,0.03528
1205 | 1963,0.01463
1206 | 1967,0.01302
1207 | 1968,0.02309
1208 | 1970,0.01505
1209 | 1971,0.01388
1210 | 1973,0.07022
1211 | 1979,0.05201
1212 | 1980,0.02803
1213 | 1982,0.02774
1214 | 1984,0.03895
1215 | 1986,0.02834
1216 | 1987,0.06350
1217 | 1989,0.01557
1218 | 1990,0.06079
1219 | 1991,0.04899
1220 | 1993,0.02953
1221 | 1994,0.03348
1222 | 1995,0.04063
1223 | 1996,0.03609
1224 | 1999,0.02954
1225 | 2001,0.02433
1226 | 2002,0.01607
1227 | 2005,0.03173
1228 | 2006,0.02069
1229 | 2008,0.02990
1230 | 2009,0.03549
1231 | 2011,0.02083
1232 | 2012,0.05962
1233 | 2013,0.01305
1234 | 2015,0.04536
1235 | 2017,0.01439
1236 | 2019,0.01149
1237 | 2020,0.02529
1238 | 2022,0.04364
1239 | 2023,0.02356
1240 | 2024,0.02169
1241 | 2026,0.02010
1242 | 2027,0.06065
1243 | 2028,0.02183
1244 | 2029,0.04914
1245 | 2030,0.03253
1246 | 2032,0.02514
1247 | 2033,0.02164
1248 | 2035,0.05274
1249 | 2036,0.02080
1250 | 2037,0.05844
1251 | 2038,0.08426
1252 | 2039,0.04625
1253 | 2040,0.07838
1254 | 2041,0.02629
1255 | 2042,0.07286
1256 | 2043,0.03916
1257 | 2044,0.03479
1258 | 2046,0.02246
1259 | 2047,0.02598
1260 | 2048,0.04410
1261 | 2049,0.03661
1262 | 2050,0.04388
1263 | 2054,0.02799
1264 | 2061,0.03434
1265 | 2063,0.03850
1266 | 2064,0.02885
1267 | 2065,0.02440
1268 | 2067,0.03909
1269 | 2068,0.01597
1270 | 2069,0.03511
1271 | 2070,0.02314
1272 | 2072,0.01912
1273 | 2073,0.04392
1274 | 2075,0.01316
1275 | 2076,0.02826
1276 | 2077,0.02016
1277 | 2078,0.03216
1278 | 2079,0.00953
1279 | 2081,0.01818
1280 | 2082,0.14772
1281 | 2085,0.03580
1282 | 2086,0.06479
1283 | 2087,0.02997
1284 | 2088,0.07133
1285 | 2089,0.03680
1286 | 2091,0.04639
1287 | 2092,0.03676
1288 | 2093,0.06455
1289 | 2094,0.03480
1290 | 2095,0.03631
1291 | 2096,0.06342
1292 | 2097,0.01626
1293 | 2099,0.03048
1294 | 2100,0.09279
1295 | 2103,0.02365
1296 | 2104,0.01370
1297 | 2105,0.02182
1298 | 2106,0.09975
1299 | 2107,0.01247
1300 | 2110,0.03953
1301 | 2113,0.01914
1302 | 2114,0.09134
1303 | 2115,0.03101
1304 | 2118,0.05084
1305 | 2119,0.02450
1306 | 2120,0.03452
1307 | 2121,0.01856
1308 | 2123,0.01854
1309 | 2124,0.05193
1310 | 2128,0.08822
1311 | 2129,0.07647
1312 | 2131,0.07093
1313 | 2132,0.01443
1314 | 2133,0.03675
1315 | 2134,0.04753
1316 | 2137,0.03993
1317 | 2140,0.02768
1318 | 2141,0.03480
1319 | 2142,0.02754
1320 | 2146,0.02479
1321 | 2148,0.07366
1322 | 2149,0.02669
1323 | 2151,0.06428
1324 | 2152,0.04903
1325 | 2153,0.08221
1326 | 2154,0.05095
1327 | 2155,0.03757
1328 | 2158,0.01337
1329 | 2159,0.07610
1330 | 2160,0.03450
1331 | 2161,0.02773
1332 | 2164,0.04688
1333 | 2166,0.03006
1334 | 2167,0.02328
1335 | 2170,0.04343
1336 | 2171,0.04183
1337 | 2172,0.02637
1338 | 2173,0.05864
1339 | 2175,0.05302
1340 | 2177,0.03572
1341 | 2178,0.05070
1342 | 2179,0.04691
1343 | 2180,0.03233
1344 | 2182,0.04040
1345 | 2183,0.03527
1346 | 2184,0.04318
1347 | 2186,0.02628
1348 | 2187,0.04550
1349 | 2188,0.02010
1350 | 2189,0.02038
1351 | 2191,0.03573
1352 | 2192,0.08421
1353 | 2194,0.02844
1354 | 2196,0.06681
1355 | 2197,0.03776
1356 | 2198,0.02046
1357 | 2199,0.03627
1358 | 2200,0.01737
1359 | 2201,0.02054
1360 | 2202,0.01915
1361 | 2205,0.01558
1362 | 2206,0.02874
1363 | 2207,0.03928
1364 | 2208,0.02070
1365 | 2212,0.03171
1366 | 2215,0.04089
1367 | 2216,0.02330
1368 | 2217,0.02192
1369 | 2218,0.02667
1370 | 2220,0.04200
1371 | 2221,0.11872
1372 | 2223,0.03914
1373 | 2225,0.02434
1374 | 2226,0.04778
1375 | 2231,0.02318
1376 | 2232,0.03173
1377 | 2233,0.02443
1378 | 2234,0.04093
1379 | 2235,0.02184
1380 | 2236,0.03031
1381 | 2237,0.03512
1382 | 2239,0.04355
1383 | 2240,0.01799
1384 | 2242,0.03650
1385 | 2247,0.02799
1386 | 2248,0.02909
1387 | 2250,0.02115
1388 | 2251,0.03101
1389 | 2255,0.05059
1390 | 2257,0.02267
1391 | 2258,0.06179
1392 | 2259,0.01469
1393 | 2260,0.02470
1394 | 2261,0.03603
1395 | 2263,0.03288
1396 | 2265,0.01978
1397 | 2266,0.03241
1398 | 2267,0.06480
1399 | 2268,0.02726
1400 | 2269,0.03881
1401 | 2270,0.01922
1402 | 2271,0.05014
1403 | 2272,0.01964
1404 | 2273,0.04621
1405 | 2275,0.02583
1406 | 2276,0.05456
1407 | 2277,0.02913
1408 | 2278,0.04062
1409 | 2279,0.04318
1410 | 2282,0.01056
1411 | 2285,0.02831
1412 | 2286,0.04986
1413 | 2289,0.03368
1414 | 2294,0.05925
1415 | 2296,0.02760
1416 | 2297,0.05229
1417 | 2299,0.05839
1418 | 2300,0.06978
1419 | 2301,0.07398
1420 | 2302,0.03489
1421 | 2305,0.03488
1422 | 2306,0.06845
1423 | 2307,0.03551
1424 | 2308,0.04586
1425 | 2310,0.09188
1426 | 2312,0.04032
1427 | 2316,0.13307
1428 | 2317,0.05738
1429 | 2319,0.05059
1430 | 2320,0.02815
1431 | 2322,0.02651
1432 | 2323,0.01914
1433 | 2324,0.02292
1434 | 2326,0.04383
1435 | 2327,0.01793
1436 | 2328,0.08132
1437 | 2329,0.03479
1438 | 2331,0.02833
1439 | 2335,0.03847
1440 | 2338,0.01516
1441 | 2339,0.08670
1442 | 2340,0.03910
1443 | 2341,0.02947
1444 | 2342,0.04363
1445 | 2345,0.07966
1446 | 2349,0.02595
1447 | 2350,0.04171
1448 | 2351,0.05676
1449 | 2353,0.04785
1450 | 2354,0.03843
1451 | 2355,0.05748
1452 | 2356,0.03725
1453 | 2357,0.06773
1454 | 2359,0.02922
1455 | 2360,0.04748
1456 | 2361,0.04982
1457 | 2362,0.03142
1458 | 2364,0.02467
1459 | 2366,0.03645
1460 | 2368,0.05112
1461 | 2369,0.03722
1462 | 2370,0.05490
1463 | 2372,0.04913
1464 | 2376,0.02308
1465 | 2381,0.03331
1466 | 2383,0.02182
1467 | 2384,0.03297
1468 | 2385,0.04905
1469 | 2386,0.03086
1470 | 2388,0.03606
1471 | 2390,0.03177
1472 | 2392,0.03277
1473 | 2394,0.09278
1474 | 2395,0.01651
1475 | 2396,0.02510
1476 | 2397,0.01541
1477 | 2399,0.07763
1478 | 2401,0.03953
1479 | 2402,0.01491
1480 | 2403,0.04853
1481 | 2405,0.07792
1482 | 2406,0.03941
1483 | 2407,0.03384
1484 | 2408,0.01594
1485 | 2410,0.01622
1486 | 2412,0.02761
1487 | 2415,0.04266
1488 | 2417,0.03926
1489 | 2418,0.03446
1490 | 2420,0.03556
1491 | 2421,0.07298
1492 | 2425,0.05550
1493 | 2426,0.01277
1494 | 2427,0.02363
1495 | 2428,0.01556
1496 | 2429,0.06094
1497 | 2434,0.05673
1498 | 2435,0.02433
1499 | 2436,0.01766
1500 | 2437,0.03066
1501 | 2438,0.04824
1502 | 2439,0.03002
1503 | 2441,0.02494
1504 | 2443,0.02689
1505 | 2444,0.02570
1506 | 2445,0.03318
1507 | 2446,0.03554
1508 | 2449,0.02069
1509 | 2452,0.04216
1510 | 2453,0.15202
1511 | 2454,0.02941
1512 | 2455,0.03467
1513 | 2456,0.03428
1514 | 2457,0.02816
1515 | 2460,0.06577
1516 | 2461,0.02251
1517 | 2463,0.01863
1518 | 2465,0.02050
1519 | 2466,0.03224
1520 | 2468,0.03705
1521 | 2469,0.04173
1522 | 2471,0.03402
1523 | 2472,0.04261
1524 | 2473,0.02770
1525 | 2474,0.03049
1526 | 2475,0.01824
1527 | 2476,0.02447
1528 | 2477,0.02591
1529 | 2478,0.03653
1530 | 2480,0.00902
1531 | 2483,0.02306
1532 | 2484,0.01687
1533 | 2486,0.05842
1534 | 2488,0.02627
1535 | 2489,0.03848
1536 | 2490,0.02551
1537 | 2495,0.04593
1538 | 2496,0.02896
1539 | 2497,0.08558
1540 | 2498,0.03483
1541 | 2503,0.02337
1542 | 2505,0.04531
1543 | 2506,0.03684
1544 | 2507,0.04413
1545 | 2508,0.08005
1546 | 2509,0.02773
1547 | 2514,0.04419
1548 | 2515,0.03914
1549 | 2521,0.04136
1550 | 2522,0.01942
1551 | 2523,0.01860
1552 | 2524,0.02113
1553 | 2526,0.03886
1554 | 2527,0.02480
1555 | 2529,0.04708
1556 | 2530,0.02306
1557 | 2531,0.04695
1558 | 2532,0.02377
1559 | 2534,0.08889
1560 | 2535,0.02567
1561 | 2536,0.01757
1562 | 2537,0.03323
1563 | 2539,0.03548
1564 | 2542,0.02591
1565 | 2545,0.01594
1566 | 2546,0.03801
1567 | 2548,0.07908
1568 | 2549,0.02327
1569 | 2550,0.03315
1570 | 2552,0.01486
1571 | 2554,0.02728
1572 | 2555,0.05176
1573 | 2558,0.02162
1574 | 2560,0.05188
1575 | 2563,0.02375
1576 | 2564,0.06689
1577 | 2565,0.02429
1578 | 2571,0.07415
1579 | 2574,0.06553
1580 | 2575,0.03542
1581 | 2576,0.03756
1582 | 2577,0.06513
1583 | 2578,0.04819
1584 | 2579,0.05827
1585 | 2581,0.06095
1586 | 2582,0.04895
1587 | 2583,0.02284
1588 | 2585,0.03617
1589 | 2586,0.02896
1590 | 2587,0.05524
1591 | 2591,0.03477
1592 | 2592,0.01070
1593 | 2594,0.03024
1594 | 2595,0.05240
1595 | 2597,0.04208
1596 | 2598,0.04997
1597 | 2599,0.02180
1598 | 2600,0.11646
1599 | 2602,0.04287
1600 | 2603,0.03177
1601 | 2605,0.02445
1602 | 2606,0.01486
1603 | 2607,0.04734
1604 | 2608,0.04496
1605 | 2609,0.02620
1606 | 2610,0.08625
1607 | 2611,0.02986
1608 | 2613,0.02657
1609 | 2615,0.01503
1610 | 2616,0.03210
1611 | 2621,0.04083
1612 | 2623,0.02602
1613 | 2624,0.03890
1614 | 2625,0.03515
1615 | 2626,0.02880
1616 | 2627,0.03249
1617 | 2628,0.03766
1618 | 2629,0.04691
1619 | 2630,0.01955
1620 | 2631,0.07763
1621 | 2633,0.02445
1622 | 2634,0.05629
1623 | 2635,0.02919
1624 | 2636,0.04564
1625 | 2638,0.04718
1626 | 2639,0.02918
1627 | 2640,0.02610
1628 | 2642,0.03899
1629 | 2643,0.03428
1630 | 2646,0.04707
1631 | 2647,0.04342
1632 | 2648,0.06607
1633 | 2649,0.01751
1634 | 2650,0.05442
1635 | 2652,0.02461
1636 | 2654,0.03423
1637 | 2658,0.02497
1638 | 2659,0.03095
1639 | 2660,0.03522
1640 | 2661,0.05149
1641 | 2662,0.04748
1642 | 2663,0.05314
1643 | 2665,0.02921
1644 | 2666,0.00815
1645 | 2667,0.02825
1646 | 2668,0.02114
1647 | 2671,0.03620
1648 | 2673,0.03617
1649 | 2674,0.05153
1650 | 2675,0.03802
1651 | 2676,0.03988
1652 | 2677,0.01688
1653 | 2678,0.02060
1654 | 2681,0.02660
1655 | 2683,0.01784
1656 | 2685,0.06764
1657 | 2687,0.03688
1658 | 2690,0.09285
1659 | 2692,0.02192
1660 | 2693,0.02034
1661 | 2694,0.01069
1662 | 2695,0.03158
1663 | 2696,0.07735
1664 | 2697,0.04363
1665 | 2698,0.11074
1666 | 2699,0.03928
1667 | 2701,0.03489
1668 | 2702,0.03522
1669 | 2703,0.10895
1670 | 2705,0.05067
1671 | 2711,0.04721
1672 | 2715,0.03422
1673 | 2720,0.06856
1674 | 2721,0.06344
1675 | 2722,0.03735
1676 | 2724,0.03699
1677 | 2726,0.02510
1678 | 2729,0.01851
1679 | 2730,0.01236
1680 | 2731,0.03278
1681 | 2732,0.02143
1682 | 2733,0.03687
1683 | 2734,0.01408
1684 | 2736,0.03277
1685 | 2738,0.01579
1686 | 2739,0.04033
1687 | 2740,0.02712
1688 | 2741,0.04617
1689 | 2743,0.02233
1690 | 2746,0.03697
1691 | 2747,0.03744
1692 | 2750,0.10187
1693 | 2751,0.04497
1694 | 2752,0.02853
1695 | 2753,0.02293
1696 | 2755,0.03898
1697 | 2756,0.02402
1698 | 2757,0.01548
1699 | 2761,0.04100
1700 | 2763,0.04147
1701 | 2764,0.07384
1702 | 2765,0.02305
1703 | 2767,0.04271
1704 | 2769,0.04552
1705 | 2772,0.03074
1706 | 2774,0.02755
1707 | 2776,0.01548
1708 | 2777,0.00962
1709 | 2778,0.02529
1710 | 2782,0.04300
1711 | 2783,0.01772
1712 | 2786,0.01152
1713 | 2790,0.03286
1714 | 2791,0.01369
1715 | 2792,0.01927
1716 | 2793,0.03661
1717 | 2794,0.03748
1718 | 2795,0.03639
1719 | 2796,0.03975
1720 | 2797,0.03756
1721 | 2800,0.03887
1722 | 2801,0.01699
1723 | 2802,0.02237
1724 | 2804,0.02152
1725 | 2805,0.04622
1726 | 2807,0.01456
1727 | 2808,0.03980
1728 | 2811,0.07481
1729 | 2812,0.03281
1730 | 2813,0.03020
1731 | 2814,0.07077
1732 | 2815,0.05038
1733 | 2817,0.03541
1734 | 2819,0.04293
1735 | 2821,0.04454
1736 | 2825,0.07322
1737 | 2828,0.02573
1738 | 2829,0.02087
1739 | 2831,0.03716
1740 | 2832,0.03208
1741 | 2834,0.03278
1742 | 2835,0.02194
1743 | 2836,0.07931
1744 | 2837,0.07634
1745 | 2838,0.03958
1746 | 2840,0.01941
1747 | 2841,0.01976
1748 | 2842,0.05737
1749 | 2843,0.01821
1750 | 2845,0.02574
1751 | 2847,0.03696
1752 | 2848,0.02633
1753 | 2850,0.03125
1754 | 2851,0.04084
1755 | 2854,0.01710
1756 | 2856,0.03819
1757 | 2857,0.01906
1758 | 2858,0.02556
1759 | 2859,0.02114
1760 | 2862,0.03869
1761 | 2864,0.02501
1762 | 2865,0.10733
1763 | 2866,0.01822
1764 | 2867,0.02928
1765 | 2868,0.02100
1766 | 2869,0.05906
1767 | 2871,0.03429
1768 | 2872,0.03856
1769 | 2873,0.01628
1770 | 2874,0.01517
1771 | 2875,0.03920
1772 | 2878,0.06138
1773 | 2882,0.03559
1774 | 2883,0.02586
1775 | 2886,0.01112
1776 | 2887,0.03170
1777 | 2888,0.03083
1778 | 2889,0.02518
1779 | 2892,0.03106
1780 | 2893,0.03265
1781 | 2895,0.02900
1782 | 2897,0.02607
1783 | 2898,0.04409
1784 | 2899,0.03316
1785 | 2903,0.02070
1786 | 2906,0.06875
1787 | 2907,0.02869
1788 | 2909,0.03117
1789 | 2913,0.02309
1790 | 2915,0.01665
1791 | 2916,0.03309
1792 | 2917,0.03022
1793 | 2918,0.04747
1794 | 2921,0.02866
1795 | 2924,0.03753
1796 | 2925,0.05512
1797 | 2928,0.05041
1798 | 2929,0.06297
1799 | 2931,0.07811
1800 | 2932,0.04548
1801 | 2934,0.03617
1802 | 2936,0.11179
1803 | 2937,0.04859
1804 | 2939,0.02086
1805 | 2940,0.06180
1806 | 2941,0.03808
1807 | 2942,0.02039
1808 | 2943,0.02894
1809 | 2946,0.01689
1810 | 2947,0.02648
1811 | 2949,0.04262
1812 | 2952,0.04155
1813 | 2953,0.04393
1814 | 2954,0.02650
1815 | 2956,0.01026
1816 | 2960,0.01664
1817 | 2962,0.08491
1818 | 2964,0.02704
1819 | 2965,0.01236
1820 | 2966,0.15145
1821 | 2967,0.03928
1822 | 2968,0.05353
1823 | 2971,0.02056
1824 | 2972,0.05188
1825 | 2973,0.06748
1826 | 2975,0.03110
1827 | 2976,0.03330
1828 | 2977,0.04226
1829 | 2981,0.02475
1830 | 2982,0.03890
1831 | 2983,0.04451
1832 | 2984,0.02520
1833 | 2985,0.09968
1834 | 2986,0.06286
1835 | 2988,0.03968
1836 | 2989,0.06144
1837 | 2990,0.02911
1838 | 2991,0.02667
1839 | 2993,0.02039
1840 | 2995,0.02008
1841 | 2998,0.02826
1842 | 2999,0.03970
1843 | 3000,0.09770
1844 | 3001,0.08585
1845 | 3003,0.05654
1846 | 3005,0.06833
1847 | 3007,0.03267
1848 | 3008,0.04427
1849 | 3009,0.02863
1850 | 3011,0.01994
1851 | 3012,0.01940
1852 | 3013,0.01941
1853 | 3014,0.04032
1854 | 3015,0.02540
1855 | 3016,0.02522
1856 | 3017,0.02819
1857 | 3018,0.04790
1858 | 3022,0.03873
1859 | 3023,0.02838
1860 | 3024,0.07961
1861 | 3026,0.01822
1862 | 3029,0.06980
1863 | 3031,0.03165
1864 | 3032,0.03717
1865 | 3033,0.02682
1866 | 3034,0.01277
1867 | 3035,0.04248
1868 | 3036,0.01875
1869 | 3037,0.03034
1870 | 3039,0.01866
1871 | 3040,0.04431
1872 | 3041,0.04111
1873 | 3042,0.05350
1874 | 3045,0.03977
1875 | 3050,0.02356
1876 | 3053,0.02655
1877 | 3055,0.02727
1878 | 3060,0.03789
1879 | 3061,0.02286
1880 | 3062,0.02986
1881 | 3064,0.05143
1882 | 3065,0.03930
1883 | 3067,0.05231
1884 | 3068,0.03368
1885 | 3072,0.02462
1886 | 3073,0.08040
1887 | 3076,0.03896
1888 | 3077,0.06156
1889 | 3079,0.02568
1890 | 3080,0.05475
1891 | 3082,0.04786
1892 | 3085,0.01376
1893 | 3086,0.01611
1894 | 3087,0.06199
1895 | 3090,0.04887
1896 | 3092,0.01798
1897 | 3094,0.02590
1898 | 3095,0.01952
1899 | 3101,0.04315
1900 | 3102,0.01694
1901 | 3103,0.11532
1902 | 3104,0.02495
1903 | 3105,0.05245
1904 | 3107,0.10440
1905 | 3108,0.02916
1906 | 3114,0.04025
1907 | 3115,0.03867
1908 | 3116,0.03287
1909 | 3118,0.02032
1910 | 3120,0.03101
1911 | 3121,0.04766
1912 | 3123,0.04809
1913 | 3125,0.01110
1914 | 3128,0.01971
1915 | 3129,0.08539
1916 | 3130,0.02749
1917 | 3133,0.03862
1918 | 3134,0.02045
1919 | 3136,0.08246
1920 | 3137,0.06557
1921 | 3138,0.01279
1922 | 3139,0.03450
1923 | 3140,0.04087
1924 | 3143,0.02401
1925 | 3144,0.02333
1926 | 3146,0.01959
1927 | 3148,0.04848
1928 | 3149,0.03643
1929 | 3150,0.02535
1930 | 3151,0.04393
1931 | 3153,0.04631
1932 | 3155,0.08111
1933 | 3157,0.02298
1934 | 3160,0.04012
1935 | 3161,0.04626
1936 | 3162,0.05015
1937 | 3163,0.03126
1938 | 3164,0.01469
1939 | 3165,0.05837
1940 | 3166,0.05339
1941 | 3168,0.03704
1942 | 3169,0.06385
1943 | 3170,0.04084
1944 | 3171,0.01831
1945 | 3176,0.02375
1946 | 3179,0.02109
1947 | 3180,0.02118
1948 | 3182,0.04545
1949 | 3183,0.04185
1950 | 3184,0.02117
1951 | 3185,0.03667
1952 | 3186,0.02378
1953 | 3188,0.02065
1954 | 3189,0.02353
1955 | 3191,0.03195
1956 | 3193,0.02377
1957 | 3194,0.03412
1958 | 3195,0.03202
1959 | 3196,0.02737
1960 | 3197,0.01421
1961 | 3199,0.02251
1962 | 3201,0.04045
1963 | 3202,0.03803
1964 | 3203,0.03069
1965 | 3205,0.03497
1966 | 3206,0.03084
1967 | 3207,0.01832
1968 | 3208,0.03376
1969 | 3209,0.03123
1970 | 3210,0.03556
1971 | 3211,0.05473
1972 | 3212,0.02016
1973 | 3213,0.07273
1974 | 3215,0.02503
1975 | 3216,0.03046
1976 | 3217,0.01520
1977 | 3218,0.02375
1978 | 3219,0.05416
1979 | 3221,0.07693
1980 | 3222,0.03120
1981 | 3226,0.03108
1982 | 3227,0.03746
1983 | 3229,0.02242
1984 | 3231,0.01541
1985 | 3234,0.07657
1986 | 3240,0.03823
1987 | 3242,0.02284
1988 | 3244,0.01403
1989 | 3246,0.03746
1990 | 3247,0.06502
1991 | 3248,0.03494
1992 | 3249,0.03189
1993 | 3250,0.03411
1994 | 3251,0.01573
1995 | 3252,0.01308
1996 | 3253,0.03216
1997 | 3255,0.02268
1998 | 3257,0.02306
1999 | 3258,0.01903
2000 | 3259,0.02097
2001 | 3265,0.05537
2002 |
--------------------------------------------------------------------------------
/Basic-DeepFM-model/output/FM_Mean0.19988_Std0.03441.csv:
--------------------------------------------------------------------------------
1 | id,target
2 | 0,0.02106
3 | 1,0.04457
4 | 2,0.02782
5 | 3,0.01416
6 | 4,0.00964
7 | 5,0.02544
8 | 6,0.03712
9 | 8,0.02755
10 | 10,0.05166
11 | 11,0.05716
12 | 12,0.04162
13 | 14,0.01343
14 | 15,0.06427
15 | 18,0.04551
16 | 21,0.04682
17 | 23,0.02536
18 | 24,0.04333
19 | 25,0.03980
20 | 27,0.04065
21 | 29,0.03564
22 | 30,0.01663
23 | 31,0.10769
24 | 32,0.03938
25 | 33,0.03448
26 | 37,0.03387
27 | 38,0.04785
28 | 39,0.08104
29 | 40,0.03052
30 | 41,0.04156
31 | 42,0.02251
32 | 44,0.03129
33 | 45,0.02447
34 | 47,0.05115
35 | 49,0.01440
36 | 51,0.04861
37 | 52,0.01996
38 | 53,0.04354
39 | 54,0.05833
40 | 55,0.01453
41 | 56,0.01865
42 | 57,0.10944
43 | 59,0.04290
44 | 60,0.05372
45 | 62,0.02011
46 | 63,0.09944
47 | 67,0.03059
48 | 68,0.00746
49 | 69,0.08878
50 | 70,0.03288
51 | 71,0.04892
52 | 73,0.02364
53 | 75,0.02147
54 | 76,0.02149
55 | 81,0.01487
56 | 82,0.02661
57 | 83,0.02263
58 | 86,0.03437
59 | 87,0.06457
60 | 88,0.01570
61 | 91,0.08846
62 | 92,0.02174
63 | 94,0.01152
64 | 97,0.05132
65 | 100,0.02086
66 | 102,0.03659
67 | 103,0.02490
68 | 105,0.03858
69 | 106,0.06019
70 | 108,0.03221
71 | 113,0.05400
72 | 114,0.01971
73 | 115,0.02613
74 | 118,0.06921
75 | 122,0.02442
76 | 124,0.06011
77 | 126,0.03379
78 | 128,0.03006
79 | 129,0.06900
80 | 130,0.04590
81 | 131,0.01830
82 | 132,0.04256
83 | 133,0.01885
84 | 134,0.03571
85 | 135,0.07927
86 | 136,0.02105
87 | 139,0.01279
88 | 140,0.01749
89 | 141,0.08784
90 | 146,0.04522
91 | 148,0.02017
92 | 151,0.01708
93 | 152,0.03936
94 | 154,0.04421
95 | 157,0.04344
96 | 158,0.00809
97 | 159,0.08441
98 | 161,0.01726
99 | 164,0.01467
100 | 165,0.06067
101 | 167,0.03176
102 | 168,0.04659
103 | 169,0.04119
104 | 171,0.06748
105 | 172,0.06091
106 | 173,0.01445
107 | 174,0.02148
108 | 175,0.03924
109 | 176,0.04297
110 | 178,0.03804
111 | 179,0.01796
112 | 180,0.01664
113 | 181,0.01287
114 | 182,0.13741
115 | 183,0.06345
116 | 184,0.08218
117 | 185,0.03182
118 | 187,0.03816
119 | 188,0.01638
120 | 190,0.02068
121 | 191,0.03526
122 | 192,0.05751
123 | 195,0.03846
124 | 196,0.02153
125 | 198,0.06344
126 | 201,0.05688
127 | 202,0.05259
128 | 203,0.01575
129 | 204,0.01547
130 | 207,0.03040
131 | 213,0.03567
132 | 219,0.02623
133 | 220,0.02978
134 | 222,0.08108
135 | 226,0.00883
136 | 228,0.12727
137 | 230,0.03176
138 | 231,0.02553
139 | 232,0.04153
140 | 233,0.01163
141 | 234,0.00794
142 | 236,0.03871
143 | 237,0.03326
144 | 238,0.09965
145 | 240,0.05497
146 | 241,0.01821
147 | 242,0.08924
148 | 243,0.02147
149 | 245,0.01986
150 | 246,0.04947
151 | 247,0.02748
152 | 249,0.05554
153 | 250,0.02032
154 | 251,0.01530
155 | 252,0.01978
156 | 253,0.01468
157 | 254,0.06551
158 | 255,0.10213
159 | 256,0.04431
160 | 257,0.02225
161 | 258,0.02100
162 | 259,0.08747
163 | 260,0.00546
164 | 262,0.01107
165 | 267,0.02216
166 | 268,0.02670
167 | 270,0.01332
168 | 272,0.01817
169 | 273,0.03087
170 | 274,0.01449
171 | 275,0.02238
172 | 276,0.02657
173 | 277,0.04895
174 | 278,0.08116
175 | 279,0.04394
176 | 281,0.02229
177 | 282,0.02863
178 | 283,0.04123
179 | 287,0.02475
180 | 288,0.06459
181 | 289,0.03068
182 | 290,0.01097
183 | 291,0.08660
184 | 292,0.14462
185 | 293,0.04348
186 | 294,0.01972
187 | 295,0.01924
188 | 296,0.11808
189 | 297,0.05156
190 | 298,0.03580
191 | 299,0.07977
192 | 300,0.03038
193 | 303,0.03607
194 | 304,0.05858
195 | 308,0.01172
196 | 314,0.10954
197 | 315,0.02365
198 | 317,0.05087
199 | 318,0.09415
200 | 320,0.05659
201 | 322,0.06933
202 | 323,0.01340
203 | 327,0.04045
204 | 329,0.08198
205 | 331,0.01116
206 | 332,0.22350
207 | 333,0.07234
208 | 334,0.01787
209 | 335,0.02734
210 | 336,0.05176
211 | 337,0.04835
212 | 341,0.02616
213 | 343,0.02694
214 | 345,0.03818
215 | 347,0.03294
216 | 348,0.05930
217 | 349,0.04303
218 | 352,0.01769
219 | 353,0.01953
220 | 354,0.05657
221 | 355,0.04019
222 | 356,0.02964
223 | 357,0.01329
224 | 358,0.01730
225 | 359,0.02886
226 | 361,0.05685
227 | 362,0.03108
228 | 365,0.03822
229 | 366,0.07108
230 | 368,0.02944
231 | 369,0.02507
232 | 371,0.01535
233 | 373,0.02514
234 | 376,0.04650
235 | 377,0.01891
236 | 379,0.05868
237 | 381,0.02495
238 | 382,0.02030
239 | 383,0.01970
240 | 384,0.04631
241 | 385,0.03064
242 | 386,0.04417
243 | 387,0.02590
244 | 388,0.07014
245 | 390,0.02143
246 | 391,0.02527
247 | 392,0.11623
248 | 393,0.03126
249 | 394,0.02886
250 | 396,0.03806
251 | 400,0.03481
252 | 402,0.03533
253 | 403,0.01664
254 | 405,0.04318
255 | 407,0.04306
256 | 409,0.04420
257 | 411,0.04226
258 | 412,0.01916
259 | 414,0.07864
260 | 415,0.02247
261 | 416,0.01075
262 | 418,0.01783
263 | 419,0.01989
264 | 421,0.00774
265 | 422,0.05880
266 | 423,0.02996
267 | 424,0.05576
268 | 426,0.00863
269 | 427,0.10756
270 | 428,0.03380
271 | 436,0.04486
272 | 438,0.04291
273 | 442,0.02838
274 | 443,0.02264
275 | 445,0.03160
276 | 446,0.03714
277 | 447,0.02118
278 | 450,0.06052
279 | 452,0.01257
280 | 453,0.03120
281 | 455,0.03367
282 | 458,0.08158
283 | 459,0.01122
284 | 461,0.04049
285 | 463,0.01400
286 | 467,0.03967
287 | 470,0.04131
288 | 471,0.02291
289 | 472,0.02346
290 | 476,0.09920
291 | 477,0.02185
292 | 481,0.07102
293 | 482,0.07681
294 | 483,0.09457
295 | 485,0.02441
296 | 487,0.01427
297 | 488,0.02334
298 | 490,0.02401
299 | 492,0.02260
300 | 494,0.05725
301 | 497,0.11459
302 | 499,0.01542
303 | 500,0.05290
304 | 501,0.04439
305 | 503,0.01314
306 | 505,0.02564
307 | 506,0.04498
308 | 507,0.04211
309 | 509,0.05588
310 | 510,0.07256
311 | 511,0.05907
312 | 512,0.01618
313 | 514,0.02199
314 | 515,0.02488
315 | 516,0.01777
316 | 517,0.13774
317 | 519,0.02075
318 | 520,0.03108
319 | 521,0.02786
320 | 522,0.04082
321 | 523,0.04490
322 | 527,0.03709
323 | 530,0.08774
324 | 532,0.03540
325 | 534,0.02255
326 | 536,0.05105
327 | 539,0.02650
328 | 543,0.01658
329 | 547,0.04018
330 | 550,0.02304
331 | 551,0.02882
332 | 552,0.02092
333 | 553,0.05959
334 | 555,0.03019
335 | 556,0.03021
336 | 557,0.02690
337 | 558,0.03145
338 | 559,0.03162
339 | 560,0.01524
340 | 561,0.04109
341 | 565,0.03398
342 | 566,0.03401
343 | 567,0.03249
344 | 571,0.05144
345 | 572,0.03747
346 | 574,0.01259
347 | 575,0.01432
348 | 576,0.09834
349 | 577,0.06258
350 | 578,0.01265
351 | 579,0.04898
352 | 581,0.01315
353 | 587,0.01616
354 | 589,0.02522
355 | 590,0.05354
356 | 591,0.01257
357 | 592,0.07016
358 | 593,0.01835
359 | 594,0.11301
360 | 595,0.06156
361 | 596,0.01313
362 | 597,0.05327
363 | 598,0.19354
364 | 600,0.01938
365 | 602,0.00754
366 | 605,0.03230
367 | 606,0.02792
368 | 607,0.02166
369 | 612,0.01229
370 | 613,0.01295
371 | 614,0.01264
372 | 615,0.01578
373 | 616,0.00983
374 | 617,0.01649
375 | 618,0.05706
376 | 620,0.03541
377 | 621,0.01765
378 | 622,0.01665
379 | 623,0.02953
380 | 628,0.03046
381 | 631,0.03909
382 | 632,0.01778
383 | 633,0.03035
384 | 634,0.04707
385 | 635,0.06833
386 | 637,0.02666
387 | 638,0.04592
388 | 640,0.02960
389 | 643,0.01987
390 | 644,0.10400
391 | 645,0.03464
392 | 646,0.04537
393 | 649,0.02613
394 | 651,0.05792
395 | 653,0.01515
396 | 654,0.09839
397 | 655,0.13727
398 | 656,0.04275
399 | 658,0.05915
400 | 659,0.01237
401 | 661,0.06578
402 | 662,0.02471
403 | 663,0.03530
404 | 664,0.08331
405 | 665,0.05249
406 | 667,0.03232
407 | 668,0.03055
408 | 671,0.03135
409 | 673,0.03864
410 | 674,0.05761
411 | 675,0.01885
412 | 676,0.05196
413 | 677,0.01849
414 | 678,0.02222
415 | 679,0.02808
416 | 680,0.03654
417 | 681,0.06727
418 | 682,0.05106
419 | 683,0.02463
420 | 684,0.01803
421 | 686,0.09861
422 | 687,0.04605
423 | 688,0.08591
424 | 690,0.02840
425 | 691,0.03412
426 | 692,0.06201
427 | 693,0.03859
428 | 694,0.03732
429 | 695,0.01386
430 | 696,0.01448
431 | 698,0.05599
432 | 699,0.01807
433 | 702,0.02454
434 | 703,0.01204
435 | 705,0.16651
436 | 706,0.06695
437 | 707,0.02346
438 | 708,0.01397
439 | 709,0.06972
440 | 710,0.11081
441 | 713,0.01675
442 | 714,0.02366
443 | 715,0.05847
444 | 716,0.07193
445 | 717,0.04460
446 | 718,0.01078
447 | 719,0.02391
448 | 720,0.02077
449 | 721,0.02695
450 | 723,0.01956
451 | 724,0.04883
452 | 726,0.02017
453 | 728,0.10979
454 | 730,0.02380
455 | 732,0.01841
456 | 734,0.01363
457 | 735,0.06248
458 | 737,0.01072
459 | 738,0.08874
460 | 741,0.06949
461 | 742,0.02415
462 | 743,0.01855
463 | 746,0.02504
464 | 747,0.03382
465 | 749,0.03595
466 | 750,0.05433
467 | 752,0.04344
468 | 755,0.03449
469 | 757,0.01488
470 | 758,0.02592
471 | 759,0.03580
472 | 765,0.01334
473 | 766,0.01933
474 | 767,0.12100
475 | 768,0.03476
476 | 769,0.02830
477 | 770,0.00757
478 | 771,0.02745
479 | 772,0.01579
480 | 773,0.09142
481 | 775,0.03487
482 | 776,0.02953
483 | 778,0.03170
484 | 779,0.01961
485 | 780,0.04085
486 | 782,0.02819
487 | 783,0.03534
488 | 784,0.04083
489 | 785,0.01697
490 | 786,0.01106
491 | 788,0.04989
492 | 789,0.01824
493 | 792,0.08747
494 | 793,0.02794
495 | 794,0.04083
496 | 797,0.10755
497 | 798,0.01187
498 | 799,0.10028
499 | 800,0.03292
500 | 801,0.05246
501 | 802,0.11143
502 | 805,0.02632
503 | 807,0.02952
504 | 808,0.00902
505 | 811,0.01941
506 | 813,0.01791
507 | 814,0.02660
508 | 815,0.15133
509 | 816,0.05385
510 | 817,0.01505
511 | 819,0.01347
512 | 821,0.02023
513 | 823,0.02034
514 | 825,0.02270
515 | 826,0.01051
516 | 827,0.02287
517 | 828,0.01905
518 | 830,0.02938
519 | 831,0.03126
520 | 833,0.01456
521 | 834,0.07177
522 | 835,0.02345
523 | 836,0.04540
524 | 837,0.03401
525 | 838,0.04537
526 | 840,0.02883
527 | 842,0.01262
528 | 844,0.03345
529 | 845,0.07593
530 | 847,0.03548
531 | 848,0.05017
532 | 850,0.01266
533 | 851,0.02443
534 | 852,0.03337
535 | 853,0.09342
536 | 855,0.01290
537 | 856,0.04493
538 | 857,0.02578
539 | 859,0.03115
540 | 860,0.05299
541 | 862,0.07277
542 | 863,0.03118
543 | 864,0.02651
544 | 867,0.04948
545 | 868,0.03424
546 | 869,0.01733
547 | 871,0.01353
548 | 873,0.05875
549 | 875,0.04697
550 | 876,0.04649
551 | 877,0.02177
552 | 879,0.08544
553 | 880,0.10681
554 | 883,0.02676
555 | 884,0.02255
556 | 885,0.03319
557 | 886,0.00686
558 | 887,0.01450
559 | 888,0.01793
560 | 891,0.04633
561 | 893,0.01601
562 | 895,0.06109
563 | 897,0.04179
564 | 898,0.12867
565 | 901,0.02775
566 | 902,0.03360
567 | 904,0.04278
568 | 905,0.01791
569 | 907,0.05344
570 | 908,0.04337
571 | 909,0.04267
572 | 910,0.01255
573 | 911,0.01337
574 | 913,0.04299
575 | 914,0.02196
576 | 915,0.16695
577 | 916,0.02552
578 | 917,0.01473
579 | 919,0.02089
580 | 920,0.02882
581 | 923,0.02145
582 | 924,0.02373
583 | 925,0.05756
584 | 926,0.03516
585 | 928,0.02980
586 | 929,0.00951
587 | 932,0.03727
588 | 935,0.04956
589 | 936,0.04676
590 | 937,0.02722
591 | 938,0.07023
592 | 939,0.01718
593 | 940,0.05047
594 | 941,0.01584
595 | 944,0.05425
596 | 945,0.01649
597 | 946,0.06535
598 | 947,0.05790
599 | 949,0.03225
600 | 950,0.03046
601 | 952,0.03659
602 | 953,0.05154
603 | 955,0.01516
604 | 956,0.02659
605 | 957,0.01454
606 | 960,0.02294
607 | 961,0.04117
608 | 962,0.02339
609 | 963,0.01520
610 | 964,0.03369
611 | 967,0.04037
612 | 975,0.03251
613 | 976,0.02103
614 | 977,0.01698
615 | 978,0.04951
616 | 980,0.01114
617 | 985,0.05849
618 | 986,0.06147
619 | 987,0.00892
620 | 989,0.02720
621 | 992,0.02718
622 | 994,0.03272
623 | 995,0.01507
624 | 996,0.01034
625 | 997,0.05277
626 | 998,0.01032
627 | 999,0.06653
628 | 1000,0.03802
629 | 1004,0.01670
630 | 1005,0.07139
631 | 1006,0.02353
632 | 1007,0.13455
633 | 1010,0.02809
634 | 1011,0.06843
635 | 1012,0.00959
636 | 1013,0.02534
637 | 1014,0.02176
638 | 1016,0.04428
639 | 1018,0.03051
640 | 1019,0.01456
641 | 1020,0.03854
642 | 1022,0.02671
643 | 1023,0.03056
644 | 1024,0.03733
645 | 1025,0.05902
646 | 1029,0.04140
647 | 1031,0.02718
648 | 1032,0.02238
649 | 1033,0.03268
650 | 1034,0.07049
651 | 1035,0.03758
652 | 1036,0.03068
653 | 1037,0.01887
654 | 1038,0.03508
655 | 1039,0.03421
656 | 1041,0.01425
657 | 1043,0.02571
658 | 1049,0.04450
659 | 1050,0.14631
660 | 1051,0.03705
661 | 1052,0.02284
662 | 1053,0.01925
663 | 1054,0.02444
664 | 1056,0.05918
665 | 1057,0.10079
666 | 1060,0.03795
667 | 1061,0.01854
668 | 1062,0.03376
669 | 1063,0.07289
670 | 1064,0.04714
671 | 1065,0.01334
672 | 1066,0.03138
673 | 1068,0.06072
674 | 1073,0.03587
675 | 1075,0.01060
676 | 1079,0.02339
677 | 1081,0.05695
678 | 1083,0.05603
679 | 1085,0.01668
680 | 1086,0.01698
681 | 1087,0.02474
682 | 1088,0.01016
683 | 1089,0.02155
684 | 1090,0.06037
685 | 1091,0.01966
686 | 1092,0.01692
687 | 1095,0.02293
688 | 1098,0.03094
689 | 1099,0.03196
690 | 1100,0.03501
691 | 1101,0.10881
692 | 1103,0.06461
693 | 1104,0.02846
694 | 1106,0.01214
695 | 1107,0.05877
696 | 1108,0.12848
697 | 1109,0.01828
698 | 1113,0.05848
699 | 1114,0.04694
700 | 1116,0.01060
701 | 1117,0.04767
702 | 1118,0.02673
703 | 1119,0.01943
704 | 1120,0.03315
705 | 1121,0.02143
706 | 1122,0.01484
707 | 1123,0.04054
708 | 1125,0.01403
709 | 1126,0.03214
710 | 1127,0.00808
711 | 1128,0.07597
712 | 1129,0.04919
713 | 1130,0.02794
714 | 1132,0.02166
715 | 1133,0.05955
716 | 1136,0.01125
717 | 1137,0.08137
718 | 1140,0.01953
719 | 1142,0.07760
720 | 1144,0.02724
721 | 1145,0.01709
722 | 1146,0.00836
723 | 1148,0.04169
724 | 1150,0.01233
725 | 1151,0.01943
726 | 1153,0.04892
727 | 1154,0.02560
728 | 1155,0.04192
729 | 1156,0.02899
730 | 1157,0.02102
731 | 1158,0.05356
732 | 1160,0.01159
733 | 1161,0.03239
734 | 1167,0.01826
735 | 1168,0.02312
736 | 1170,0.01819
737 | 1171,0.06108
738 | 1173,0.03571
739 | 1174,0.17408
740 | 1175,0.02103
741 | 1176,0.04902
742 | 1177,0.02341
743 | 1178,0.04142
744 | 1179,0.01595
745 | 1180,0.03223
746 | 1181,0.06746
747 | 1182,0.01630
748 | 1183,0.01435
749 | 1185,0.01344
750 | 1188,0.03507
751 | 1189,0.01770
752 | 1190,0.04995
753 | 1191,0.06014
754 | 1192,0.05483
755 | 1194,0.03036
756 | 1197,0.02843
757 | 1198,0.02758
758 | 1199,0.05125
759 | 1200,0.02443
760 | 1203,0.07361
761 | 1204,0.03020
762 | 1207,0.02934
763 | 1208,0.04162
764 | 1209,0.01482
765 | 1210,0.08023
766 | 1211,0.02101
767 | 1213,0.03828
768 | 1214,0.01608
769 | 1215,0.03440
770 | 1216,0.11220
771 | 1217,0.04061
772 | 1218,0.02927
773 | 1219,0.01908
774 | 1223,0.03819
775 | 1225,0.04124
776 | 1226,0.01672
777 | 1228,0.11855
778 | 1229,0.01602
779 | 1231,0.10179
780 | 1232,0.10232
781 | 1233,0.04384
782 | 1235,0.02483
783 | 1238,0.01165
784 | 1239,0.01887
785 | 1240,0.05551
786 | 1241,0.01546
787 | 1242,0.01622
788 | 1243,0.03715
789 | 1244,0.02423
790 | 1246,0.10911
791 | 1249,0.06044
792 | 1250,0.00680
793 | 1252,0.01106
794 | 1254,0.03748
795 | 1255,0.01354
796 | 1256,0.02942
797 | 1257,0.01535
798 | 1258,0.01687
799 | 1259,0.06460
800 | 1260,0.04740
801 | 1262,0.03068
802 | 1263,0.02243
803 | 1264,0.01576
804 | 1265,0.04047
805 | 1266,0.01327
806 | 1269,0.04344
807 | 1270,0.02878
808 | 1271,0.00969
809 | 1272,0.07335
810 | 1273,0.02057
811 | 1274,0.04240
812 | 1277,0.02559
813 | 1278,0.00811
814 | 1281,0.03142
815 | 1282,0.05209
816 | 1283,0.02284
817 | 1284,0.02007
818 | 1285,0.02415
819 | 1286,0.05659
820 | 1289,0.01013
821 | 1291,0.01220
822 | 1292,0.02239
823 | 1293,0.09690
824 | 1294,0.06815
825 | 1295,0.10301
826 | 1297,0.02962
827 | 1298,0.04945
828 | 1301,0.02881
829 | 1304,0.04368
830 | 1305,0.02598
831 | 1307,0.05987
832 | 1308,0.02373
833 | 1309,0.15005
834 | 1313,0.11633
835 | 1314,0.04829
836 | 1316,0.03134
837 | 1317,0.01891
838 | 1318,0.09235
839 | 1320,0.01356
840 | 1321,0.01162
841 | 1322,0.01641
842 | 1323,0.01897
843 | 1325,0.04974
844 | 1330,0.02630
845 | 1331,0.03528
846 | 1334,0.05307
847 | 1337,0.01015
848 | 1339,0.03225
849 | 1340,0.01672
850 | 1341,0.01831
851 | 1343,0.02819
852 | 1344,0.01259
853 | 1347,0.01815
854 | 1348,0.10224
855 | 1350,0.01988
856 | 1351,0.02468
857 | 1355,0.02890
858 | 1356,0.03781
859 | 1357,0.02149
860 | 1358,0.02286
861 | 1360,0.01620
862 | 1363,0.05099
863 | 1364,0.01705
864 | 1366,0.01879
865 | 1367,0.05336
866 | 1368,0.05846
867 | 1369,0.02877
868 | 1370,0.05507
869 | 1375,0.01501
870 | 1376,0.00934
871 | 1378,0.07918
872 | 1379,0.01920
873 | 1380,0.03152
874 | 1384,0.00577
875 | 1385,0.02412
876 | 1386,0.04971
877 | 1388,0.04120
878 | 1389,0.00995
879 | 1391,0.03674
880 | 1392,0.00868
881 | 1393,0.03988
882 | 1395,0.03221
883 | 1396,0.03855
884 | 1399,0.03952
885 | 1400,0.03244
886 | 1401,0.02425
887 | 1404,0.01576
888 | 1405,0.01225
889 | 1406,0.07155
890 | 1407,0.01261
891 | 1409,0.03042
892 | 1411,0.02533
893 | 1414,0.05683
894 | 1415,0.04861
895 | 1416,0.06612
896 | 1418,0.11145
897 | 1419,0.03007
898 | 1420,0.01526
899 | 1422,0.07545
900 | 1424,0.01329
901 | 1426,0.01653
902 | 1427,0.03526
903 | 1428,0.03606
904 | 1429,0.04781
905 | 1431,0.01520
906 | 1432,0.03660
907 | 1434,0.08042
908 | 1435,0.03557
909 | 1439,0.08344
910 | 1440,0.13749
911 | 1441,0.04867
912 | 1442,0.03809
913 | 1446,0.04824
914 | 1447,0.06929
915 | 1451,0.01293
916 | 1452,0.01022
917 | 1454,0.03553
918 | 1457,0.00809
919 | 1458,0.03422
920 | 1459,0.03307
921 | 1461,0.03051
922 | 1462,0.03324
923 | 1465,0.03465
924 | 1468,0.05504
925 | 1470,0.02368
926 | 1471,0.03931
927 | 1472,0.02623
928 | 1476,0.04230
929 | 1482,0.02291
930 | 1485,0.01802
931 | 1486,0.05376
932 | 1487,0.04225
933 | 1489,0.02685
934 | 1492,0.03281
935 | 1494,0.14064
936 | 1495,0.01951
937 | 1497,0.02674
938 | 1499,0.01996
939 | 1500,0.01386
940 | 1501,0.02276
941 | 1505,0.12761
942 | 1507,0.02387
943 | 1509,0.10278
944 | 1511,0.01268
945 | 1513,0.01642
946 | 1514,0.02849
947 | 1515,0.07399
948 | 1517,0.19601
949 | 1520,0.08015
950 | 1522,0.01826
951 | 1523,0.01176
952 | 1524,0.10177
953 | 1526,0.04659
954 | 1527,0.01740
955 | 1528,0.03146
956 | 1531,0.01335
957 | 1533,0.07137
958 | 1534,0.06378
959 | 1535,0.09966
960 | 1538,0.04767
961 | 1541,0.05580
962 | 1542,0.03314
963 | 1543,0.02901
964 | 1546,0.02719
965 | 1548,0.02939
966 | 1549,0.03713
967 | 1550,0.01630
968 | 1551,0.04633
969 | 1553,0.07737
970 | 1558,0.01728
971 | 1560,0.04177
972 | 1561,0.02305
973 | 1562,0.03120
974 | 1565,0.10370
975 | 1566,0.04492
976 | 1567,0.02465
977 | 1568,0.02406
978 | 1569,0.02583
979 | 1570,0.03981
980 | 1573,0.01417
981 | 1576,0.01856
982 | 1577,0.01706
983 | 1579,0.03869
984 | 1580,0.09820
985 | 1581,0.04776
986 | 1584,0.02013
987 | 1586,0.02048
988 | 1587,0.01561
989 | 1590,0.01345
990 | 1591,0.03244
991 | 1594,0.02539
992 | 1595,0.01245
993 | 1596,0.01497
994 | 1601,0.05519
995 | 1605,0.04823
996 | 1607,0.13519
997 | 1608,0.01471
998 | 1610,0.02023
999 | 1611,0.03745
1000 | 1612,0.05764
1001 | 1616,0.06072
1002 | 1617,0.06056
1003 | 1618,0.03516
1004 | 1620,0.02476
1005 | 1621,0.02796
1006 | 1623,0.04027
1007 | 1624,0.01962
1008 | 1625,0.04021
1009 | 1626,0.01793
1010 | 1628,0.02184
1011 | 1630,0.02006
1012 | 1632,0.03973
1013 | 1633,0.03609
1014 | 1635,0.03875
1015 | 1637,0.02671
1016 | 1638,0.02922
1017 | 1639,0.04365
1018 | 1640,0.01708
1019 | 1643,0.15243
1020 | 1644,0.03717
1021 | 1645,0.05238
1022 | 1647,0.01124
1023 | 1649,0.00937
1024 | 1651,0.02941
1025 | 1652,0.02268
1026 | 1654,0.03325
1027 | 1655,0.03397
1028 | 1656,0.09615
1029 | 1658,0.05329
1030 | 1660,0.01448
1031 | 1661,0.02952
1032 | 1662,0.07532
1033 | 1663,0.01466
1034 | 1664,0.02169
1035 | 1665,0.09011
1036 | 1668,0.04422
1037 | 1669,0.01607
1038 | 1673,0.08881
1039 | 1674,0.07308
1040 | 1675,0.00891
1041 | 1676,0.02289
1042 | 1677,0.02051
1043 | 1679,0.06178
1044 | 1682,0.04091
1045 | 1683,0.04107
1046 | 1685,0.05854
1047 | 1687,0.01575
1048 | 1689,0.03551
1049 | 1690,0.01769
1050 | 1694,0.02102
1051 | 1696,0.02587
1052 | 1697,0.02211
1053 | 1699,0.09794
1054 | 1700,0.06310
1055 | 1701,0.01473
1056 | 1702,0.02364
1057 | 1704,0.03068
1058 | 1705,0.03970
1059 | 1708,0.01812
1060 | 1710,0.02023
1061 | 1712,0.03617
1062 | 1719,0.04606
1063 | 1720,0.02458
1064 | 1722,0.04518
1065 | 1723,0.05507
1066 | 1724,0.04655
1067 | 1725,0.05122
1068 | 1726,0.03225
1069 | 1727,0.03334
1070 | 1728,0.02925
1071 | 1732,0.02090
1072 | 1733,0.03837
1073 | 1734,0.06909
1074 | 1735,0.05475
1075 | 1739,0.05865
1076 | 1740,0.01990
1077 | 1741,0.04463
1078 | 1742,0.03980
1079 | 1743,0.02545
1080 | 1744,0.05895
1081 | 1746,0.03808
1082 | 1750,0.05048
1083 | 1753,0.01499
1084 | 1755,0.02137
1085 | 1758,0.02594
1086 | 1760,0.00739
1087 | 1763,0.02463
1088 | 1765,0.01807
1089 | 1766,0.06746
1090 | 1769,0.02085
1091 | 1771,0.01779
1092 | 1772,0.01109
1093 | 1775,0.05737
1094 | 1776,0.01384
1095 | 1778,0.03872
1096 | 1780,0.06057
1097 | 1781,0.01410
1098 | 1783,0.01349
1099 | 1785,0.02162
1100 | 1788,0.04581
1101 | 1791,0.03107
1102 | 1793,0.05753
1103 | 1794,0.04843
1104 | 1796,0.01103
1105 | 1797,0.04213
1106 | 1798,0.06799
1107 | 1800,0.07587
1108 | 1803,0.11849
1109 | 1804,0.02977
1110 | 1810,0.01079
1111 | 1812,0.02879
1112 | 1813,0.05986
1113 | 1816,0.03141
1114 | 1817,0.03021
1115 | 1818,0.02711
1116 | 1819,0.06512
1117 | 1820,0.01965
1118 | 1821,0.04392
1119 | 1822,0.01579
1120 | 1823,0.02740
1121 | 1826,0.01779
1122 | 1827,0.03968
1123 | 1828,0.04177
1124 | 1829,0.02090
1125 | 1830,0.02685
1126 | 1831,0.03731
1127 | 1834,0.01105
1128 | 1836,0.04049
1129 | 1839,0.02431
1130 | 1841,0.01221
1131 | 1846,0.03493
1132 | 1847,0.03762
1133 | 1850,0.06355
1134 | 1852,0.03995
1135 | 1853,0.02568
1136 | 1854,0.11095
1137 | 1855,0.01657
1138 | 1857,0.04380
1139 | 1858,0.03831
1140 | 1860,0.02664
1141 | 1863,0.04074
1142 | 1866,0.06993
1143 | 1868,0.03114
1144 | 1869,0.02505
1145 | 1870,0.01537
1146 | 1874,0.01454
1147 | 1875,0.01798
1148 | 1877,0.04295
1149 | 1878,0.02169
1150 | 1879,0.01752
1151 | 1881,0.01218
1152 | 1882,0.06092
1153 | 1885,0.01906
1154 | 1886,0.05696
1155 | 1887,0.08896
1156 | 1888,0.00868
1157 | 1889,0.05723
1158 | 1890,0.04566
1159 | 1891,0.04426
1160 | 1892,0.02783
1161 | 1894,0.03990
1162 | 1895,0.08419
1163 | 1896,0.05057
1164 | 1897,0.01756
1165 | 1898,0.06249
1166 | 1901,0.03254
1167 | 1904,0.01744
1168 | 1905,0.02106
1169 | 1906,0.01409
1170 | 1907,0.06471
1171 | 1908,0.03953
1172 | 1910,0.07330
1173 | 1911,0.01925
1174 | 1914,0.01618
1175 | 1916,0.01600
1176 | 1919,0.07723
1177 | 1920,0.10848
1178 | 1921,0.02670
1179 | 1922,0.05141
1180 | 1923,0.06713
1181 | 1924,0.05279
1182 | 1926,0.02657
1183 | 1928,0.01547
1184 | 1929,0.03781
1185 | 1931,0.01787
1186 | 1932,0.03813
1187 | 1933,0.00973
1188 | 1934,0.02662
1189 | 1935,0.01780
1190 | 1936,0.02988
1191 | 1937,0.03038
1192 | 1939,0.02751
1193 | 1940,0.06047
1194 | 1942,0.02260
1195 | 1944,0.03705
1196 | 1945,0.03311
1197 | 1946,0.07148
1198 | 1947,0.02900
1199 | 1948,0.04919
1200 | 1954,0.04268
1201 | 1955,0.16486
1202 | 1956,0.01729
1203 | 1958,0.02137
1204 | 1961,0.05127
1205 | 1963,0.01023
1206 | 1967,0.02212
1207 | 1968,0.01780
1208 | 1970,0.01034
1209 | 1971,0.01179
1210 | 1973,0.08288
1211 | 1979,0.05328
1212 | 1980,0.04825
1213 | 1982,0.01704
1214 | 1984,0.03256
1215 | 1986,0.02584
1216 | 1987,0.09435
1217 | 1989,0.04690
1218 | 1990,0.08493
1219 | 1991,0.03631
1220 | 1993,0.01981
1221 | 1994,0.02685
1222 | 1995,0.02612
1223 | 1996,0.03635
1224 | 1999,0.04034
1225 | 2001,0.05342
1226 | 2002,0.01648
1227 | 2005,0.03109
1228 | 2006,0.01984
1229 | 2008,0.01949
1230 | 2009,0.03337
1231 | 2011,0.01113
1232 | 2012,0.07963
1233 | 2013,0.00719
1234 | 2015,0.04331
1235 | 2017,0.01144
1236 | 2019,0.00786
1237 | 2020,0.01610
1238 | 2022,0.05061
1239 | 2023,0.01739
1240 | 2024,0.01375
1241 | 2026,0.02111
1242 | 2027,0.04138
1243 | 2028,0.03467
1244 | 2029,0.03353
1245 | 2030,0.04302
1246 | 2032,0.03622
1247 | 2033,0.04761
1248 | 2035,0.04030
1249 | 2036,0.01871
1250 | 2037,0.10694
1251 | 2038,0.04792
1252 | 2039,0.03522
1253 | 2040,0.09139
1254 | 2041,0.00665
1255 | 2042,0.06341
1256 | 2043,0.03753
1257 | 2044,0.02502
1258 | 2046,0.01887
1259 | 2047,0.02089
1260 | 2048,0.03211
1261 | 2049,0.08317
1262 | 2050,0.05365
1263 | 2054,0.03240
1264 | 2061,0.04441
1265 | 2063,0.04933
1266 | 2064,0.03039
1267 | 2065,0.02628
1268 | 2067,0.03319
1269 | 2068,0.01489
1270 | 2069,0.01907
1271 | 2070,0.03339
1272 | 2072,0.01719
1273 | 2073,0.02270
1274 | 2075,0.00946
1275 | 2076,0.01006
1276 | 2077,0.01380
1277 | 2078,0.03388
1278 | 2079,0.00752
1279 | 2081,0.02919
1280 | 2082,0.22133
1281 | 2085,0.03248
1282 | 2086,0.10566
1283 | 2087,0.02255
1284 | 2088,0.09862
1285 | 2089,0.02357
1286 | 2091,0.04802
1287 | 2092,0.05951
1288 | 2093,0.08525
1289 | 2094,0.03000
1290 | 2095,0.01727
1291 | 2096,0.06037
1292 | 2097,0.01855
1293 | 2099,0.03512
1294 | 2100,0.10391
1295 | 2103,0.02461
1296 | 2104,0.01123
1297 | 2105,0.02515
1298 | 2106,0.07906
1299 | 2107,0.00794
1300 | 2110,0.04355
1301 | 2113,0.01699
1302 | 2114,0.14074
1303 | 2115,0.03531
1304 | 2118,0.03490
1305 | 2119,0.03127
1306 | 2120,0.01949
1307 | 2121,0.03587
1308 | 2123,0.01005
1309 | 2124,0.09067
1310 | 2128,0.14750
1311 | 2129,0.07514
1312 | 2131,0.08503
1313 | 2132,0.01700
1314 | 2133,0.02216
1315 | 2134,0.05906
1316 | 2137,0.03232
1317 | 2140,0.04770
1318 | 2141,0.01954
1319 | 2142,0.01563
1320 | 2146,0.01486
1321 | 2148,0.10532
1322 | 2149,0.02174
1323 | 2151,0.06720
1324 | 2152,0.04447
1325 | 2153,0.07188
1326 | 2154,0.04140
1327 | 2155,0.05637
1328 | 2158,0.01305
1329 | 2159,0.06867
1330 | 2160,0.03837
1331 | 2161,0.03042
1332 | 2164,0.04002
1333 | 2166,0.02687
1334 | 2167,0.03435
1335 | 2170,0.04678
1336 | 2171,0.08816
1337 | 2172,0.02344
1338 | 2173,0.08385
1339 | 2175,0.05273
1340 | 2177,0.03775
1341 | 2178,0.04059
1342 | 2179,0.05498
1343 | 2180,0.03291
1344 | 2182,0.05139
1345 | 2183,0.03631
1346 | 2184,0.02606
1347 | 2186,0.02548
1348 | 2187,0.03413
1349 | 2188,0.01695
1350 | 2189,0.02062
1351 | 2191,0.03398
1352 | 2192,0.08822
1353 | 2194,0.03804
1354 | 2196,0.07417
1355 | 2197,0.02706
1356 | 2198,0.02650
1357 | 2199,0.03348
1358 | 2200,0.01044
1359 | 2201,0.01922
1360 | 2202,0.01587
1361 | 2205,0.01051
1362 | 2206,0.01850
1363 | 2207,0.02708
1364 | 2208,0.02650
1365 | 2212,0.02964
1366 | 2215,0.05830
1367 | 2216,0.02632
1368 | 2217,0.02374
1369 | 2218,0.01690
1370 | 2220,0.04279
1371 | 2221,0.04795
1372 | 2223,0.03052
1373 | 2225,0.02859
1374 | 2226,0.05107
1375 | 2231,0.01882
1376 | 2232,0.03153
1377 | 2233,0.02681
1378 | 2234,0.06704
1379 | 2235,0.00801
1380 | 2236,0.02967
1381 | 2237,0.05269
1382 | 2239,0.03611
1383 | 2240,0.01309
1384 | 2242,0.04269
1385 | 2247,0.01880
1386 | 2248,0.02975
1387 | 2250,0.01900
1388 | 2251,0.03993
1389 | 2255,0.04330
1390 | 2257,0.01948
1391 | 2258,0.07305
1392 | 2259,0.02458
1393 | 2260,0.01417
1394 | 2261,0.02861
1395 | 2263,0.02971
1396 | 2265,0.01724
1397 | 2266,0.03466
1398 | 2267,0.05877
1399 | 2268,0.01442
1400 | 2269,0.04526
1401 | 2270,0.03245
1402 | 2271,0.04772
1403 | 2272,0.01526
1404 | 2273,0.02494
1405 | 2275,0.03253
1406 | 2276,0.06099
1407 | 2277,0.02006
1408 | 2278,0.04400
1409 | 2279,0.04899
1410 | 2282,0.02442
1411 | 2285,0.03744
1412 | 2286,0.05944
1413 | 2289,0.03478
1414 | 2294,0.09244
1415 | 2296,0.02372
1416 | 2297,0.06246
1417 | 2299,0.03861
1418 | 2300,0.06687
1419 | 2301,0.10773
1420 | 2302,0.03072
1421 | 2305,0.03111
1422 | 2306,0.10653
1423 | 2307,0.02635
1424 | 2308,0.04719
1425 | 2310,0.07940
1426 | 2312,0.06054
1427 | 2316,0.05634
1428 | 2317,0.03562
1429 | 2319,0.05685
1430 | 2320,0.03358
1431 | 2322,0.01871
1432 | 2323,0.01290
1433 | 2324,0.01975
1434 | 2326,0.04282
1435 | 2327,0.01314
1436 | 2328,0.07800
1437 | 2329,0.04979
1438 | 2331,0.02051
1439 | 2335,0.04496
1440 | 2338,0.01817
1441 | 2339,0.13329
1442 | 2340,0.04357
1443 | 2341,0.01927
1444 | 2342,0.04732
1445 | 2345,0.06967
1446 | 2349,0.01793
1447 | 2350,0.02646
1448 | 2351,0.07339
1449 | 2353,0.05577
1450 | 2354,0.05816
1451 | 2355,0.03434
1452 | 2356,0.02947
1453 | 2357,0.11916
1454 | 2359,0.01796
1455 | 2360,0.05231
1456 | 2361,0.05944
1457 | 2362,0.06797
1458 | 2364,0.03897
1459 | 2366,0.03420
1460 | 2368,0.02515
1461 | 2369,0.05319
1462 | 2370,0.03754
1463 | 2372,0.02797
1464 | 2376,0.01048
1465 | 2381,0.03139
1466 | 2383,0.01892
1467 | 2384,0.04868
1468 | 2385,0.03577
1469 | 2386,0.03135
1470 | 2388,0.04801
1471 | 2390,0.04195
1472 | 2392,0.02156
1473 | 2394,0.10181
1474 | 2395,0.01987
1475 | 2396,0.01454
1476 | 2397,0.01828
1477 | 2399,0.12967
1478 | 2401,0.03258
1479 | 2402,0.01940
1480 | 2403,0.05024
1481 | 2405,0.15510
1482 | 2406,0.08019
1483 | 2407,0.06778
1484 | 2408,0.01616
1485 | 2410,0.01090
1486 | 2412,0.01911
1487 | 2415,0.03392
1488 | 2417,0.03537
1489 | 2418,0.04491
1490 | 2420,0.05985
1491 | 2421,0.07094
1492 | 2425,0.05582
1493 | 2426,0.00994
1494 | 2427,0.01440
1495 | 2428,0.00886
1496 | 2429,0.05726
1497 | 2434,0.03301
1498 | 2435,0.01799
1499 | 2436,0.01364
1500 | 2437,0.04230
1501 | 2438,0.04187
1502 | 2439,0.02345
1503 | 2441,0.02912
1504 | 2443,0.03290
1505 | 2444,0.01567
1506 | 2445,0.03595
1507 | 2446,0.03649
1508 | 2449,0.01830
1509 | 2452,0.09464
1510 | 2453,0.13340
1511 | 2454,0.03136
1512 | 2455,0.03659
1513 | 2456,0.03052
1514 | 2457,0.03155
1515 | 2460,0.14853
1516 | 2461,0.01705
1517 | 2463,0.01334
1518 | 2465,0.02707
1519 | 2466,0.02479
1520 | 2468,0.04172
1521 | 2469,0.06187
1522 | 2471,0.03763
1523 | 2472,0.03978
1524 | 2473,0.03195
1525 | 2474,0.02920
1526 | 2475,0.02542
1527 | 2476,0.03408
1528 | 2477,0.02201
1529 | 2478,0.04101
1530 | 2480,0.01530
1531 | 2483,0.02199
1532 | 2484,0.01612
1533 | 2486,0.11038
1534 | 2488,0.03490
1535 | 2489,0.02540
1536 | 2490,0.01932
1537 | 2495,0.03319
1538 | 2496,0.02413
1539 | 2497,0.05767
1540 | 2498,0.04548
1541 | 2503,0.02023
1542 | 2505,0.03898
1543 | 2506,0.04017
1544 | 2507,0.08172
1545 | 2508,0.10440
1546 | 2509,0.02482
1547 | 2514,0.04911
1548 | 2515,0.04724
1549 | 2521,0.04696
1550 | 2522,0.01058
1551 | 2523,0.01596
1552 | 2524,0.01865
1553 | 2526,0.03497
1554 | 2527,0.02788
1555 | 2529,0.03136
1556 | 2530,0.02253
1557 | 2531,0.07053
1558 | 2532,0.02951
1559 | 2534,0.10151
1560 | 2535,0.01855
1561 | 2536,0.00982
1562 | 2537,0.03778
1563 | 2539,0.02607
1564 | 2542,0.02126
1565 | 2545,0.04437
1566 | 2546,0.03411
1567 | 2548,0.04584
1568 | 2549,0.02040
1569 | 2550,0.02035
1570 | 2552,0.02689
1571 | 2554,0.01498
1572 | 2555,0.03694
1573 | 2558,0.01507
1574 | 2560,0.05857
1575 | 2563,0.01828
1576 | 2564,0.04681
1577 | 2565,0.02336
1578 | 2571,0.15878
1579 | 2574,0.11613
1580 | 2575,0.02168
1581 | 2576,0.02149
1582 | 2577,0.09334
1583 | 2578,0.03121
1584 | 2579,0.04127
1585 | 2581,0.09717
1586 | 2582,0.05975
1587 | 2583,0.01875
1588 | 2585,0.01814
1589 | 2586,0.02476
1590 | 2587,0.03847
1591 | 2591,0.02320
1592 | 2592,0.01191
1593 | 2594,0.02388
1594 | 2595,0.07373
1595 | 2597,0.04297
1596 | 2598,0.03410
1597 | 2599,0.02367
1598 | 2600,0.12612
1599 | 2602,0.05198
1600 | 2603,0.02238
1601 | 2605,0.02653
1602 | 2606,0.01202
1603 | 2607,0.05492
1604 | 2608,0.03092
1605 | 2609,0.03613
1606 | 2610,0.09298
1607 | 2611,0.01815
1608 | 2613,0.02608
1609 | 2615,0.01267
1610 | 2616,0.02702
1611 | 2621,0.03842
1612 | 2623,0.03878
1613 | 2624,0.03922
1614 | 2625,0.02619
1615 | 2626,0.02820
1616 | 2627,0.03136
1617 | 2628,0.01468
1618 | 2629,0.02910
1619 | 2630,0.01484
1620 | 2631,0.08308
1621 | 2633,0.03648
1622 | 2634,0.08456
1623 | 2635,0.03152
1624 | 2636,0.04244
1625 | 2638,0.07097
1626 | 2639,0.03632
1627 | 2640,0.02684
1628 | 2642,0.02651
1629 | 2643,0.03120
1630 | 2646,0.02402
1631 | 2647,0.07504
1632 | 2648,0.05214
1633 | 2649,0.01509
1634 | 2650,0.04906
1635 | 2652,0.01160
1636 | 2654,0.03310
1637 | 2658,0.02031
1638 | 2659,0.03451
1639 | 2660,0.03814
1640 | 2661,0.05525
1641 | 2662,0.02819
1642 | 2663,0.04194
1643 | 2665,0.04746
1644 | 2666,0.00991
1645 | 2667,0.03351
1646 | 2668,0.01227
1647 | 2671,0.02359
1648 | 2673,0.02938
1649 | 2674,0.11486
1650 | 2675,0.04480
1651 | 2676,0.06374
1652 | 2677,0.02233
1653 | 2678,0.01507
1654 | 2681,0.01927
1655 | 2683,0.01850
1656 | 2685,0.09201
1657 | 2687,0.02000
1658 | 2690,0.21009
1659 | 2692,0.02509
1660 | 2693,0.01494
1661 | 2694,0.01742
1662 | 2695,0.02532
1663 | 2696,0.08736
1664 | 2697,0.05599
1665 | 2698,0.16435
1666 | 2699,0.02788
1667 | 2701,0.02526
1668 | 2702,0.04373
1669 | 2703,0.11650
1670 | 2705,0.02585
1671 | 2711,0.03358
1672 | 2715,0.02153
1673 | 2720,0.07988
1674 | 2721,0.05813
1675 | 2722,0.02435
1676 | 2724,0.05628
1677 | 2726,0.01638
1678 | 2729,0.00870
1679 | 2730,0.01619
1680 | 2731,0.03537
1681 | 2732,0.01308
1682 | 2733,0.01810
1683 | 2734,0.04028
1684 | 2736,0.01835
1685 | 2738,0.01399
1686 | 2739,0.02650
1687 | 2740,0.02900
1688 | 2741,0.09031
1689 | 2743,0.01641
1690 | 2746,0.04867
1691 | 2747,0.03860
1692 | 2750,0.08970
1693 | 2751,0.05632
1694 | 2752,0.02213
1695 | 2753,0.01727
1696 | 2755,0.04726
1697 | 2756,0.02558
1698 | 2757,0.01338
1699 | 2761,0.02773
1700 | 2763,0.03749
1701 | 2764,0.12354
1702 | 2765,0.01428
1703 | 2767,0.05748
1704 | 2769,0.07018
1705 | 2772,0.02051
1706 | 2774,0.02712
1707 | 2776,0.01603
1708 | 2777,0.00816
1709 | 2778,0.02813
1710 | 2782,0.03325
1711 | 2783,0.01281
1712 | 2786,0.01568
1713 | 2790,0.03373
1714 | 2791,0.01689
1715 | 2792,0.01682
1716 | 2793,0.02464
1717 | 2794,0.03148
1718 | 2795,0.03491
1719 | 2796,0.06654
1720 | 2797,0.02587
1721 | 2800,0.05794
1722 | 2801,0.01415
1723 | 2802,0.01944
1724 | 2804,0.01986
1725 | 2805,0.05897
1726 | 2807,0.01781
1727 | 2808,0.04988
1728 | 2811,0.09092
1729 | 2812,0.03969
1730 | 2813,0.03330
1731 | 2814,0.11248
1732 | 2815,0.04435
1733 | 2817,0.03833
1734 | 2819,0.02178
1735 | 2821,0.03133
1736 | 2825,0.08389
1737 | 2828,0.02854
1738 | 2829,0.01648
1739 | 2831,0.02660
1740 | 2832,0.03261
1741 | 2834,0.03649
1742 | 2835,0.02011
1743 | 2836,0.11205
1744 | 2837,0.09120
1745 | 2838,0.05430
1746 | 2840,0.02631
1747 | 2841,0.01448
1748 | 2842,0.05237
1749 | 2843,0.01519
1750 | 2845,0.01909
1751 | 2847,0.03794
1752 | 2848,0.01103
1753 | 2850,0.01868
1754 | 2851,0.05085
1755 | 2854,0.02069
1756 | 2856,0.02652
1757 | 2857,0.01897
1758 | 2858,0.01707
1759 | 2859,0.02174
1760 | 2862,0.02729
1761 | 2864,0.01457
1762 | 2865,0.11344
1763 | 2866,0.01457
1764 | 2867,0.03504
1765 | 2868,0.01960
1766 | 2869,0.05116
1767 | 2871,0.04009
1768 | 2872,0.03012
1769 | 2873,0.01782
1770 | 2874,0.01050
1771 | 2875,0.04737
1772 | 2878,0.06599
1773 | 2882,0.02004
1774 | 2883,0.03852
1775 | 2886,0.00976
1776 | 2887,0.04198
1777 | 2888,0.02848
1778 | 2889,0.02274
1779 | 2892,0.02430
1780 | 2893,0.03402
1781 | 2895,0.03021
1782 | 2897,0.03411
1783 | 2898,0.03741
1784 | 2899,0.04867
1785 | 2903,0.02807
1786 | 2906,0.03745
1787 | 2907,0.02129
1788 | 2909,0.02304
1789 | 2913,0.02135
1790 | 2915,0.02362
1791 | 2916,0.02771
1792 | 2917,0.03818
1793 | 2918,0.04074
1794 | 2921,0.02306
1795 | 2924,0.04265
1796 | 2925,0.08505
1797 | 2928,0.07862
1798 | 2929,0.06167
1799 | 2931,0.07673
1800 | 2932,0.04694
1801 | 2934,0.03074
1802 | 2936,0.11101
1803 | 2937,0.04399
1804 | 2939,0.01897
1805 | 2940,0.07241
1806 | 2941,0.02516
1807 | 2942,0.01911
1808 | 2943,0.05654
1809 | 2946,0.01336
1810 | 2947,0.01821
1811 | 2949,0.03430
1812 | 2952,0.06134
1813 | 2953,0.04539
1814 | 2954,0.02921
1815 | 2956,0.00748
1816 | 2960,0.01001
1817 | 2962,0.07634
1818 | 2964,0.02391
1819 | 2965,0.01055
1820 | 2966,0.22931
1821 | 2967,0.01972
1822 | 2968,0.06440
1823 | 2971,0.02050
1824 | 2972,0.07471
1825 | 2973,0.11792
1826 | 2975,0.04871
1827 | 2976,0.03696
1828 | 2977,0.04068
1829 | 2981,0.01511
1830 | 2982,0.04459
1831 | 2983,0.02434
1832 | 2984,0.01860
1833 | 2985,0.13133
1834 | 2986,0.08253
1835 | 2988,0.02652
1836 | 2989,0.05183
1837 | 2990,0.01541
1838 | 2991,0.03312
1839 | 2993,0.05904
1840 | 2995,0.01050
1841 | 2998,0.03599
1842 | 2999,0.01970
1843 | 3000,0.18311
1844 | 3001,0.03882
1845 | 3003,0.05342
1846 | 3005,0.08938
1847 | 3007,0.05535
1848 | 3008,0.05003
1849 | 3009,0.03337
1850 | 3011,0.02011
1851 | 3012,0.02791
1852 | 3013,0.01724
1853 | 3014,0.03414
1854 | 3015,0.02177
1855 | 3016,0.02712
1856 | 3017,0.02194
1857 | 3018,0.03537
1858 | 3022,0.03078
1859 | 3023,0.03937
1860 | 3024,0.07144
1861 | 3026,0.01582
1862 | 3029,0.05043
1863 | 3031,0.02778
1864 | 3032,0.04605
1865 | 3033,0.01924
1866 | 3034,0.01154
1867 | 3035,0.07463
1868 | 3036,0.01939
1869 | 3037,0.02952
1870 | 3039,0.01228
1871 | 3040,0.04000
1872 | 3041,0.04349
1873 | 3042,0.04523
1874 | 3045,0.03032
1875 | 3050,0.00936
1876 | 3053,0.02107
1877 | 3055,0.01789
1878 | 3060,0.03198
1879 | 3061,0.02512
1880 | 3062,0.04288
1881 | 3064,0.05519
1882 | 3065,0.03365
1883 | 3067,0.09654
1884 | 3068,0.03489
1885 | 3072,0.02484
1886 | 3073,0.06498
1887 | 3076,0.04733
1888 | 3077,0.04777
1889 | 3079,0.01984
1890 | 3080,0.03978
1891 | 3082,0.02868
1892 | 3085,0.00861
1893 | 3086,0.02567
1894 | 3087,0.07999
1895 | 3090,0.03922
1896 | 3092,0.00794
1897 | 3094,0.01883
1898 | 3095,0.01763
1899 | 3101,0.06121
1900 | 3102,0.01399
1901 | 3103,0.07426
1902 | 3104,0.06389
1903 | 3105,0.08839
1904 | 3107,0.10772
1905 | 3108,0.05239
1906 | 3114,0.06414
1907 | 3115,0.04014
1908 | 3116,0.02192
1909 | 3118,0.02041
1910 | 3120,0.02258
1911 | 3121,0.07751
1912 | 3123,0.04449
1913 | 3125,0.01365
1914 | 3128,0.01644
1915 | 3129,0.13060
1916 | 3130,0.01384
1917 | 3133,0.03393
1918 | 3134,0.02546
1919 | 3136,0.09919
1920 | 3137,0.06523
1921 | 3138,0.01143
1922 | 3139,0.03732
1923 | 3140,0.07436
1924 | 3143,0.02803
1925 | 3144,0.01390
1926 | 3146,0.01483
1927 | 3148,0.05732
1928 | 3149,0.02404
1929 | 3150,0.02091
1930 | 3151,0.05787
1931 | 3153,0.04337
1932 | 3155,0.12177
1933 | 3157,0.02066
1934 | 3160,0.05559
1935 | 3161,0.06230
1936 | 3162,0.04502
1937 | 3163,0.02805
1938 | 3164,0.01483
1939 | 3165,0.06139
1940 | 3166,0.05171
1941 | 3168,0.01543
1942 | 3169,0.11459
1943 | 3170,0.04349
1944 | 3171,0.01715
1945 | 3176,0.02652
1946 | 3179,0.02250
1947 | 3180,0.02227
1948 | 3182,0.02551
1949 | 3183,0.02487
1950 | 3184,0.01791
1951 | 3185,0.02003
1952 | 3186,0.03444
1953 | 3188,0.01620
1954 | 3189,0.02901
1955 | 3191,0.03504
1956 | 3193,0.02604
1957 | 3194,0.01982
1958 | 3195,0.03905
1959 | 3196,0.01470
1960 | 3197,0.01353
1961 | 3199,0.01727
1962 | 3201,0.02664
1963 | 3202,0.05180
1964 | 3203,0.01794
1965 | 3205,0.03530
1966 | 3206,0.02320
1967 | 3207,0.02581
1968 | 3208,0.03562
1969 | 3209,0.03558
1970 | 3210,0.03100
1971 | 3211,0.05292
1972 | 3212,0.01857
1973 | 3213,0.08392
1974 | 3215,0.01498
1975 | 3216,0.02656
1976 | 3217,0.01308
1977 | 3218,0.01941
1978 | 3219,0.06661
1979 | 3221,0.06981
1980 | 3222,0.03026
1981 | 3226,0.02000
1982 | 3227,0.03924
1983 | 3229,0.02469
1984 | 3231,0.01520
1985 | 3234,0.15871
1986 | 3240,0.03433
1987 | 3242,0.04365
1988 | 3244,0.00935
1989 | 3246,0.04290
1990 | 3247,0.05710
1991 | 3248,0.03343
1992 | 3249,0.02140
1993 | 3250,0.01782
1994 | 3251,0.01385
1995 | 3252,0.00918
1996 | 3253,0.01173
1997 | 3255,0.02949
1998 | 3257,0.01438
1999 | 3258,0.01954
2000 | 3259,0.01299
2001 | 3265,0.01828
2002 |
--------------------------------------------------------------------------------
/Basic-DeepFM-model/output/FM_Mean0.23297_Std0.05576.csv:
--------------------------------------------------------------------------------
1 | id,target
2 | 0,0.37706
3 | 1,0.40747
4 | 2,0.28335
5 | 3,0.29426
6 | 4,0.25722
7 | 5,0.28061
8 | 6,0.36010
9 | 8,0.26813
10 | 10,0.50419
11 | 11,0.29652
12 | 12,0.22183
13 | 14,0.28447
14 | 15,0.44019
15 | 18,0.43666
16 | 21,0.32927
17 | 23,0.28054
18 | 24,0.25594
19 | 25,0.27155
20 | 27,0.26363
21 | 29,0.34340
22 | 30,0.37857
23 | 31,0.30758
24 | 32,0.41682
25 | 33,0.26732
26 | 37,0.39802
27 | 38,0.32117
28 | 39,0.39406
29 | 40,0.24067
30 | 41,0.39323
31 | 42,0.40359
32 | 44,0.28283
33 | 45,0.36268
34 | 47,0.31174
35 | 49,0.35913
36 | 51,0.27528
37 | 52,0.28072
38 | 53,0.35339
39 | 54,0.45116
40 | 55,0.33479
41 | 56,0.47107
42 | 57,0.34473
43 | 59,0.34868
44 | 60,0.45001
45 | 62,0.35572
46 | 63,0.39236
47 | 67,0.36394
48 | 68,0.27234
49 | 69,0.51613
50 | 70,0.33188
51 | 71,0.24334
52 | 73,0.36806
53 | 75,0.41980
54 | 76,0.37788
55 | 81,0.31707
56 | 82,0.33174
57 | 83,0.35205
58 | 86,0.34927
59 | 87,0.45646
60 | 88,0.27697
61 | 91,0.34399
62 | 92,0.42113
63 | 94,0.35314
64 | 97,0.29256
65 | 100,0.44001
66 | 102,0.30431
67 | 103,0.25371
68 | 105,0.41161
69 | 106,0.39540
70 | 108,0.36266
71 | 113,0.36232
72 | 114,0.37745
73 | 115,0.28234
74 | 118,0.37840
75 | 122,0.22426
76 | 124,0.30503
77 | 126,0.35986
78 | 128,0.30551
79 | 129,0.32311
80 | 130,0.35530
81 | 131,0.33789
82 | 132,0.39140
83 | 133,0.30195
84 | 134,0.31456
85 | 135,0.41466
86 | 136,0.24149
87 | 139,0.23444
88 | 140,0.36823
89 | 141,0.36059
90 | 146,0.25876
91 | 148,0.48031
92 | 151,0.31372
93 | 152,0.50250
94 | 154,0.26625
95 | 157,0.28990
96 | 158,0.24773
97 | 159,0.48179
98 | 161,0.46381
99 | 164,0.34177
100 | 165,0.48971
101 | 167,0.30779
102 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/README.md
--------------------------------------------------------------------------------
/SVD_jia_jia_demo/.idea/SVD_jia_jia_demo.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/SVD_jia_jia_demo/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/SVD_jia_jia_demo/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/SVD_jia_jia_demo/.idea/workspace.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 | 1565360683569
115 |
116 |
117 | 1565360683569
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
--------------------------------------------------------------------------------
/SVD_jia_jia_demo/main.py:
--------------------------------------------------------------------------------
1 | # -*- encoding:utf-8 -*-
2 | # author: wangle
3 |
4 | import numpy as np
5 | import random
6 | import time
7 | import pickle
8 | '''
9 |
10 | For background, see the write-up on Zhihu: https://zhuanlan.zhihu.com/p/42269534
11 | (Note that training turns out to be quite slow.)
12 | 
13 | SVD++ refines plain SVD. The main change is in how the interest (rating) matrix is computed: on top of the usual terms it
14 | adds an implicit-feedback vector for every item the user has interacted with (built, trained and updated via getY), together with the per-user set of interacted items.
15 | 
16 | 
17 | 
18 | Its main adjustment relative to SVD is the prediction formula:
19 | rating = self.avg + self.bi[iid] + self.bu[uid] + np.sum(self.qi[iid] * (self.pu[uid] + u_impl_prf))  # rating prediction formula
20 | For the reasoning behind the extra term, see https://blog.csdn.net/winone361/article/details/49427627 and https://blog.csdn.net/akiyamamio11/article/details/79313339
21 |
22 |
23 | '''
24 | import numpy as np
25 | import random
26 |
27 | '''
28 | author:huang
29 | svd++ algorithm
30 | '''
31 |
32 |
33 | class SVDPP:
34 | def __init__(self, mat, K=20):
35 | self.mat = np.array(mat)
36 | self.K = K
37 | self.bi = {}
38 | self.bu = {}
39 | self.qi = {}
40 | self.pu = {}
41 | self.avg = np.mean(self.mat[:, 2])
42 | self.y = {}
43 | self.u_dict = {}
44 | for i in range(self.mat.shape[0]):
45 | uid = self.mat[i, 0]
46 | iid = self.mat[i, 1]
47 | self.u_dict.setdefault(uid, [])
48 | self.u_dict[uid].append(iid)
49 | self.bi.setdefault(iid, 0)
50 | self.bu.setdefault(uid, 0)
51 | self.qi.setdefault(iid, np.random.random((self.K, 1)) / 10 * np.sqrt(self.K))
52 | self.pu.setdefault(uid, np.random.random((self.K, 1)) / 10 * np.sqrt(self.K))
53 | self.y.setdefault(iid, np.zeros((self.K, 1)) + .1)
54 |
55 | def predict(self, uid, iid):  # predict the rating for a (user, item) pair
56 | # setdefault creates bi, bu, qi, pu and the interacted-item list u_dict for a user/item that has not been seen before, initialising them to zero
57 | self.bi.setdefault(iid, 0)
58 | self.bu.setdefault(uid, 0)
59 | self.qi.setdefault(iid, np.zeros((self.K, 1)))
60 | self.pu.setdefault(uid, np.zeros((self.K, 1)))
61 | self.y.setdefault(iid, np.zeros((self.K, 1)))  # y is keyed by item id
62 | self.u_dict.setdefault(uid, [])
63 | u_impl_prf, sqrt_Nu = self.getY(uid, iid)
64 | rating = self.avg + self.bi[iid] + self.bu[uid] + np.sum(self.qi[iid] * (self.pu[uid] + u_impl_prf))  # rating prediction formula
65 | # ratings lie in [1, 5], so clip predictions above 5 to 5 and below 1 to 1
66 | if rating > 5:
67 | rating = 5
68 | if rating < 1:
69 | rating = 1
70 | return rating
71 |
72 | # compute sqrt_Nu and the implicit-feedback sum over y_j -- the core of SVD++
73 | def getY(self, uid, iid):
74 | Nu = self.u_dict[uid]
75 | I_Nu = len(Nu)
76 | sqrt_Nu = np.sqrt(I_Nu)
77 | y_u = np.zeros((self.K, 1))
78 | if I_Nu == 0:
79 | u_impl_prf = y_u
80 | else:
81 | for i in Nu:
82 | y_u += self.y[i]
83 | u_impl_prf = y_u / sqrt_Nu
84 |
85 | return u_impl_prf, sqrt_Nu
86 |
87 | def train(self, steps=30, gamma=0.04, Lambda=0.15):  # training loop; steps is the number of epochs
88 | print('train data size', self.mat.shape)
89 | for step in range(steps):
90 | print('step', step + 1, 'is running')
91 | KK = np.random.permutation(self.mat.shape[0])  # stochastic gradient descent: KK is a random shuffle of the sample indices
92 | rmse = 0.0
93 | for i in range(self.mat.shape[0]):
94 | j = KK[i]
95 | uid = self.mat[j, 0]
96 | iid = self.mat[j, 1]
97 | rating = self.mat[j, 2]
98 | predict = self.predict(uid, iid)
99 | u_impl_prf, sqrt_Nu = self.getY(uid, iid)
100 | eui = rating - predict
101 | rmse += eui ** 2
102 | self.bu[uid] += gamma * (eui - Lambda * self.bu[uid])
103 | self.bi[iid] += gamma * (eui - Lambda * self.bi[iid])
104 | self.pu[uid] += gamma * (eui * self.qi[iid] - Lambda * self.pu[uid])
105 | self.qi[iid] += gamma * (eui * (self.pu[uid] + u_impl_prf) - Lambda * self.qi[iid])
106 | for j in self.u_dict[uid]:
107 | self.y[j] += gamma * (eui * self.qi[j] / sqrt_Nu - Lambda * self.y[j])
108 |
109 | gamma = 0.93 * gamma  # decay the learning rate by a factor of 0.93 after every epoch
110 | print('rmse is', np.sqrt(rmse / self.mat.shape[0]))
111 |
112 | def test(self, test_data):  # evaluate RMSE on held-out data
113 |
114 | test_data = np.array(test_data)
115 | print('test data size', test_data.shape)
116 | rmse = 0.0
117 | for i in range(test_data.shape[0]):
118 | uid = test_data[i, 0]
119 | iid = test_data[i, 1]
120 | rating = test_data[i, 2]
121 | eui = rating - self.predict(uid, iid)
122 | rmse += eui ** 2
123 | print('rmse of test data is', np.sqrt(rmse / test_data.shape[0]))
124 |
125 |
126 | def getMLData():  # load the MovieLens training and test sets
127 | import re
128 | f = open("../data/ml-100k/u1.base", 'r')
129 | lines = f.readlines()
130 | f.close()
131 | data = []
132 | for line in lines:
133 | fields = re.split('\t|\n', line)
134 | if int(fields[2]) != 0:
135 | data.append([int(i) for i in fields[:3]])
136 | train_data = data
137 | f = open("../data/ml-100k/u1.test", 'r')
138 | lines = f.readlines()
139 | f.close()
140 | data = []
141 | for line in lines:
142 | fields = re.split('\t|\n', line)
143 | if int(fields[2]) != 0:
144 | data.append([int(i) for i in fields[:3]])
145 | test_data = data
146 |
147 | return train_data, test_data
148 |
149 |
150 | train_data, test_data = getMLData()
151 | a = SVDPP(train_data, 30)
152 | a.train()
153 | a.test(test_data)
154 |
155 |
156 |
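# --- Illustrative sketch (not part of the original script) ---
# A minimal, self-contained check of the SVD++ prediction formula used above,
# on made-up toy numbers; mu, b_u, b_i, q_i, p_u and the y_j vectors are hypothetical
# stand-ins for the class attributes avg, bu, bi, qi, pu and y.
import numpy as np

K = 3                                   # latent dimension (toy value)
mu, b_u, b_i = 3.5, 0.2, -0.1           # global mean and bias terms
p_u = np.array([0.1, 0.0, 0.2])         # user factors
q_i = np.array([0.3, 0.1, -0.2])        # item factors
Y = [np.array([0.05, 0.0, 0.1]),        # implicit-feedback vectors y_j for the
     np.array([0.0, 0.02, 0.0])]        # two items this user has interacted with

u_impl_prf = sum(Y) / np.sqrt(len(Y))   # (1 / sqrt(|N(u)|)) * sum_j y_j
rating = mu + b_i + b_u + np.dot(q_i, p_u + u_impl_prf)
print(round(float(rating), 4))          # same formula as SVDPP.predict, on toy data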
--------------------------------------------------------------------------------
/SVD_recom_demo/main.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import numpy as np
3 | import time
4 | import pickle
5 | '''
6 | For background, see the write-up on Zhihu: https://zhuanlan.zhihu.com/p/42147194
7 | This SVD variant is essentially a simplified LFM (latent factor model).
8 | '''
9 | class SVD(object):
10 | """
11 | implementation of SVD for CF “https://zhuanlan.zhihu.com/p/42147194/”
12 | Reference:
13 | A Guide to Singular Value Decomposition for Collaborative Filtering
14 |
15 | Core idea: when predicting user i's rating of item j, SVD combines a user feature vector, an item feature vector,
16 | and bias terms that capture how far the rating deviates from the global average.
17 | 
18 | It is essentially the same as the LFM algorithm: the interest matrix is approximated by the formula below, with the
19 | parameters being the U matrix, the M matrix, and the biases bu and bi. Training proceeds as usual, and no explicit
20 | positive/negative sampling is needed because the observed rating values serve directly as labels.
21 | 
22 | In essence the model learns the user feature matrix U and the item feature matrix M. The added bi and bu terms supply
23 | information the factor vectors alone miss: bi is item i's deviation from the mean rating, bu is user u's deviation from the mean rating.
24 | p = self.meanV + self.bu[uid] + self.bi[iid] + np.sum(self.U[uid] * self.M[iid])
25 | """
26 |
27 | def __init__(self, epoch, eta, userNums, itemNums, ku=0.001, km=0.001,f=30, save_model=False):
28 | super(SVD, self).__init__()
29 | self.epoch = epoch
30 | self.userNums = userNums
31 | self.itemNums = itemNums
32 | self.eta = eta
33 | self.ku = ku
34 | self.km = km
35 | self.f = f
36 | self.save_model = save_model
37 |
38 | self.U = None
39 | self.M = None
40 |
41 | def fit(self, train, val=None):
42 | rateNums = train.shape[0]
43 | self.meanV = np.sum(train[:, 2]) / rateNums
44 | initv = np.sqrt((self.meanV - 1) / self.f)
45 | self.U = initv + np.random.uniform(-0.01, 0.01, (self.userNums + 1, self.f))
46 | self.M = initv + np.random.uniform(-0.01, 0.01, (self.itemNums + 1, self.f))
47 | self.bu = np.zeros(self.userNums + 1)
48 | self.bi = np.zeros(self.itemNums + 1)
49 |
50 | start = time.time()
51 | for i in range(self.epoch):
52 | sumRmse = 0.0
53 | for sample in train:
54 | uid = sample[0]
55 | iid = sample[1]
56 | vij = float(sample[2])
57 | # p(U_i,M_j) = mu + b_i + b_u + U_i^TM_j
58 | p = self.meanV + self.bu[uid] + self.bi[iid] + \
59 | np.sum(self.U[uid] * self.M[iid])
60 | error = vij - p
61 | sumRmse += error ** 2
62 | # gradients for U[uid] and M[iid]
63 | deltaU = error * self.M[iid] - self.ku * self.U[uid]
64 | deltaM = error * self.U[uid] - self.km * self.M[iid]
65 | # parameter updates
66 | self.U[uid] += self.eta * deltaU
67 | self.M[iid] += self.eta * deltaM
68 |
69 | self.bu[uid] += self.eta * (error - self.ku * self.bu[uid])
70 | self.bi[iid] += self.eta * (error - self.km * self.bi[iid])
71 |
72 | trainRmse = np.sqrt(sumRmse / rateNums)
73 |
74 | if val is not None and val.any():
75 | _, valRmse = self.evaluate(val)
76 | print("Epoch %d cost time %.4f, train RMSE: %.4f, validation RMSE: %.4f" % \
77 | (i, time.time() - start, trainRmse, valRmse))
78 | else:
79 | print("Epoch %d cost time %.4f, train RMSE: %.4f" % \
80 | (i, time.time() - start, trainRmse))
81 |
82 | if self.save_model:
83 | save_model='../data'
84 | model = (self.meanV, self.bu, self.bi, self.U, self.M)
85 | pickle.dump(model, open(save_model + '/svcRecModel.pkl', 'wb'))
86 |
87 | def evaluate(self, val):
88 |
89 | '''
90 | Given a user id and an item id, compute the predicted interest score from the learned parameters.
91 | '''
92 | loss = 0
93 | pred = []
94 | for sample in val:
95 | uid = sample[0]
96 | iid = sample[1]
97 | if uid > self.userNums or iid > self.itemNums:
98 | continue
99 |
100 | predi = self.meanV + self.bu[uid] + self.bi[iid] \
101 | + np.sum(self.U[uid] * self.M[iid])
102 | if predi < 1:
103 | predi = 1
104 | elif predi > 5:
105 | predi = 5
106 | pred.append(predi)
107 |
108 | if val.shape[1] == 3:
109 | vij = sample[2]
110 | loss += (predi - vij) ** 2
111 |
112 | if val.shape[1] == 3:
113 | rmse = np.sqrt(loss / val.shape[0])
114 | return pred, rmse
115 |
116 | return pred
117 |
118 | def predict(self, test):
119 |
120 | return self.evaluate(test)
121 |
122 |
123 | def test():
124 | import pandas as pd
125 | data_path = '../data/ml-1m/ratings.dat'
126 | data = pd.read_csv(data_path, sep='::', header=None,names=['user', 'item', 'rate', 'time'], engine='python')
127 |
128 | data = data.sample(frac=1)
129 | print(data.head())
130 |
131 | del data['time']
132 | trainNum = int(data.shape[0] * 0.8)
133 | train = data[:trainNum].values
134 | val = data[trainNum:].values
135 |
136 | userNums = data['user'].max()
137 | itemNums = data['item'].max()
138 | svd = SVD(35, 0.001, userNums, itemNums, f=50)
139 | svd.fit(train, val=val)
140 | svd.predict(val)
141 | if __name__ == '__main__':
142 | test()
143 |
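# --- Illustrative sketch (not part of the original script) ---
# One SGD step of the biased matrix-factorisation update used in SVD.fit above,
# spelled out on made-up toy numbers (uid, iid, rate and all sizes are hypothetical).
import numpy as np

f, eta, ku, km = 4, 0.001, 0.001, 0.001
meanV = 3.6
U = np.random.uniform(-0.01, 0.01, (10, f))   # toy user factors
M = np.random.uniform(-0.01, 0.01, (20, f))   # toy item factors
bu = np.zeros(10)
bi = np.zeros(20)

uid, iid, rate = 3, 7, 4.0                    # one hypothetical training sample
pred = meanV + bu[uid] + bi[iid] + np.dot(U[uid], M[iid])
error = rate - pred
deltaU = error * M[iid] - ku * U[uid]         # same gradient expressions as in fit()
deltaM = error * U[uid] - km * M[iid]
U[uid] += eta * deltaU
M[iid] += eta * deltaM
bu[uid] += eta * (error - ku * bu[uid])
bi[iid] += eta * (error - km * bi[iid])
print(round(float(error), 4))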
--------------------------------------------------------------------------------
/recommendation-FFM-Demo/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/recommendation-FFM-Demo/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/recommendation-FFM-Demo/.idea/recommendation-FFM-Demo.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/recommendation-FFM-Demo/.idea/workspace.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 | 1565504252390
113 |
114 |
115 | 1565504252390
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
--------------------------------------------------------------------------------
/recommendation-FFM-Demo/FFM_model.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 | import pandas as pd
3 | import numpy as np
4 | import os
5 | '''
6 | A walkthrough of the original code: https://www.jianshu.com/p/781cde3d5f3d (most of the models in this repo follow that series of posts)
7 | 
8 | FFM builds on FM by assigning every feature to a field, which effectively adds a field index to the FM equation.
9 | A field relates to features 1:n, i.e. several features belong to the same field, and when crossing two features we also have to account for the latent vector tied to the other feature's field.
10 | 
11 | 
12 | 
13 | The post "Recommender systems meet deep learning (2) -- FFM theory and practice" explains this code; the two are meant to be read together.
14 | 
15 | The steps are: (1) generate the data set and assign each feature to a field;
16 | (2) define the three groups of variables that drive the FFM equation: the bias, the first-order feature weights, and the cross-feature weights.
17 | 
18 | They correspond to w0, wi, and the pairwise product terms of the model equation y(x) given in the linked post.
19 | 
20 | The last group involves the latent-vector length k: for every feature xi and every field fj of the other features,
21 | a latent vector v_{i,fj} is learned, giving n*f latent vectors of length k, so the third group of parameters has size n*f*k.
22 | (3) Train the network: feed the inputs and the defined weights through the FFM equation, mostly inside inference(), to get the estimate.
23 | Unlike FM, the equation cannot be simplified first, so the cross terms need a double loop. [This is the core part: organising the variables and inputs.]
24 | (4) Define the loss with a regularisation term and run the outer training loop.
25 | 
26 | 
27 | The core idea is simply the FM equation plus the notion of a Field: different features may share a field, and this extra partitioning
28 | distinguishes features with different kinds of meaning. FM can be seen as the special case of FFM in which all features belong to a single field.
29 | From the field-aware property the FFM model equation follows.
30 |
31 | '''
32 |
33 | # Two different fields are defined here: some features belong to one field and the rest to the other.
34 | # The three sizes below shape the weights: 20 is the number of features, 2 the number of fields, and 3 the length of the latent vector v.
35 | input_x_size = 20
36 | field_size = 2
37 | vector_dimension = 3
38 |
39 | total_plan_train_steps = 1000
40 | # plain SGD: one gradient step per sample
41 | batch_size = 1
42 |
43 | all_data_size = 1000
44 |
45 | lr = 0.01
46 |
47 | MODEL_SAVE_PATH = "TFModel"
48 | MODEL_NAME = "FFM"
49 |
50 | def createTwoDimensionWeight(input_x_size,field_size,vector_dimension):
51 | # weights for the third (pairwise) part of the equation, the most complex piece: with latent vectors of length k, FFM has n*f*k second-order parameters, where n is the number of features, f the number of fields and k the latent-vector length
52 | weights = tf.truncated_normal([input_x_size,field_size,vector_dimension])
53 |
54 | tf_weights = tf.Variable(weights)
55 |
56 | return tf_weights
57 |
58 | def createOneDimensionWeight(input_x_size):
59 | weights = tf.truncated_normal([input_x_size])
60 | tf_weights = tf.Variable(weights)
61 | return tf_weights
62 |
63 | def createZeroDimensionWeight():
64 | weights = tf.truncated_normal([1])
65 | tf_weights = tf.Variable(weights)
66 | return tf_weights
67 |
68 | def inference(input_x,input_x_field,zeroWeights,oneDimWeights,thirdWeight):
69 | """计算回归模型输出的值
70 | 这里是主模型结构
71 | """
72 |
73 | # the first two parts of the equation: bias plus the linear term
74 | secondValue = tf.reduce_sum(tf.multiply(oneDimWeights,input_x,name='secondValue'))
75 |
76 | firstTwoValue = tf.add(zeroWeights, secondValue, name="firstTwoValue")
77 |
78 | # the third (pairwise) part of the equation, accumulated with a double loop
79 | thirdValue = tf.Variable(0.0,dtype=tf.float32) # running sum of the pairwise terms
80 | input_shape = input_x_size
81 | # follows the formula literally: the outer sum runs over i, the inner loop starts at i+1
82 | for i in range(input_shape):
83 | featureIndex1 = i
84 | fieldIndex1 = int(input_x_field[i])
85 | for j in range(i+1,input_shape):
86 |
87 | ######################## part one: the vi * vj latent-vector product
88 | featureIndex2 = j
89 | fieldIndex2 = int(input_x_field[j])
90 |
91 | # V_{i,fj}: note the indices -- in [featureIndex1,fieldIndex2,i] the feature index is paired with the *other* feature's field index, selecting the field-aware latent vector
92 | vectorLeft = tf.convert_to_tensor([[featureIndex1,fieldIndex2,i] for i in range(vector_dimension)])
93 | weightLeft = tf.gather_nd(thirdWeight,vectorLeft) # multi-dimensional indexing: pulls out the entries of thirdWeight at the positions listed in vectorLeft
94 | weightLeftAfterCut = tf.squeeze(weightLeft) # squeeze drops every dimension of size 1 from the input tensor
95 | # V_{j,fi} computation
96 | vectorRight = tf.convert_to_tensor([[featureIndex2,fieldIndex1,i] for i in range(vector_dimension)])
97 | weightRight = tf.gather_nd(thirdWeight,vectorRight)
98 | weightRightAfterCut = tf.squeeze(weightRight)
99 |
100 | tempValue = tf.reduce_sum(tf.multiply(weightLeftAfterCut,weightRightAfterCut))
101 |
102 | ####################### the xi * xj part: fetch the two feature values being crossed
103 | indices2 = [i]
104 | indices3 = [j]
105 |
106 | xi = tf.squeeze(tf.gather_nd(input_x, indices2))
107 | xj = tf.squeeze(tf.gather_nd(input_x, indices3))
108 |
109 | product = tf.reduce_sum(tf.multiply(xi, xj))
110 |
111 | secondItemVal = tf.multiply(tempValue, product)
112 | # accumulate the pairwise term into the running sum; a bare tf.assign op here would never be executed, so the tensor is re-bound instead
113 | thirdValue = tf.add(thirdValue, secondItemVal)
114 |
115 | return tf.add(firstTwoValue,thirdValue)
116 |
117 | def gen_data():
118 | '''
119 | A weak point: the data are generated at random, so the distinct effect each field has on its features is hard to observe.
120 | '''
121 | labels = [-1,1]
122 | y = [np.random.choice(labels,1)[0] for _ in range(all_data_size)]
123 | x_field = [i // 10 for i in range(input_x_size)]
124 | x = np.random.randint(0,2,size=(all_data_size,input_x_size))
125 |
126 | print('generated features:',x.shape,x)
127 | print('randomly generated class labels:',len(y),y)
128 | print('field assigned to each feature (arbitrary, since the features carry no real meaning):',x_field)
129 | return x,y,x_field
130 |
131 |
132 | if __name__ == '__main__':
133 | global_step = tf.Variable(0,trainable=False)
134 | trainx,trainy,trainx_field = gen_data()
135 | #
136 | input_x = tf.placeholder(tf.float32,[input_x_size ])
137 | input_y = tf.placeholder(tf.float32)
138 | #
139 |
140 | lambda_w = tf.constant(0.001, name='lambda_w')
141 | lambda_v = tf.constant(0.001, name='lambda_v')
142 |
143 | ### the three weight groups: the bias w0, the first-order weights wi, and the field-aware latent vectors
144 | zeroWeights = createZeroDimensionWeight()
145 |
146 | oneDimWeights = createOneDimensionWeight(input_x_size)
147 |
148 | thirdWeight = createTwoDimensionWeight(input_x_size, # weights for the second-order (pairwise) terms
149 | field_size,
150 | vector_dimension) # n * f * k
151 |
152 | y_ = inference(input_x, trainx_field,zeroWeights,oneDimWeights,thirdWeight)
153 |
154 |
155 |
156 | l2_norm = tf.reduce_sum(
157 | tf.add(
158 | tf.multiply(lambda_w, tf.pow(oneDimWeights, 2)),
159 | tf.reduce_sum(tf.multiply(lambda_v, tf.pow(thirdWeight, 2)),axis=[1,2])
160 | )
161 | )
162 | # add an L2 regularisation term so the model does not become overly complex
163 | 
164 | loss = tf.log(1 + tf.exp(-input_y * y_)) + l2_norm  # logistic loss for labels in {-1, 1}
165 |
166 | train_step = tf.train.GradientDescentOptimizer(learning_rate=lr).minimize(loss)
167 |
168 | saver = tf.train.Saver()
169 | with tf.Session() as sess:
170 | sess.run(tf.global_variables_initializer())
171 |
172 | # run 1000 epochs of training over the FFM network
173 | for i in range(total_plan_train_steps):
174 | for t in range(all_data_size):
175 | input_x_batch = trainx[t]
176 | input_y_batch = trainy[t]
177 | predict_loss,_, steps = sess.run([loss,train_step, global_step],
178 | feed_dict={input_x: input_x_batch, input_y: input_y_batch})
179 |
180 | print("After {step} training step(s) , loss on training batch is {predict_loss} "
181 | .format(step=steps, predict_loss=predict_loss))
182 |
183 | saver.save(sess, os.path.join(MODEL_SAVE_PATH, MODEL_NAME), global_step=steps)
184 | writer = tf.summary.FileWriter(os.path.join(MODEL_SAVE_PATH, MODEL_NAME), tf.get_default_graph())
185 | writer.close()
186 | #
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
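# --- Illustrative sketch (not part of the original script) ---
# The FFM pairwise term computed in plain numpy, mirroring the double loop in inference();
# all sizes and the random weights below are made up for illustration.
import numpy as np

n, f, k = 6, 2, 3                              # features, fields, latent dimension
rng = np.random.RandomState(0)
V = rng.normal(size=(n, f, k))                 # V[i, field, :] is the latent vector v_{i,field}
x = rng.randint(0, 2, size=n).astype(float)    # one sample
field_of = [i // 3 for i in range(n)]          # toy feature -> field assignment

third = 0.0
for i in range(n):
    for j in range(i + 1, n):
        # v_{i, field(j)} . v_{j, field(i)} * x_i * x_j  -- same pairing as in inference()
        third += np.dot(V[i, field_of[j]], V[j, field_of[i]]) * x[i] * x[j]
print(round(third, 4))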
--------------------------------------------------------------------------------
/recommendation-FFM-Demo/TFModel/FFM-0.data-00000-of-00001:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/recommendation-FFM-Demo/TFModel/FFM-0.data-00000-of-00001
--------------------------------------------------------------------------------
/recommendation-FFM-Demo/TFModel/FFM-0.index:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/recommendation-FFM-Demo/TFModel/FFM-0.index
--------------------------------------------------------------------------------
/recommendation-FFM-Demo/TFModel/FFM-0.meta:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/recommendation-FFM-Demo/TFModel/FFM-0.meta
--------------------------------------------------------------------------------
/recommendation-FFM-Demo/TFModel/checkpoint:
--------------------------------------------------------------------------------
1 | model_checkpoint_path: "FFM-0"
2 | all_model_checkpoint_paths: "FFM-0"
3 |
--------------------------------------------------------------------------------
/recommendation-FM-demo/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/recommendation-FM-demo/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/recommendation-FM-demo/.idea/recommendation-FM-demo.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/recommendation-FM-demo/.idea/workspace.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 | x_train
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 | 1565359186405
145 |
146 |
147 | 1565359186405
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 |
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 |
272 |
273 |
274 |
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 |
287 |
288 |
289 |
290 |
291 |
292 |
293 |
294 |
295 |
296 |
297 |
298 |
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
316 |
317 |
318 |
319 |
320 |
321 |
322 |
323 |
324 |
325 |
326 |
327 |
328 |
329 |
330 |
331 |
332 |
333 |
334 |
335 |
336 |
337 |
338 |
339 |
340 |
341 |
342 |
343 |
344 |
345 |
346 |
347 |
348 |
349 |
350 |
351 |
352 |
353 |
354 |
355 |
356 |
357 |
358 |
359 |
360 |
361 |
362 |
363 |
364 |
365 |
366 |
367 |
368 |
369 |
370 |
371 |
372 |
373 |
374 |
375 |
376 |
377 |
--------------------------------------------------------------------------------
/recommendation-FM-demo/FM_data.rar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isthegoal/recommendation_model_master/77c2117631e779527b92e04dc53358a88abbbbbc/recommendation-FM-demo/FM_data.rar
--------------------------------------------------------------------------------
/recommendation-FM-demo/FM_model.py:
--------------------------------------------------------------------------------
1 | # -*- encoding:utf-8 -*-
2 | from itertools import count
3 | from collections import defaultdict
4 | from scipy.sparse import csr
5 | import numpy as np
6 | import pandas as pd
7 | import numpy as np
8 | from sklearn.feature_extraction import DictVectorizer
9 | import tensorflow as tf
10 | from tqdm import tqdm_notebook as tqdm
11 |
12 |
13 | def vectorize_dic(dic,ix=None,p=None,n=0,g=0):
14 | """
15 | dic -- dictionary of feature lists. Keys are the name of features
16 | ix -- index generator (default None)
17 | p -- dimension of feature space (number of columns in the sparse matrix) (default None)
18 | Feeding the raw dictionary into the network directly would not work, so this extracts features from the ids and builds a vectorised (one-hot style) representation.
19 | 
20 | The dictionary of items is turned into that vector representation. The way the ids are mapped to vectors is worth a look: before compression the meaningful matrix is the one built from occurrence counts,
21 | with the all-zeros and all-ones arrays also taking part in assembling the sparse matrix.
22 | csr.csr_matrix: [[0. 2. 0. ... 0. 0. 0.]
23 | [0. 1. 1. ... 0. 0. 0.]
24 | [0. 1. 0. ... 0. 0. 0.]
25 | ...
26 | [0. 0. 0. ... 0. 0. 0.]
27 | [0. 0. 0. ... 0. 0. 0.]
28 | [0. 0. 0. ... 0. 0. 0.]]
29 | The vector conversion itself is just this accumulation of occurrence counts:
30 | for k,lis in dic.items():
31 | for t in range(len(lis)):
32 | ix[str(lis[t]) + str(k)] = ix.get(str(lis[t]) + str(k),0) + 1
33 | col_ix[i+t*g] = ix[str(lis[t]) + str(k)]
34 | i += 1
35 | 
36 | A sparse np.array often needs to be compressed; scipy provides sparse.csr_matrix (csr: Compressed Sparse Row matrix) and sparse.csc_matrix (csc: Compressed Sparse Column matrix) for this.
37 | 
38 | In short, the feature dictionary is converted into a two-dimensional matrix and then compressed, turning id indices into feature vectors.
39 | 
40 | 
41 | The resulting vectors are then fed into the model for training. Since this is a linear model, the handling is fairly simple, and fields are not treated separately here.
42 | 
43 | 
44 | 
45 | """
46 | print('dic:',dic)
47 | if ix is None:
48 | ix = dict()
49 |
50 | nz = n * g
51 |
52 | col_ix = np.empty(nz,dtype = int)
53 |
54 | i = 0
55 | for k,lis in dic.items():
56 | for t in range(len(lis)):
57 | ix[str(lis[t]) + str(k)] = ix.get(str(lis[t]) + str(k),0) + 1
58 | col_ix[i+t*g] = ix[str(lis[t]) + str(k)]
59 | i += 1
60 |
61 | row_ix = np.repeat(np.arange(0,n),g)
62 | data = np.ones(nz)
63 | if p is None:
64 | p = len(ix)
65 |
66 | ixx = np.where(col_ix < p) # row_ix[ixx] and col_ix[ixx] index the accumulated count matrix: rows correspond to samples, columns to the encoded feature values
67 | print('csr.csr_matrix:', csr.csr_matrix((data[ixx],(row_ix[ixx],col_ix[ixx])),shape=(n,p)).todense())
68 | print('csr.csr_matrix shape:', csr.csr_matrix((data[ixx], (row_ix[ixx], col_ix[ixx])), shape=(n, p)).todense().shape)
69 | print('data[ixx] shape :',data)
70 | print('row_ix[ixx] shape :', row_ix)
71 | print('col_ix[ixx] shape :',col_ix)
72 |
73 | return csr.csr_matrix((data[ixx],(row_ix[ixx],col_ix[ixx])),shape=(n,p)),ix
74 |
75 |
76 | def batcher(X_, y_=None, batch_size=-1):
77 | n_samples = X_.shape[0]
78 |
79 | if batch_size == -1:
80 | batch_size = n_samples
81 | if batch_size < 1:
82 | raise ValueError('Parameter batch_size={} is unsupported'.format(batch_size))
83 |
84 | for i in range(0, n_samples, batch_size):
85 | upper_bound = min(i + batch_size, n_samples)
86 | ret_x = X_[i:upper_bound]
87 | ret_y = None
88 | if y_ is not None:
89 | ret_y = y_[i:i + batch_size]
90 | yield (ret_x, ret_y)
91 |
92 |
93 | cols = ['user','item','rating','timestamp']
94 |
95 | train = pd.read_csv('data/ua.base',delimiter='\t',names = cols)
96 | test = pd.read_csv('data/ua.test',delimiter='\t',names = cols)
97 |
98 | x_train,ix = vectorize_dic({'users':train['user'].values,
99 | 'items':train['item'].values},n=len(train.index),g=2)
100 | print('x_train:',x_train)
101 |
102 | x_test,ix = vectorize_dic({'users':test['user'].values,
103 | 'items':test['item'].values},ix,x_train.shape[1],n=len(test.index),g=2)
104 |
105 |
106 | print(x_train)
107 | y_train = train['rating'].values
108 | y_test = test['rating'].values
109 |
110 | x_train = x_train.todense()
111 | x_test = x_test.todense()
112 |
113 | print('x_train todense:', x_train)
114 |
115 | print(x_train.shape)
116 | print (x_test.shape)
117 |
118 |
119 | n,p = x_train.shape
120 |
121 | k = 10
122 |
123 |
124 | x = tf.placeholder('float',[None,p])
125 |
126 | y = tf.placeholder('float',[None,1])
127 |
128 | w0 = tf.Variable(tf.zeros([1]))
129 | w = tf.Variable(tf.zeros([p]))
130 |
131 | v = tf.Variable(tf.random_normal([k,p],mean=0,stddev=0.01))
132 |
133 | #y_hat = tf.Variable(tf.zeros([n,1]))
134 |
135 | linear_terms = tf.add(w0,tf.reduce_sum(tf.multiply(w,x),1,keep_dims=True)) # n * 1
136 | pair_interactions = 0.5 * tf.reduce_sum(
137 | tf.subtract(
138 | tf.pow(
139 | tf.matmul(x,tf.transpose(v)),2),
140 | tf.matmul(tf.pow(x,2),tf.transpose(tf.pow(v,2)))
141 | ),axis = 1 , keep_dims=True)
142 |
143 | y_hat = tf.add(linear_terms,pair_interactions)
144 |
145 | lambda_w = tf.constant(0.001,name='lambda_w')
146 | lambda_v = tf.constant(0.001,name='lambda_v')
147 |
148 | l2_norm = tf.reduce_sum(
149 | tf.add(
150 | tf.multiply(lambda_w,tf.pow(w,2)),
151 | tf.multiply(lambda_v,tf.pow(v,2))
152 | )
153 | )
154 |
155 | error = tf.reduce_mean(tf.square(y-y_hat))
156 | loss = tf.add(error,l2_norm)
157 |
158 |
159 | train_op = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(loss)
160 |
161 |
162 | epochs = 10
163 | batch_size = 1000
164 |
165 | # Launch the graph
166 | init = tf.global_variables_initializer()
167 | with tf.Session() as sess:
168 | sess.run(init)
169 |
170 | for epoch in tqdm(range(epochs), unit='epoch'):
171 | perm = np.random.permutation(x_train.shape[0]) # random shuffle; np.random.permutation(x) returns a new array and leaves the original untouched
172 | # iterate over batches
173 | for bX, bY in batcher(x_train[perm], y_train[perm], batch_size):
174 | _,t = sess.run([train_op,loss], feed_dict={x: bX.reshape(-1, p), y: bY.reshape(-1, 1)})
175 | print(t)
176 |
177 |
178 | errors = []
179 | for bX, bY in batcher(x_test, y_test):
180 | errors.append(sess.run(error, feed_dict={x: bX.reshape(-1, p), y: bY.reshape(-1, 1)}))
181 | print(errors)
182 | RMSE = np.sqrt(np.array(errors).mean())
183 | print (RMSE)
184 |
185 |
186 |
187 |
188 |
189 |
190 |
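# --- Illustrative sketch (not part of the original script) ---
# Checks, on made-up data, that the O(k*p) pairwise-interaction trick used for
# pair_interactions above equals the naive double-loop sum over v_i . v_j * x_i * x_j.
import numpy as np

rng = np.random.RandomState(0)
p, k = 8, 10
x = rng.rand(p)
v = rng.normal(0, 0.01, size=(k, p))           # same layout as the TF variable v: k x p

naive = 0.0
for i in range(p):
    for j in range(i + 1, p):
        naive += np.dot(v[:, i], v[:, j]) * x[i] * x[j]

fast = 0.5 * np.sum((x @ v.T) ** 2 - (x ** 2) @ (v ** 2).T)
print(np.allclose(naive, fast))                # True: both forms give the same value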
--------------------------------------------------------------------------------