├── BPR+MatrixFactorization.py
├── LICENSE
├── README.md
├── RNN+BPR.py
├── RNN+BPR2.py
├── SVD.py
├── data
│   ├── test.txt
│   ├── train.txt
│   └── user_cart.json
└── result
    ├── bpr_test_result
    ├── rnn_bpr_result
    └── svd_test_result

/BPR+MatrixFactorization.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on 06/09/2017 6:05 PM

@author: Tangrizzly
"""
# best result: 27*1000 times bootstrap sampling
# train 346462.827393
# test 88397.3271923

import numpy as np


def sigmoid(x):
    return 1 / (1 + np.exp(-x))


def update(w, hi, hj, learning_rate, regularization):
    # One BPR-style SGD step on a (user, item i, item j) triple. Note that
    # np.exp(-xij) * sigmoid(xij) equals sigmoid(-xij), the derivative of
    # ln(sigmoid(xij)) in the BPR objective.
    xij = np.dot(w, hi) - np.dot(w, hj)
    coef = np.exp(-xij) * sigmoid(xij)
    # Compute every gradient from the pre-update values, and subtract the L2
    # penalty rather than adding it; the original added it, which pushed the
    # parameters away from zero instead of regularizing them.
    dw = coef * (hi - hj) - regularization * w
    dhi = coef * w - regularization * hi
    dhj = coef * (-w) - regularization * hj
    w += learning_rate * dw
    hi += learning_rate * dhi
    hj += learning_rate * dhj


def rse_train():
    rse_train = 0
    for rui in train_orgi:
        utrain = int(rui[0])
        itrain = int(rui[1])
        rui_hat = np.dot(W[utrain, :], H[itrain, :].T)
        rse_train += np.square(rui[2] - rui_hat) + lmd * (
            np.square(np.linalg.norm(W[utrain, :])) + np.square(np.linalg.norm(H[itrain, :])))
    return rse_train


def rse_test():
    rse_test = 0
    for rui in test_orgi:
        utest = int(rui[0])
        itest = int(rui[1])
        if itest >= 1680:
            rse_test += 0
        else:
            rui_hat = np.dot(W[utest, :], H[itest, :].T)
            rse_test += np.square(rui[2] - rui_hat) + lmd * (
                np.square(np.linalg.norm(W[utest, :])) + np.square(np.linalg.norm(H[itest, :])))
    return rse_test


test_orgi = np.loadtxt("./data/test.txt")
train_orgi = np.loadtxt("./data/train.txt")

us = np.unique(train_orgi[:, 0]).shape[0] + 1
it = int(np.max(train_orgi[:, 1]) + 1)
avg = np.average(train_orgi[:, 2])

# initialization
np.random.seed(7)
f = 20       # number of latent factors
gm = 0.005   # learning rate (gamma)
lmd = 0.02   # regularization (lambda)
W = np.random.rand(us, f)
H = np.random.rand(it, f)
for b in range(0, 50):
    for a in range(0, 1000):
        # Bootstrap sampling: note that i and j are drawn from the item
        # column independently of u, not from u's own ratings.
        u = np.random.choice(train_orgi[:, 0], 1)[0]
        [i, j] = np.random.choice(train_orgi[:, 1], 2)
        update(W[int(u), :], H[int(i), :], H[int(j), :], gm, lmd)
    print rse_train()
    print rse_test()
    print b

X = np.dot(W, H.T)
for rui in test_orgi:
    a = int(rui[0])
    b = int(rui[1])
    if b >= 1680:
        rui_hat = avg
    else:
        rui_hat = X[a, b]
    rui[3] = rui_hat

np.savetxt('bpr_result', test_orgi, fmt='%.2f')
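

# The sampling loop above draws i and j uniformly from all interactions,
# independently of the sampled user. Standard BPR instead pairs a positive
# item observed for that user with a random unobserved negative one. A
# minimal sketch of that sampler (our addition, not part of the original
# script; the helper name is ours):
def sample_bpr_triple(train, n_items):
    # Pick a random training record; its user and item form the positive pair.
    row = train[np.random.randint(len(train))]
    u, i = int(row[0]), int(row[1])
    rated = set(train[train[:, 0] == row[0]][:, 1].astype(int))
    # Rejection-sample a negative item the user has not rated.
    j = np.random.randint(n_items)
    while j in rated:
        j = np.random.randint(n_items)
    return u, i, j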
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2018 Tangrizzly

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# 3RecommendationSystemAlgorithms

### SVD & BPR+MatrixFactorization
Trained and evaluated on a movie rating dataset.

### RNN+BPR+BPTT
Trained and evaluated on a Taobao marketing dataset.

### References
1. Yehuda Koren and Robert Bell: Advances in Collaborative Filtering. In: Recommender Systems Handbook.
2. Steffen Rendle, Christoph Freudenthaler, Zeno Gantner and Lars Schmidt-Thieme: BPR: Bayesian Personalized Ranking from Implicit Feedback (UAI 2009).
3. Liu, Wu and Wang: Context-aware Sequential Recommendation (ICDM 2016).
--------------------------------------------------------------------------------
/RNN+BPR.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on 06/09/2017 9:54 PM

@author: Tangrizzly
"""
import re
import numpy as np
from datetime import datetime
import sys
import pandas as pd


def sigmoid(x):
    return 1 / (1 + np.exp(-x))
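

# The update rules in bptt() below scale every gradient by
# np.exp(-Xij) * sigmoid(Xij). That product is algebraically identical to
# sigmoid(-Xij), i.e. d/dx ln(sigmoid(x)) evaluated at Xij — the usual BPR
# gradient coefficient. A quick self-check of the identity (our addition,
# not part of the original script; never called during training):
def _check_bpr_coefficient():
    for x in (-2.0, 0.0, 3.5):
        assert np.isclose(np.exp(-x) * sigmoid(x), sigmoid(-x))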


class RnnBpr:
    def __init__(self, d, x, lmd):
        self.d = d
        self.x = x
        self.lmd = lmd
        self.v = np.unique(x)
        self.R = np.random.rand(np.max(x) + 1, d)
        self.M = np.random.rand(d, d)
        self.W = np.random.rand(d, d)

    def forward_propagation(self):
        K = len(self.x)
        h = np.zeros((K + 1, self.d))
        h[-1] = np.zeros(self.d)
        y = np.zeros(K).reshape(K, 1)
        yi = np.zeros(K).reshape(K, 1)
        recall = 0
        for k in np.arange(K):
            h[k] = sigmoid(np.dot(self.R[self.x[k]].reshape(1, self.d), self.M) + np.dot(h[k - 1].reshape(1, self.d), self.W))
            # y[k] = np.dot(np.dot(h[k], self.W), np.dot(self.R[x[k]], self.M).T)
            y[k] = np.dot(h[k - 1].reshape(1, self.d), self.R[self.x[k]].reshape(self.d, 1))
            i = np.random.choice(self.v, 1)[0]  # negative sample
            # yi[k] = np.dot(np.dot(h[k], self.W), np.dot(r[i], self.M).T)
            yi[k] = np.dot(h[k - 1].reshape(1, self.d), self.R[i].reshape(self.d, 1))
            y_hat = np.argsort(np.dot(h[k - 1], self.R.T))[-10:][::-1]  # top-10 recommendations
            if self.x[k] in y_hat:
                recall += 1
        recall_rate = recall / float(len(self.x))
        return h, y, yi, recall_rate

    def calculate_loss(self, lmd):
        h, y, yi, recall_rate = self.forward_propagation()
        # BPR loss plus L2 regularization on all parameters
        l = np.sum(np.log(1 + np.exp(-y + yi)))
        l += lmd / 2 * (np.square(np.linalg.norm(self.R))
                        + np.square(np.linalg.norm(self.W))
                        + np.square(np.linalg.norm(self.M))
                        )
        return l, recall_rate

    def bptt(self, learning_rate):
        K = len(self.x)
        h, y, yi, recall_rate = self.forward_propagation()
        j = np.random.choice(self.v, 1)[0]
        dLdh = self.R[self.x[K - 1]] - self.R[j]
        dLdR = np.zeros(self.R.shape)
        for t in np.arange(K - 1)[::-1]:
            j = np.random.choice(self.v, 1)[0]
            Xij = np.dot(h[t], self.R[self.x[t + 1]].T) - np.dot(h[t], self.R[j].T)
            dLdR[self.x[t + 1]] = h[t]
            dLdR[j] = -h[t]
            df = np.multiply(h[t], (1 - h[t])).reshape(1, self.d)
            dLdM = np.dot(self.R[self.x[t]].reshape(self.d, 1), dLdh * df)
            dLdW = np.dot(h[t - 1].reshape(self.d, 1), dLdh * df)
            dLdh = np.dot(dLdh * df, self.W.T)
            self.M += learning_rate * (np.exp(-Xij) * sigmoid(Xij) * dLdM - self.lmd * self.M)
            self.W += learning_rate * (np.exp(-Xij) * sigmoid(Xij) * dLdW - self.lmd * self.W)
            self.R[self.x[t + 1]] += learning_rate * (np.exp(-Xij) * sigmoid(Xij) * dLdR[self.x[t + 1]] - self.lmd * self.R[self.x[t + 1]])
            self.R[j] += learning_rate * (np.exp(-Xij) * sigmoid(Xij) * dLdR[j] - self.lmd * self.R[j])


def train(model, learning_rate=0.002, nepoch=100, evaluate_loss_after=5):
    # We keep track of the losses so we can plot them later
    losses = []
    num_examples_seen = 0
    max_recall_rate = 0.0
    for epoch in range(nepoch):
        # Optionally evaluate the loss
        if epoch % evaluate_loss_after == 0:
            loss, recall_rate = model.calculate_loss(model.lmd)
            max_recall_rate = np.max([max_recall_rate, recall_rate])
            losses.append((num_examples_seen, loss))
            time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            print "%s: Loss after num_examples_seen=%d : %f" % (time, num_examples_seen, loss)
            print "Max recall rate: %f" % max_recall_rate
            # Adjust the learning rate if loss increases
            # if len(losses) > 1 and losses[-1][1] > losses[-2][1]:
            #     learning_rate = learning_rate * 0.5
            #     print "Setting learning rate to %f" % learning_rate
            sys.stdout.flush()
        model.bptt(learning_rate)
        num_examples_seen += 1
    return max_recall_rate


with open('./data/user_cart.json', 'r') as f:
    data = f.readlines()
train_orig = []
temp = []
for line in data:
    line = re.sub(r'[\[\]\s]', '', line)  # strip brackets and whitespace (the original pattern '[\[|\]|\s++]' also matched literal '|' and '+')
    odom = line.split(',')
    if len(odom) < 10:
        continue
    numbers_float = map(float, odom)
    numbers = map(int, numbers_float)
    train_orig.append(numbers)
    temp.extend(numbers)

# remove items appearing less than 3 times
trainSet = []
testSet = []
temp_df = pd.DataFrame(temp)
lessThan3 = temp_df[0].value_counts()[temp_df[0].value_counts() < 3].index[:]
for line in train_orig:
    cleaned = [x for x in line if x not in lessThan3]
    if len(cleaned) >= 10:
        trainSet.append(cleaned)
    # split train set and test set
    # if len(cleaned) >= 10:
    #     trainSet.append(cleaned[0: int(len(cleaned) * 0.8) - 1])
    #     testSet.append(cleaned[int(len(cleaned) * 0.8): -1])

np.random.seed(1)
recall = []
for rui in np.arange(len(trainSet)):
    model = RnnBpr(10, trainSet[rui], 0.01)
    recall.append(train(model, evaluate_loss_after=1))
    print rui
np.savetxt('rnn_bpr_result', np.asarray(recall), fmt='%f')
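

# The commented-out block above splits each cleaned sequence 80/20 by
# position into a train part and a test part. A small helper capturing that
# idea (our addition; the original keeps the split disabled and trains on
# full sequences):
def split_sequence(seq, ratio=0.8):
    cut = int(len(seq) * ratio)
    return seq[:cut], seq[cut:]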
--------------------------------------------------------------------------------
/RNN+BPR2.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on 06/09/2017 9:54 PM
Modified on 25/11/2017 8:37 AM
@author: Tangrizzly
"""
import numpy as np
from datetime import datetime
from numpy.random import rand
# from numpy.random import uniform
from collections import Counter


def sigmoid(x):
    return 1 / (1 + np.exp(-x))


class RnnBpr:
    def __init__(self, d, dataset, v, maxNo, lmd):
        self.d = d  # hidden-layer dimension
        self.lmd = lmd  # lambda
        self.dataset = dataset
        self.v = v  # items appearing in the dataset
        self.R = rand(maxNo + 1, d)  # embedding vectors for all items
        self.M = rand(self.d, self.d)  # uniform(-0.5, 0.5, (d, d)) would give a centered range
        self.W = rand(self.d, self.d)

    def forward_propagation(self, x):
        K = len(x)
        h = np.zeros((K + 1, self.d))
        h[-1] = np.zeros(self.d)
        y = np.zeros(K).reshape((K, 1))  # note that reshape takes a tuple
        yi = np.zeros(K).reshape((K, 1))
        for k in np.arange(K):
            h[k] = sigmoid(np.dot(self.M, self.R[x[k]]) + np.dot(self.W, h[k - 1]))
            # an earlier version used a dot product here
            y[k] = np.sum(h[k - 1] * self.R[x[k]])  # preference for the positive sample: element-wise product, then sum
            i = int(np.random.choice(self.v, 1)[0])  # negative sample
            yi[k] = np.sum(h[k - 1] * self.R[i])  # preference for the negative sample
        # y_hat = np.argsort(np.dot(h[K-1], self.R.T))[-10:][::-1]  # sorting all scores before taking the top ten costs too much
        # instead, select the top ten first and sort only those
        scores = np.dot(h[K - 1], self.R.T)  # the user's scores over all items
        max_10 = np.argpartition(scores, -10)[-10:]  # O(n): indices of the ten highest scores
        y_hat = max_10[np.argsort(scores[max_10])][::-1]  # sort just those ten, best score first
        return h, y, yi, y_hat  # y_hat holds the ten highest-scoring item ids, best first

    def calculate_loss(self, lmd, x):
        h, y, yi, y_hat = self.forward_propagation(x)
        # BPR loss: -ln sigmoid(y - yi) = ln(1 + exp(-(y - yi))); the previous
        # revision dropped the minus sign, which would reward ranking the
        # negative sample above the positive one.
        l = np.sum(np.log(1 + np.exp(-(y - yi))))
        l += lmd / 2 * (np.square(np.linalg.norm(self.R))
                        + np.square(np.linalg.norm(self.W))
                        + np.square(np.linalg.norm(self.M))
                        )
        return l, y_hat

    def bptt(self, x, learning_rate):
        # Note: this is plain backpropagation rather than true BPTT, which
        # would aggregate the losses and apply one single big update.
        K = len(x)
        h, y, yi, y_hat = self.forward_propagation(x)
        j = int(np.random.choice(self.v, 1)[0])
        dLdh = self.R[x[K - 1]] - self.R[j]
        dLdR = np.zeros(self.R.shape)
        for t in np.arange(K - 1)[::-1]:
            j = int(np.random.choice(self.v, 1)[0])
            Xij = np.dot(h[t], self.R[x[t + 1]].T) - np.dot(h[t], self.R[j].T)
            dLdR[x[t + 1]] = h[t]
            dLdR[j] = -h[t]
            df = np.multiply(h[t], (1 - h[t])).reshape(1, self.d)
            # Outer products of shape (d, d); the previous revision multiplied
            # the factors the other way round, collapsing these to scalars.
            dLdM = np.dot((dLdh * df).reshape((self.d, 1)), self.R[x[t]].reshape((1, self.d)))
            dLdW = np.dot((dLdh * df).reshape((self.d, 1)), h[t - 1].reshape((1, self.d)))
            # With h = sigmoid(M.R + W.h), backprop through W uses W itself,
            # not W.T as in the row-vector formulation of RNN+BPR.py.
            dLdh = np.dot(dLdh * df, self.W).reshape(self.d)
            self.M += learning_rate * (np.exp(-Xij) * sigmoid(Xij) * dLdM - self.lmd * self.M)
            self.W += learning_rate * (np.exp(-Xij) * sigmoid(Xij) * dLdW - self.lmd * self.W)
            self.R[x[t + 1]] += learning_rate * (
                np.exp(-Xij) * sigmoid(Xij) * dLdR[x[t + 1]] - self.lmd * self.R[x[t + 1]])
            self.R[j] += learning_rate * (np.exp(-Xij) * sigmoid(Xij) * dLdR[j] - self.lmd * self.R[j])
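

# The top-ten ranking above relies on np.argpartition: an O(n) selection of
# the ten best scores, followed by a sort over just that subset instead of
# over every item. The same pattern as a standalone helper (our addition,
# shown for clarity; forward_propagation keeps it inline):
def top_k_indices(scores, k=10):
    top = np.argpartition(scores, -k)[-k:]      # unordered top-k, O(n)
    return top[np.argsort(scores[top])][::-1]   # ordered, best score first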

# def train(model, learning_rate=0.002):
#     """
#     :param model: RnnBpr
#     :param learning_rate: usually picked on a log-10 grid such as 0.1/0.01, with midpoints like 0.05.
#     :return: recall_rate
#     """
#     losses = []
#     recall = 0
#     for i in range(0, len(model.x)):
#         xi = model.x[0: i + 1]  # shape=(i+1, d)
#         loss, y_hat = model.calculate_loss(model.lmd, xi)
#         if i < len(model.x) - 1:
#             if model.x[i + 1] in y_hat:
#                 recall += 1
#         losses.append(loss)
#         time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
#         print "%s: Loss %f, recall %d" % (time, loss, recall)
#         model.bptt(xi, learning_rate)
#     recall_rate = recall / float(len(model.x) - 1)
#     return recall_rate


def train(model, learning_rate=0.01):
    """
    :param model: RnnBpr
    :param learning_rate: usually picked on a log-10 grid such as 0.1/0.01, with midpoints like 0.05.
    :return: recall_rate
    """
    example = 0
    time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print "%s: Loss after example=%d : %f" % (time, example, 0)
    for x in model.dataset:
        example += 1
        recall = 0
        for i in range(0, len(x)):
            xi = x[0: i + 1]  # the prefix of the sequence up to position i
            loss, y_hat = model.calculate_loss(model.lmd, xi)
            if i < len(x) - 1:
                if x[i + 1] in y_hat:
                    recall += 1
            # time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            # print "%s: Loss %f, recall %d" % (time, loss, recall)
            model.bptt(xi, learning_rate)
        recall_rate = recall / float(len(x) - 1)
        time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        print "%s: Loss after example=%d : %f" % (time, example, loss)
        print "Recall rate: %f" % recall_rate


# read the users' interaction histories; n_user = 737
with open('./data/user_cart.json', 'r') as f:
    data = f.readlines()
train_orig = []  # nested list; each user's sequence = [int1, int2, ...]
temp = []
for line in data:
    # line = re.sub('[\[|\]|\s++]', '', line)
    # odom = line.split(',')
    odem = line[1:][:-2].replace(' ', '').split(',')
    if len(odem) < 10:
        continue
    # numbers_float = map(float, odom)
    # numbers = map(int, numbers_float)
    numbers = [int(float(i)) for i in odem]
    train_orig.append(numbers)
    temp.extend(numbers)

# after filtering, n_user = 706
# remove items appearing less than 3 times
maxNo = 0
v = []
trainSet = []  # nested list; each user's sequence = [int1, int2, ...]
# temp_df = pd.DataFrame(temp)
# lessThan3 = temp_df[0].value_counts()[temp_df[0].value_counts() < 3].index[:]
items_count = dict(Counter(temp))
lessThan3 = [item for item, count in items_count.items() if count < 3]
for line in train_orig:
    cleaned = [x for x in line if x not in lessThan3]
    maxNo = np.max([maxNo, np.max(cleaned)])
    v = np.append(v, np.unique(cleaned))
    if len(cleaned) >= 10:
        trainSet.append(cleaned)

np.random.seed(1)
recall = []
# the model only needs to be built once, initializing every item's representation up front
model = RnnBpr(10, trainSet, v, maxNo, 0.01)
train(model)
# np.savetxt('rnn_bpr_result', np.asarray(recall), fmt='%f')
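

# As noted in bptt(), the method updates M, W and R inside the time loop,
# which is plain BP. True BPTT would accumulate the per-step gradients and
# apply them in one update at the end. A rough sketch of that accumulation
# (our addition, untested against this model; variable names mirror bptt()):
def bptt_accumulated(model, x, learning_rate):
    K = len(x)
    h, y, yi, y_hat = model.forward_propagation(x)
    gM = np.zeros_like(model.M)
    gW = np.zeros_like(model.W)
    gR = np.zeros_like(model.R)
    j = int(np.random.choice(model.v, 1)[0])
    dLdh = model.R[x[K - 1]] - model.R[j]
    for t in np.arange(K - 1)[::-1]:
        j = int(np.random.choice(model.v, 1)[0])
        Xij = np.dot(h[t], model.R[x[t + 1]]) - np.dot(h[t], model.R[j])
        coef = sigmoid(-Xij)  # equals np.exp(-Xij) * sigmoid(Xij)
        df = (h[t] * (1 - h[t])).reshape(1, model.d)
        gM += coef * np.dot((dLdh * df).reshape((model.d, 1)), model.R[x[t]].reshape((1, model.d)))
        gW += coef * np.dot((dLdh * df).reshape((model.d, 1)), h[t - 1].reshape((1, model.d)))
        gR[x[t + 1]] += coef * h[t]
        gR[j] -= coef * h[t]
        dLdh = np.dot(dLdh * df, model.W).reshape(model.d)
    # one big step at the end (this regularizes all of R, not only the rows
    # touched above, which differs slightly from bptt())
    model.M += learning_rate * (gM - model.lmd * model.M)
    model.W += learning_rate * (gW - model.lmd * model.W)
    model.R += learning_rate * (gR - model.lmd * model.R)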
--------------------------------------------------------------------------------
/SVD.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on 06/09/2017 9:50 AM

@author: Tangrizzly
"""
# best result:
# rse_train = 57372.4818287
# rse_test = 21230.4729627
# more loops will lead to overfitting

import numpy as np

test_orgi = np.loadtxt("./data/test.txt")
train_orgi = np.loadtxt("./data/train.txt")

u = np.unique(train_orgi[:, 0]).shape[0] + 1
# i = np.unique(train_orgi[:, 0]).shape[0]
i = int(np.max(train_orgi[:, 1])) + 1
f = 20       # number of latent factors
gm = 0.005   # learning rate (gamma)
lmd = 0.02   # regularization (lambda)
avg = np.average(train_orgi[:, 2])

# initialization
qi = np.random.rand(f, i)
pu = np.random.rand(f, u)
bi = np.zeros([i, 1])
bu = np.zeros([1, u])
# r_hat = avg + bi + bu + np.dot(qi.T, pu)

for epoch in range(0, 19):  # renamed from i, which shadowed the item count above
    print epoch
    for rui in train_orgi:
        a = int(rui[0])
        b = int(rui[1])
        rui_hat = avg + bi[b][0] + bu[0][a] + np.dot(qi[:, b].T, pu[:, a])
        eui = rui[2] - rui_hat
        bu[0][a] += gm * (eui - lmd * bu[0][a])
        bi[b][0] += gm * (eui - lmd * bi[b][0])
        qi[:, b] += gm * (eui * pu[:, a] - lmd * qi[:, b])
        pu[:, a] += gm * (eui * qi[:, b] - lmd * pu[:, a])
    rse_train = 0
    for rui in train_orgi:
        a = int(rui[0])
        b = int(rui[1])
        rui_hat = avg + bi[b][0] + bu[0][a] + np.dot(qi[:, b].T, pu[:, a])
        rse_train += np.square(rui[2] - rui_hat) + lmd * (np.square(bi[b][0]) + np.square(bu[0][a]) + np.square(np.linalg.norm(qi[:, b])) + np.square(np.linalg.norm(pu[:, a])))
    print rse_train
    rse_test = 0
    for rui in test_orgi:
        a = int(rui[0])
        b = int(rui[1])
        if b >= 1680:
            rui_hat = avg
            rse_test += 0
        else:
            rui_hat = avg + bi[b][0] + bu[0][a] + np.dot(qi[:, b].T, pu[:, a])
            # bu term added so the penalty matches rse_train
            rse_test += np.square(rui[2] - rui_hat) + lmd * (np.square(bi[b][0]) + np.square(bu[0][a]) + np.square(np.linalg.norm(qi[:, b])) + np.square(np.linalg.norm(pu[:, a])))
    print rse_test

for rui in test_orgi:
    a = int(rui[0])
    b = int(rui[1])
    if b >= 1680:
        rui_hat = avg
    else:
        rui_hat = avg + bi[b][0] + bu[0][a] + np.dot(qi[:, b].T, pu[:, a])
    rui[3] = rui_hat
    print rui

np.savetxt('result', test_orgi, fmt='%.2f')
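

# The prediction rule r_hat = avg + b_i + b_u + <q_i, p_u> appears four times
# above. The same rule as a single helper (our addition; SVD.py itself
# inlines the expression):
def predict(a, b):
    # a: user index, b: item index
    return avg + bi[b][0] + bu[0][a] + np.dot(qi[:, b], pu[:, a])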
--------------------------------------------------------------------------------
/result/rnn_bpr_result:
--------------------------------------------------------------------------------
0.586667
0.255937
0.048780
0.173913
0.206128
0.041667
0.000000
0.000000
0.000000
0.381818
0.000000
0.475728
0.646789
0.000000
0.000000
0.000000
0.000000
0.236220
0.000000
0.042553
0.000000
0.000000
0.000000
0.503788
0.180180
0.224852
0.000000
0.000000
0.000000
0.000000
0.204301
0.564267
0.000000
0.000000
0.427984
0.228070
0.076923
0.033708
0.359873
0.000000
0.000000
0.314879
0.000000
0.164740
0.000000
0.176471
0.000000
0.088235
0.000000
0.000000
0.000000
0.002008
0.000000
0.000000
0.176166
0.143969
0.000000
0.282609
0.010753
0.000000
0.000000
0.090909
0.308057
0.096591
0.000000
0.000000
0.502591
0.213873
0.022727
0.007353
0.000000
0.000000
0.455497
0.657744
0.379679
0.000000
0.092150
0.054974
0.000000
0.127451
0.000000
0.400000
0.160643
0.000000
0.154545
0.000000
0.000000
0.175676
0.003676
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.002874
0.000000
0.024024
0.000000
0.445000
0.000000
0.000000
0.000000
0.008287
0.529167
0.000000
0.148000
0.000000
0.302703
0.000000
0.000000
0.000000
0.000000
0.202073
0.000000
0.000000
0.343195
0.000000
0.000000
0.043478
0.000000
0.434316
0.363420
0.397436
0.000000
0.271472
0.000000
0.287260
0.621170
0.116531
0.000000
0.000000
0.000000
0.225627
0.000000
0.267677
0.000000
0.000000
0.000000
0.138790
0.273171
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.035461
0.000000
0.000000
0.000000
0.000000
0.348123
0.000000
0.000000
0.000000
0.000000
0.123288
0.116147
0.001969
0.034483
0.000000
0.000000
0.000000
0.000000
0.671958
0.417778
0.000000
0.000000
0.146179
0.343915
0.595669
0.000000
0.118299
0.000000
0.394137
0.000000
0.000000
0.370258
0.000000
0.000000
0.000000
0.000000
0.137652
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.158672
0.000000
0.141479
0.000000
0.150852
0.271955
0.000000
0.000000
0.318386
0.220339
0.000000
0.060948
0.000000
0.000000
0.000000
0.000000
0.101660
0.000000
0.000000
0.000000
0.140212
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.100000
0.293617
0.100000
0.000000
0.000000
0.000000
0.275362
0.373518
0.000000
0.149408
0.000000
0.000000
0.000000
0.018519
0.000000
0.075188
0.000000
0.000000
0.156250
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.717281
0.052632
0.000000
0.000000
0.365591
0.029787
0.000000
0.000000
0.000000
0.006250
0.000000
0.000000
0.000000
0.742739
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.049550
0.000000
0.000000
0.000000
0.132022
0.000000
0.000000
0.000000
0.000000
0.000000
0.274744
0.000000
0.000000
0.000000
0.000000
0.281081
0.000000
0.307263
0.071429
0.000000
0.000000
0.000000
0.000000
0.152381
0.000000
0.000000
0.207965
0.000000
0.000000
0.000000
0.045455
0.130037
0.251121
0.135714
0.000000
0.212245
0.000000
0.122807
0.000000
0.001890
0.000000
0.118090
0.000000
0.066986
0.415493
0.000000
0.000000
0.000000
0.289216
0.234899
0.000000
0.000000
0.000000
0.110825
0.000000
0.000000
0.152439
0.233141
0.000000
0.340996
0.000000
0.000000
0.231132
0.092150
0.052632
0.281046
0.000000
0.341513
0.290640
0.068069
0.000000
0.249158
0.000000
0.000000
0.000000
0.265909
0.322917
0.000000
0.000000
0.116651
0.000000
0.000000
0.000000
0.000000
0.415406
0.469388
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.123426
0.000000
0.315217
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.335556
0.124473
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.107649
0.000000
0.000000
0.369318
0.000000
0.217573
0.000000
0.000000
0.045455
0.000000
0.000000
0.488595
0.009434
0.000000
0.000000
0.000000
0.000000
0.136364
0.167969
0.000000
0.219388
0.000000
0.000000
0.000000
0.000000
0.095238
0.000000
0.000000
0.017857
0.014493
0.000000
0.569052
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.232353
0.000000
0.000000
0.005319
0.000000
0.000000
0.009901
0.135468
0.000000
0.000000
0.424444
0.000000
0.000000
0.114967
0.000000
0.000000
0.004032
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.010929
0.000000
0.000000
0.000000
0.226351
0.000000
0.000000
0.167213
0.333648
0.003509
0.175637
0.000000
0.364583
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.082290
0.000000
0.000000
0.000000
0.000000
0.000000
0.389860
0.000000
0.000000
0.000000
0.000000
0.541735
0.000000
0.000000
0.365452
0.000000
0.000000
0.128571
0.000000
0.000000
0.000000
0.066667
0.000000
0.464020
0.000000
0.000000
0.000000
0.000000
0.058824
0.275492
0.000000
0.000000
0.228070
0.146154
0.129310
0.000000
0.277008
0.000000
0.011628
0.000000
0.469945
0.549828
0.000000
0.000000
0.000000
0.000000
0.288368
0.000000
0.219424
0.138655
0.000000
0.000000
0.000000
0.112766
0.280000
0.000000
0.000000
0.284444
0.000000
0.000000
0.328571
0.476923
0.000000
0.000000
0.000000
0.647860
0.000000
0.000000
0.597260
0.034483
0.000000
0.000000
0.007299
0.000000
0.004673
0.002415
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.151880
0.000000
0.000000
0.000000
0.000000
0.000000
0.324138
0.000000
0.000000
0.000000
0.176871
0.003460
0.000000
0.000000
0.106061
0.000000
0.000000
0.000000
0.000000
0.343396
0.000000
0.026490
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.202759
0.000000
0.525253
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.118497
0.113744
0.000000
0.100457
0.000000
0.050000
0.000000
0.000000
0.000000
0.084444
0.000000
0.000000
0.007874
0.024390
0.074205
0.167832
0.000000
0.000000
0.000000
0.011765
0.000000
0.000000
0.504160
0.000000
0.000000
0.055556
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.219565
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.358621
0.000000
0.395990
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.003401
0.000000
0.000000
0.000000
0.000000
0.000000
0.355932
0.000000
0.013514
0.000000
0.005814
0.106849
0.000000
0.090909
0.000000
0.000000
0.000000
0.052632
0.000000
0.000000
0.312102
0.000000
0.311688
0.000000
0.071429
0.028986
0.290323
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.381193
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.000000
0.284224
0.000000
0.000000
0.416272
0.000000
0.000000
0.000000
0.194444
0.000000
0.000000
0.000000
0.208738
0.000000
0.000000
0.076923

--------------------------------------------------------------------------------