├── Codes ├── Base_Recommender │ ├── __init__.py │ └── MultiVAE.py ├── config.ini ├── generator.py ├── eval_functions.py ├── sample.py ├── discriminator.py ├── test.py ├── data_processing.py └── train.py ├── architecture.JPG ├── LICENSE ├── README.md └── Dataset └── Askubuntu_Sample ├── niche_items.txt ├── unique_item_id.txt ├── item2id.txt └── item_list.txt /Codes/Base_Recommender/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /architecture.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ash-shar/Long-Tail-GAN/HEAD/architecture.JPG -------------------------------------------------------------------------------- /Codes/config.ini: -------------------------------------------------------------------------------- 1 | [Long-Tail-GAN] 2 | h0_size = 100 3 | h1_size = 150 4 | h2_size = 250 5 | h3_size = 300 6 | NUM_EPOCH = 80 7 | BATCH_SIZE = 100 8 | DISPLAY_ITER = 50 9 | LEARNING_RATE = 0.0001 10 | to_restore = 0 11 | model_name = LT_GAN 12 | GANLAMBDA = 1.0 13 | -------------------------------------------------------------------------------- /Codes/generator.py: -------------------------------------------------------------------------------- 1 | from Base_Recommender.MultiVAE import MultiVAE 2 | import os 3 | 4 | def generator_VAECF(pro_dir): 5 | 6 | unique_sid = list() 7 | with open(os.path.join(pro_dir, 'unique_item_id.txt'), 'r') as f: 8 | for line in f: 9 | unique_sid.append(line.strip()) 10 | 11 | n_items = len(unique_sid) 12 | 13 | p_dims = [200, 600, n_items] # VAECF recommended values 14 | 15 | total_anneal_steps = 20000 # VAECF recommended values 16 | anneal_cap = 0.2 # VAECF recommended values 17 | 18 | vae = MultiVAE(p_dims, lam=0.0, random_seed=98765) 19 | 20 | logits_var, loss_var, params = vae.build_graph() 21 | 22 | return vae, logits_var, loss_var, params, p_dims, total_anneal_steps, anneal_cap -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2018, Adit Krishnan 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /Codes/eval_functions.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import numpy as np 3 | import os 4 | import shutil 5 | import codecs 6 | import psutil 7 | from scipy import sparse 8 | import pandas as pd 9 | import bottleneck as bn 10 | 11 | def NDCG_binary_at_k_batch(X_pred, heldout_batch, k=100): 12 | ''' 13 | normalized discounted cumulative gain@k for binary relevance 14 | ASSUMPTIONS: all the 0's in heldout_data indicate 0 relevance 15 | ''' 16 | batch_users = X_pred.shape[0] 17 | idx_topk_part = bn.argpartition(-X_pred, k, axis=1) 18 | topk_part = X_pred[np.arange(batch_users)[:, np.newaxis], 19 | idx_topk_part[:, :k]] 20 | idx_part = np.argsort(-topk_part, axis=1) 21 | 22 | # topk predicted score 23 | idx_topk = idx_topk_part[np.arange(batch_users)[:, np.newaxis], idx_part] 24 | # build the discount template 25 | tp = 1. / np.log2(np.arange(2, k + 2)) 26 | 27 | DCG = (heldout_batch[np.arange(batch_users)[:, np.newaxis], 28 | idx_topk].toarray() * tp).sum(axis=1) 29 | IDCG = np.array([(tp[:min(n, k)]).sum() 30 | for n in heldout_batch.getnnz(axis=1)]) 31 | 32 | output = [] 33 | 34 | for idx in range(np.shape(DCG)[0]): 35 | if IDCG[idx] != 0: 36 | output.append(DCG[idx]/IDCG[idx]) 37 | 38 | return output 39 | 40 | def Recall_at_k_batch(X_pred, heldout_batch, k=100): 41 | batch_users = X_pred.shape[0] 42 | 43 | idx = bn.argpartition(-X_pred, k, axis=1) 44 | X_pred_binary = np.zeros_like(X_pred, dtype=bool) 45 | X_pred_binary[np.arange(batch_users)[:, np.newaxis], idx[:, :k]] = True 46 | 47 | X_true_binary = (heldout_batch > 0).toarray() 48 | 49 | tmp = (np.logical_and(X_true_binary, X_pred_binary).sum(axis=1)).astype( 50 | np.float32) 51 | 52 | denom = np.minimum(k, X_true_binary.sum(axis=1)) 53 | 54 | output = [] 55 | 56 | misclassified_tags = [] 57 | 58 | for idx in range(np.shape(tmp)[0]): 59 | if denom[idx] != 0: 60 | output.append(tmp[idx]/denom[idx]) 61 | 62 | return output, misclassified_tags 63 | -------------------------------------------------------------------------------- /Codes/sample.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | 5 | def sample_from_generator(elements, probabilities_li, to_sample, niche_only = False): 6 | 7 | sampled_li_bin = np.zeros([len(elements)], dtype = float) 8 | probabilities_li = np.asarray(probabilities_li) 9 | 10 | while True: 11 | try: 12 | if niche_only == True: 13 | try: 14 | probabilities_li[OTHER_TAGS] = 0.0 15 | if probabilities_li.sum() != 0.0: 16 | probabilities_li = probabilities_li/(1.0*probabilities_li.sum()) 17 | else: 18 | probabilities_li = [(1.0/len(elements))]*len(elements) 19 | probabilities_li = np.asarray(probabilities_li) 20 | except Exception as e: 21 | print('Error:', str(e)) 22 | 23 | sampled_li = np.random.choice(elements, to_sample, p = probabilities_li, 
replace = False) 24 | 25 | break 26 | except: 27 | # print('Error Sampling: Reducing to_sample') 28 | to_sample -= 1 29 | if to_sample == 0: 30 | break 31 | 32 | 33 | # for idx in range(np.shape(probabilities_li)[0]): 34 | sampled_li_bin[sampled_li] = 1 35 | 36 | return np.asarray(sampled_li_bin), np.asarray(sampled_li) 37 | 38 | 39 | 40 | def sample_from_generator_new(elements, probabilities_li, to_sample, num_elements): 41 | 42 | sampled_li_bin = np.zeros([num_elements], dtype = float) 43 | probabilities_li = np.asarray(probabilities_li) 44 | 45 | if probabilities_li.sum() != 0.0: 46 | probabilities_li = probabilities_li/(1.0*probabilities_li.sum()) 47 | else: 48 | probabilities_li = [(1.0/num_elements)]*num_elements 49 | probabilities_li = np.asarray(probabilities_li) 50 | 51 | while True: 52 | try: 53 | 54 | sampled_li = np.random.choice(elements, to_sample, p = probabilities_li, replace = False) 55 | 56 | break 57 | except: 58 | # print('Error Sampling: Reducing to_sample') 59 | to_sample -= 1 60 | if to_sample == 0: 61 | break 62 | 63 | 64 | # for idx in range(np.shape(probabilities_li)[0]): 65 | sampled_li_bin[sampled_li] = 1 66 | 67 | return np.asarray(sampled_li_bin), np.asarray(sampled_li) 68 | -------------------------------------------------------------------------------- /Codes/discriminator.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | 3 | def discriminator(n_items, FEATURE_LEN, h0_size, h1_size, h2_size, h3_size): 4 | # Discriminator 5 | x_generated_id = tf.placeholder(tf.int32, [None], name = "x_generated") 6 | x_popular_n_id = tf.placeholder(tf.int32, [None], name="x_popular_n") 7 | x_popular_g_id = tf.placeholder(tf.int32, [None], name="x_popular_g") 8 | x_niche_id = tf.placeholder(tf.int32, [None], name="x_niche") 9 | 10 | item_feature_arr = tf.placeholder(tf.float32, [None, FEATURE_LEN], name="item_feature_arr") # num_tags x ... 
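# The four id placeholders above hold item indices that are looked up in emb_matrix below:
#   (x_popular_n_id, x_niche_id)     -> observed popular/niche item pairs from real user data,
#   (x_popular_g_id, x_generated_id) -> the corresponding popular/generated pairs sampled via the generator.
# Both kinds of pairs pass through the same layers (w1/b1 for the popular branch, w2/b2 for the
# niche/generated branch, then w3/b3 and w4/b4) to produce y_data and y_generated, respectively.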
11 | 12 | keep_prob = tf.placeholder(tf.float32, name="keep_prob") # dropout 13 | 14 | emb_matrix = tf.Variable(tf.truncated_normal([FEATURE_LEN, h0_size], stddev=0.1), name="d_w1", dtype=tf.float32) 15 | 16 | x_generated = tf.nn.embedding_lookup(emb_matrix, x_generated_id) # [None, h0_size] 17 | x_popular_n = tf.nn.embedding_lookup(emb_matrix, x_popular_n_id) # [None, h0_size] 18 | x_popular_g = tf.nn.embedding_lookup(emb_matrix, x_popular_g_id) # [None, h0_size] 19 | x_niche = tf.nn.embedding_lookup(emb_matrix, x_niche_id) # [None, h0_size] 20 | 21 | 22 | # Popular Tags 23 | w1 = tf.Variable(tf.truncated_normal([h0_size, h1_size], stddev=0.1), name="d_w1", dtype=tf.float32) 24 | b1 = tf.Variable(tf.zeros([h1_size]), name="d_b1", dtype=tf.float32) 25 | h1 = tf.nn.dropout(tf.nn.tanh(tf.matmul(x_popular_n, w1) + b1), keep_prob) 26 | 27 | # Niche Tags 28 | w2 = tf.Variable(tf.truncated_normal([h0_size, h2_size], stddev=0.1), name="d_w2", dtype=tf.float32) 29 | b2 = tf.Variable(tf.zeros([h2_size]), name="d_b2", dtype=tf.float32) 30 | h2 = tf.nn.dropout(tf.nn.tanh(tf.matmul(x_niche, w2) + b2), keep_prob) 31 | 32 | 33 | h_in_data = tf.concat([h1, h2], 1) 34 | 35 | # Fully Connected Layer 1 36 | w3 = tf.Variable(tf.truncated_normal([h1_size + h2_size, h3_size], stddev=0.1), name="d_w3", dtype=tf.float32) 37 | b3 = tf.Variable(tf.zeros([h3_size]), name="d_b3", dtype=tf.float32) 38 | 39 | # Fully Connected Layer 2 40 | w4 = tf.Variable(tf.truncated_normal([h3_size, 1], stddev=0.1), name="d_w4", dtype=tf.float32) 41 | b4 = tf.Variable(tf.zeros([1]), name="d_b4", dtype=tf.float32) 42 | 43 | 44 | y_data = tf.nn.dropout(tf.nn.tanh(tf.matmul(h_in_data, w3) + b3), keep_prob) 45 | y_data = tf.nn.sigmoid(tf.matmul(y_data, w4) + b4) 46 | 47 | d_params = [w1, b1, w2, b2, w3, b3, w4, b4] 48 | 49 | 50 | # Generated Tags 51 | h3 = tf.nn.dropout(tf.nn.tanh(tf.matmul(x_popular_g, w1) + b1), keep_prob) 52 | h4 = tf.nn.dropout(tf.nn.tanh(tf.matmul(x_generated, w2) + b2), keep_prob) 53 | h_in_gen = tf.concat([h3, h4], 1) 54 | y_generated = tf.nn.dropout(tf.nn.tanh(tf.matmul(h_in_gen, w3) + b3), keep_prob) 55 | y_generated = tf.nn.sigmoid(tf.matmul(y_generated, w4) + b4) 56 | 57 | 58 | return y_data, y_generated, d_params, x_generated_id, x_popular_n_id, x_popular_g_id, x_niche_id, item_feature_arr, keep_prob 59 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Long-Tail-GAN 2 | 3 | This repository contains the training and testing codes for the Generative Adversarial learning framework for Neural Collaborative Filtering (NCF) models, which aims to enhance long-tail item recommendations. 4 | 5 | If this code helps you in your research, please cite the following publication: 6 | 7 | > Krishnan, Adit, et al. "An Adversarial Approach to Improve Long-Tail Performance in Neural Collaborative Filtering." Proceedings of the 27th ACM International Conference on Information and Knowledge Management. ACM, 2018. 8 | ## Getting Started 9 | 10 | These instructions will help you setup the proposed model on your local machine. 
### Platforms Supported

- Unix, macOS, Windows (with an appropriate Python and TensorFlow environment)

### Prerequisites

Our framework runs on Python 2.7+ environments with the following modules installed:
- [tensorflow](https://www.tensorflow.org/)
- [numpy](http://www.numpy.org/)
- [scipy](https://www.scipy.org/)
- [pandas](https://pandas.pydata.org/)
- [bottleneck](https://pypi.org/project/Bottleneck/)

These requirements may also be satisfied with an up-to-date Anaconda environment - https://www.anaconda.com/


## Input Files

You will need the following files to run our model:

```
item_counts.csv: CSV file containing userId, itemId, and rating (given by user to item), separated by commas (,)
item_list.txt: List of item ids
unique_item_id.txt: Items to use for training and testing (say, only items rated by at least 5 users)
item2id.txt: Mapping that makes the item ids in unique_item_id sequential (0 to num_item), tab-separated
profile2id.txt: Mapping that makes user ids sequential (0 to num_user), tab-separated
niche_items.txt: Items which are niche (original ids)
train_GAN.csv: CSV file containing pairs of userId (mapped), itemId (mapped) with rating greater than an application-specific threshold
train_GAN_popular.csv: userId (mapped), itemId (mapped) pairs of popular items (unique_items - niche items)
train_GAN_niche.csv: userId (mapped), itemId (mapped) pairs of niche items
validation_tr.csv: Training data for validation (userId (mapped), itemId (mapped) pairs)
validation_te.csv: Test data for validation (userId (mapped), itemId (mapped) pairs)
test_tr.csv: Training data for testing (userId (mapped), itemId (mapped) pairs)
test_te.csv: Test data for testing (userId (mapped), itemId (mapped) pairs)
```

A set of input files for a sampled version of the Askubuntu dataset is present in the [Dataset](Dataset/) folder. Note that we use the set of tags assigned to a user's posts as items; the posts correspond to the questions asked by the user, the answers given by the user, the posts liked by the user, and the posts on which the user commented.

Refer to the following IPython notebook for details on creating these files for the MovieLens dataset: [ml-parse-vaecf](https://github.com/dawenl/vae_cf/blob/master/VAE_ML20M_WWW2018.ipynb). The movies rated by the users are the items.

## Running the Model


### Configure

The model can be configured using the file [config.ini](Codes/config.ini) inside the [Codes](Codes/) folder. The parameters h0_size, h1_size, h2_size, and h3_size are the sizes of the hidden layers of the discriminator in our GAN framework (see figure).

![Architecture](architecture.JPG?raw=true "Title")


The other parameters to be configured are:

```
GANLAMBDA: Weight given to the adversary's loss term (Default = 1.0)
NUM_EPOCH: Number of epochs for training (Default = 80)
BATCH_SIZE: Size of each batch (Default = 100)
LEARNING_RATE: Learning rate of the model (Default = 0.0001)
model_name: Name under which the model is saved (Default = "LT_GAN")
```

### Base Recommender

The repo uses [VAE-CF](https://arxiv.org/abs/1802.05814) as the base recommender (the generator in our architecture) by default.
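For orientation, the values returned by the default wrapper in [generator.py](Codes/generator.py) play the following roles (a condensed, annotated sketch of the repo's own code, not an additional API; the example path assumes you run from the [Codes](Codes/) folder so the relative import works):

```python
from generator import generator_VAECF

# pro_dir: path to the dataset folder, e.g. '../Dataset/Askubuntu_Sample' when run from Codes/
pro_dir = '../Dataset/Askubuntu_Sample'
vae, probs, loss, params, p_dims, total_anneal_steps, anneal_cap = generator_VAECF(pro_dir)

# vae    -> the MultiVAE object (the base recommender / generator)
# probs  -> the recommender's output: a softmax distribution over the items
# loss   -> the recommender's own loss (the negative ELBO for VAE-CF)
# params -> the recommender's trainable parameters (handed to the GAN optimizer)
# p_dims, total_anneal_steps, anneal_cap -> VAE-CF hyperparameters (layer sizes and KL annealing)
```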
You can also replace this with your own recommender model (or other recommenders) to be trained with the GAN loss and the long-tail strategy proposed by us. Follow the instructions below:

1. Create a Python class for your recommender. You can use the [VAECF class](Codes/Base_Recommender/MultiVAE.py) as a template.
2. Write a wrapper function for your recommender class in the [generator.py](Codes/generator.py) file. The function should take the path to the dataset folder as input, whether or not it uses it; e.g., for the Askubuntu dataset, it will take the path to the [Askubuntu folder](Dataset/Askubuntu) as input. The function should return the following: an object of the defined class, the probability distribution over the set of items (the recommender's output), the loss function of the recommender, the parameters of the recommender to be learned, and the hyperparameters used by the recommender. Again, refer to the wrapper function of VAE-CF defined in the code (a skeleton for a custom wrapper is sketched at the end of this README).
3. In the [train.py](Codes/train.py) file, import the wrapper function of your recommender instead of generator_VAECF (line 21).
4. If the set of hyperparameters of your recommender is similar to VAE-CF's, no further changes should be needed. Otherwise, you may need to take care of them in the code, especially if some of them are updated over training iterations (like annealing).

### Train

For training the model, run the following command:

```
$ python2.7 train.py <path to dataset folder>
```

Model parameters are set to the values provided in the config file. By default, the trained model is checkpointed and saved to **path/to/input/folder/chkpt/** after every epoch.

### Test

For testing the model, run the following command:

```
$ python2.7 test.py <path to dataset folder> <path to saved model>
```

where `<path to saved model>` is the path to the saved model file inside the chkpt folder (will be model_ by default).
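For step 2 of the Base Recommender section above, a minimal skeleton of a custom wrapper could look like the following. `MyRecommender`, `generator_custom`, and the hyperparameter values are placeholders that are not part of this repo; the return order mirrors `generator_VAECF`:

```python
import os
from Base_Recommender.MyRecommender import MyRecommender   # hypothetical class from step 1

def generator_custom(pro_dir):
    # pro_dir: path to the dataset folder (e.g. Dataset/Askubuntu_Sample)
    with open(os.path.join(pro_dir, 'unique_item_id.txt')) as f:
        n_items = sum(1 for line in f if line.strip())

    p_dims = [200, 600, n_items]      # placeholder hyperparameters
    total_anneal_steps = 20000        # keep VAE-CF-style names if train.py is reused unchanged
    anneal_cap = 0.2

    model = MyRecommender(p_dims)                        # hypothetical constructor
    probs_var, loss_var, params = model.build_graph()    # must expose output, loss, trainable params

    return model, probs_var, loss_var, params, p_dims, total_anneal_steps, anneal_cap
```

After adding such a wrapper, point the import in [train.py](Codes/train.py) at it instead of `generator_VAECF`, as described in step 3.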
100 | -------------------------------------------------------------------------------- /Codes/test.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import tensorflow as tf 3 | import numpy as np 4 | import os 5 | import shutil 6 | import codecs 7 | import psutil 8 | from scipy import sparse 9 | import pandas as pd 10 | import bottleneck as bn 11 | 12 | from math import isinf 13 | 14 | import sys 15 | import pickle 16 | import ConfigParser 17 | 18 | 19 | from Base_Recommender.MultiVAE import MultiVAE as BaseRecommender 20 | 21 | from data_processing import load_tr_te_data, load_pop_niche_tags 22 | from data_processing import load_item_one_hot_features as load_item_features 23 | 24 | from generator import generator_VAECF as generator 25 | 26 | from discriminator import discriminator 27 | 28 | from eval_functions import NDCG_binary_at_k_batch, Recall_at_k_batch 29 | 30 | def test_GAN(h0_size, h1_size, h2_size, h3_size, NUM_EPOCH, NUM_SUB_EPOCHS, BATCH_SIZE, DISPLAY_ITER, LEARNING_RATE, to_restore, model_name, dataset, GANLAMBDA, output_path): 31 | 32 | 33 | DATA_DIR = dataset+'/' 34 | 35 | show2id_path = DATA_DIR + "item2id.txt" 36 | niche_tags_path = DATA_DIR + "niche_items.txt" 37 | 38 | user_tag_matrix_path = DATA_DIR + "item_counts.csv" 39 | 40 | 41 | item_list_path = DATA_DIR + 'item_list.txt' 42 | 43 | pro_dir = DATA_DIR # os.path.join(DATA_DIR, 'pro_sg_tags_1k') 44 | 45 | 46 | unique_sid = list() 47 | with open(os.path.join(pro_dir, 'unique_item_id.txt'), 'r') as f: 48 | for line in f: 49 | unique_sid.append(line.strip()) 50 | 51 | n_items = len(unique_sid) 52 | 53 | 54 | 55 | print('Loading Items...', end = '') 56 | SHOW2ID, IDs_present, NICHE_TAGS, ALL_TAGS, OTHER_TAGS = load_pop_niche_tags(show2id_path, item_list_path, niche_tags_path, n_items) 57 | print('Done.') 58 | 59 | 60 | # One Hot Vectors for Items 61 | print('Loading Item Features...', end = '') 62 | ITEM_FEATURE_DICT, FEATURE_LEN, ITEM_FEATURE_ARR = load_item_features(item_list_path, SHOW2ID, n_items) 63 | print('Done.') 64 | 65 | 66 | # Load Data for Testing 67 | print('Loading Test Matrix...', end = '') 68 | test_data_tr, test_data_te, uid_start_idx = load_tr_te_data(os.path.join(pro_dir, 'test_tr.csv'), 69 | os.path.join(pro_dir, 'test_te.csv'), n_items) 70 | 71 | N_test = test_data_tr.shape[0] 72 | print('N_test:', N_test) 73 | 74 | idxlist_test = range(N_test) 75 | 76 | batch_size_test = 20000 77 | 78 | # Generator 79 | generator_network, generator_out, g_vae_loss, g_params, p_dims, total_anneal_steps, anneal_cap = generator(pro_dir) 80 | 81 | generated_tags = tf.placeholder(tf.float32, [None, n_items], name = "generated_tags") 82 | 83 | 84 | # Discriminator 85 | y_data, y_generated, d_params, x_generated_id, x_popular_n_id, x_popular_g_id, x_niche_id, item_feature_arr, keep_prob = discriminator(n_items, FEATURE_LEN, h0_size, h1_size, h2_size, h3_size) 86 | 87 | 88 | zero = tf.constant(0, dtype=tf.float32) 89 | 90 | 91 | # Loss Function 92 | 93 | d_loss = - tf.reduce_sum(tf.log(y_data)) - tf.reduce_sum(tf.log(1 - y_generated)) 94 | d_loss_mean = tf.reduce_mean(d_loss) 95 | 96 | sampled_generator_out = tf.multiply(generator_out, generated_tags) 97 | 98 | sampled_generator_out = tf.reshape(sampled_generator_out, [-1]) 99 | 100 | sampled_generator_out_non_zero = tf.gather_nd(sampled_generator_out ,tf.where(tf.not_equal(sampled_generator_out, zero))) 101 | 102 | sampled_cnt = tf.placeholder_with_default(1., shape=None) 103 | gen_lambda = 
tf.placeholder_with_default(1.0, shape=None) 104 | 105 | 106 | g_loss = g_vae_loss - (1.0 * gen_lambda / sampled_cnt) * tf.reduce_sum(tf.multiply(sampled_generator_out_non_zero, y_generated)) 107 | g_loss_mean = tf.reduce_mean(g_loss) 108 | gan_loss = - (1.0 * gen_lambda / sampled_cnt) * tf.reduce_sum(tf.multiply(sampled_generator_out_non_zero, y_generated)) 109 | 110 | # optimizer : AdamOptimizer 111 | optimizer = tf.train.AdamOptimizer(LEARNING_RATE) 112 | 113 | # discriminator and generator loss 114 | d_trainer = optimizer.minimize(d_loss, var_list=d_params) 115 | g_trainer = optimizer.minimize(g_loss, var_list=g_params) 116 | 117 | 118 | init = tf.global_variables_initializer() 119 | 120 | saver = tf.train.Saver() 121 | 122 | curr_gen_lamda = GANLAMBDA 123 | 124 | update_count = 0.0 125 | 126 | n100_list, r20_list, r50_list = [], [], [] 127 | 128 | user_li = [] 129 | 130 | not_found_20_list, not_found_50_list = [], [] 131 | 132 | 133 | with tf.Session() as sess: 134 | saver.restore(sess, output_path) 135 | 136 | print('Model Loaded') 137 | 138 | for bnum, st_idx in enumerate(range(0, N_test, batch_size_test)): 139 | end_idx = min(st_idx + batch_size_test, N_test) 140 | X = test_data_tr[idxlist_test[st_idx:end_idx]] 141 | 142 | if sparse.isspmatrix(X): 143 | X = X.toarray() 144 | X = X.astype('float32') 145 | 146 | pred_val = sess.run(generator_out, feed_dict = {generator_network.input_ph: X}) 147 | 148 | # exclude examples from training and validation (if any) 149 | pred_val[X.nonzero()] = -np.inf 150 | 151 | ndcg = NDCG_binary_at_k_batch(pred_val, test_data_te[idxlist_test[st_idx:end_idx]], k=100) 152 | 153 | n100_list.append(ndcg) 154 | 155 | recall_at_20, not_found_20 = Recall_at_k_batch(pred_val, test_data_te[idxlist_test[st_idx:end_idx]], k=20) 156 | 157 | recall_at_50, not_found_50 = Recall_at_k_batch(pred_val, test_data_te[idxlist_test[st_idx:end_idx]], k=50) 158 | 159 | r20_list.append(recall_at_20) 160 | 161 | r50_list.append(recall_at_50) 162 | 163 | not_found_20_list.append(not_found_20) 164 | not_found_50_list.append(not_found_50) 165 | 166 | curr_user_li = [] 167 | 168 | for user_idx in idxlist_test[st_idx:end_idx]: 169 | curr_user_li.append(user_idx+uid_start_idx) 170 | 171 | user_li.append(curr_user_li) 172 | 173 | print(str(np.mean(n100_list)) + '\t' + str(np.mean(r20_list)) + '\t' + str(np.mean(r50_list))) 174 | 175 | 176 | 177 | configParser = ConfigParser.RawConfigParser() 178 | configFilePath = r'config.ini' 179 | configParser.read(configFilePath) 180 | 181 | h0_size = int(configParser.get('Long-Tail-GAN', 'h0_size')) 182 | h1_size = int(configParser.get('Long-Tail-GAN', 'h1_size')) 183 | h2_size = int(configParser.get('Long-Tail-GAN', 'h2_size')) 184 | h3_size = int(configParser.get('Long-Tail-GAN', 'h3_size')) 185 | 186 | NUM_EPOCH = int(configParser.get('Long-Tail-GAN', 'NUM_EPOCH')) 187 | NUM_SUB_EPOCHS = int(NUM_EPOCH/8) 188 | BATCH_SIZE = int(configParser.get('Long-Tail-GAN', 'BATCH_SIZE')) 189 | 190 | DISPLAY_ITER = int(configParser.get('Long-Tail-GAN', 'DISPLAY_ITER')) 191 | LEARNING_RATE = float(configParser.get('Long-Tail-GAN', 'LEARNING_RATE')) 192 | to_restore = int(configParser.get('Long-Tail-GAN', 'to_restore')) 193 | GANLAMBDA = float(configParser.get('Long-Tail-GAN', 'GANLAMBDA')) 194 | 195 | model_name = configParser.get('Long-Tail-GAN', 'model_name') 196 | 197 | 198 | dataset = sys.argv[1] 199 | output_path = sys.argv[2] 200 | 201 | 202 | test_GAN(h0_size, h1_size, h2_size, h3_size, NUM_EPOCH, NUM_SUB_EPOCHS, BATCH_SIZE, DISPLAY_ITER, 
LEARNING_RATE, to_restore, model_name, dataset, GANLAMBDA, output_path) -------------------------------------------------------------------------------- /Codes/Base_Recommender/MultiVAE.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import tensorflow as tf 3 | from tensorflow.contrib.layers import apply_regularization, l2_regularizer 4 | 5 | import bottleneck as bn 6 | 7 | slim=tf.contrib.slim 8 | Bernoulli = tf.contrib.distributions.Bernoulli 9 | 10 | 11 | class MultiDAE(object): 12 | def __init__(self, p_dims, q_dims=None, lam=0.01, lr=1e-3, random_seed=None): 13 | self.p_dims = p_dims 14 | if q_dims is None: 15 | self.q_dims = p_dims[::-1] 16 | else: 17 | assert q_dims[0] == p_dims[-1], "Input and output dimension must equal each other for autoencoders." 18 | assert q_dims[-1] == p_dims[0], "Latent dimension for p- and q-network mismatches." 19 | self.q_dims = q_dims 20 | self.dims = self.q_dims + self.p_dims[1:] 21 | 22 | self.lam = lam 23 | self.lr = lr 24 | self.random_seed = random_seed 25 | 26 | self.construct_placeholders() 27 | 28 | def construct_placeholders(self): 29 | self.input_ph = tf.placeholder( 30 | dtype=tf.float32, shape=[None, self.dims[0]]) 31 | self.keep_prob_ph = tf.placeholder_with_default(0.75, shape=None) 32 | 33 | def build_graph(self): 34 | 35 | self.construct_weights() 36 | 37 | saver, logits = self.forward_pass() 38 | log_softmax_var = tf.nn.log_softmax(logits) 39 | 40 | # per-user average negative log-likelihood 41 | neg_ll = -tf.reduce_mean(tf.reduce_sum( 42 | log_softmax_var * self.input_ph, axis=1)) 43 | # apply regularization to weights 44 | reg = l2_regularizer(self.lam) 45 | reg_var = apply_regularization(reg, self.weights) 46 | # tensorflow l2 regularization multiply 0.5 to the l2 norm 47 | # multiply 2 so that it is back in the same scale 48 | loss = neg_ll + 2 * reg_var 49 | 50 | train_op = tf.train.AdamOptimizer(self.lr).minimize(loss) 51 | 52 | # add summary statistics 53 | tf.summary.scalar('negative_multi_ll', neg_ll) 54 | tf.summary.scalar('loss', loss) 55 | merged = tf.summary.merge_all() 56 | return saver, logits, loss, train_op, merged 57 | 58 | def forward_pass(self): 59 | # construct forward graph 60 | h = tf.nn.l2_normalize(self.input_ph, 1) 61 | h = tf.nn.dropout(h, self.keep_prob_ph) 62 | 63 | for i, (w, b) in enumerate(zip(self.weights, self.biases)): 64 | h = tf.matmul(h, w) + b 65 | 66 | if i != len(self.weights) - 1: 67 | h = tf.nn.tanh(h) 68 | return tf.train.Saver(), h 69 | 70 | def construct_weights(self): 71 | 72 | self.weights = [] 73 | self.biases = [] 74 | 75 | # define weights 76 | for i, (d_in, d_out) in enumerate(zip(self.dims[:-1], self.dims[1:])): 77 | weight_key = "weight_{}to{}".format(i, i+1) 78 | bias_key = "bias_{}".format(i+1) 79 | 80 | self.weights.append(tf.get_variable( 81 | name=weight_key, shape=[d_in, d_out], 82 | initializer=tf.contrib.layers.xavier_initializer( 83 | seed=self.random_seed))) 84 | 85 | self.biases.append(tf.get_variable( 86 | name=bias_key, shape=[d_out], 87 | initializer=tf.truncated_normal_initializer( 88 | stddev=0.001, seed=self.random_seed))) 89 | 90 | # add summary stats 91 | tf.summary.histogram(weight_key, self.weights[-1]) 92 | tf.summary.histogram(bias_key, self.biases[-1]) 93 | 94 | 95 | class MultiVAE(MultiDAE): 96 | 97 | def construct_placeholders(self): 98 | super(MultiVAE, self).construct_placeholders() 99 | 100 | # placeholders with default values when scoring 101 | self.is_training_ph = 
tf.placeholder_with_default(0., shape=None) 102 | self.anneal_ph = tf.placeholder_with_default(1., shape=None) 103 | 104 | def build_graph(self): 105 | self._construct_weights() 106 | 107 | saver, logits, KL = self.forward_pass() 108 | log_softmax_var = tf.nn.log_softmax(logits) 109 | 110 | neg_ll = -tf.reduce_mean(tf.reduce_sum( 111 | log_softmax_var * self.input_ph, 112 | axis=-1)) 113 | # apply regularization to weights 114 | reg = l2_regularizer(self.lam) 115 | 116 | reg_var = apply_regularization(reg, self.weights_q + self.weights_p) 117 | # tensorflow l2 regularization multiply 0.5 to the l2 norm 118 | # multiply 2 so that it is back in the same scale 119 | neg_ELBO = neg_ll + self.anneal_ph * KL + 2 * reg_var 120 | 121 | train_op = tf.train.AdamOptimizer(self.lr).minimize(neg_ELBO) 122 | 123 | # add summary statistics 124 | tf.summary.scalar('negative_multi_ll', neg_ll) 125 | tf.summary.scalar('KL', KL) 126 | tf.summary.scalar('neg_ELBO_train', neg_ELBO) 127 | merged = tf.summary.merge_all() 128 | 129 | params = [] 130 | 131 | for elem in self.weights_q: 132 | params.append(elem) 133 | 134 | for elem in self.weights_p: 135 | params.append(elem) 136 | 137 | for elem in self.biases_q: 138 | params.append(elem) 139 | 140 | for elem in self.biases_p: 141 | params.append(elem) 142 | 143 | return tf.nn.softmax(logits), neg_ELBO, params 144 | 145 | def q_graph(self): 146 | mu_q, std_q, KL = None, None, None 147 | 148 | h = tf.nn.l2_normalize(self.input_ph, 1) 149 | h = tf.nn.dropout(h, self.keep_prob_ph) 150 | 151 | for i, (w, b) in enumerate(zip(self.weights_q, self.biases_q)): 152 | h = tf.matmul(h, w) + b 153 | 154 | if i != len(self.weights_q) - 1: 155 | h = tf.nn.tanh(h) 156 | else: 157 | mu_q = h[:, :self.q_dims[-1]] 158 | logvar_q = h[:, self.q_dims[-1]:] 159 | 160 | std_q = tf.exp(0.5 * logvar_q) 161 | KL = tf.reduce_mean(tf.reduce_sum( 162 | 0.5 * (-logvar_q + tf.exp(logvar_q) + mu_q**2 - 1), axis=1)) 163 | return mu_q, std_q, KL 164 | 165 | def p_graph(self, z): 166 | h = z 167 | 168 | for i, (w, b) in enumerate(zip(self.weights_p, self.biases_p)): 169 | h = tf.matmul(h, w) + b 170 | 171 | if i != len(self.weights_p) - 1: 172 | h = tf.nn.tanh(h) 173 | return h 174 | 175 | def forward_pass(self): 176 | # q-network 177 | mu_q, std_q, KL = self.q_graph() 178 | epsilon = tf.random_normal(tf.shape(std_q)) 179 | 180 | sampled_z = mu_q + self.is_training_ph *\ 181 | epsilon * std_q 182 | 183 | # p-network 184 | logits = self.p_graph(sampled_z) 185 | 186 | return tf.train.Saver(), logits, KL 187 | 188 | def _construct_weights(self): 189 | self.weights_q, self.biases_q = [], [] 190 | 191 | for i, (d_in, d_out) in enumerate(zip(self.q_dims[:-1], self.q_dims[1:])): 192 | if i == len(self.q_dims[:-1]) - 1: 193 | # we need two sets of parameters for mean and variance, 194 | # respectively 195 | d_out *= 2 196 | weight_key = "weight_q_{}to{}".format(i, i+1) 197 | bias_key = "bias_q_{}".format(i+1) 198 | 199 | self.weights_q.append(tf.get_variable( 200 | name=weight_key, shape=[d_in, d_out], 201 | initializer=tf.contrib.layers.xavier_initializer( 202 | seed=self.random_seed))) 203 | 204 | self.biases_q.append(tf.get_variable( 205 | name=bias_key, shape=[d_out], 206 | initializer=tf.truncated_normal_initializer( 207 | stddev=0.001, seed=self.random_seed))) 208 | 209 | # add summary stats 210 | tf.summary.histogram(weight_key, self.weights_q[-1]) 211 | tf.summary.histogram(bias_key, self.biases_q[-1]) 212 | 213 | self.weights_p, self.biases_p = [], [] 214 | 215 | for i, (d_in, d_out) in 
enumerate(zip(self.p_dims[:-1], self.p_dims[1:])): 216 | weight_key = "weight_p_{}to{}".format(i, i+1) 217 | bias_key = "bias_p_{}".format(i+1) 218 | self.weights_p.append(tf.get_variable( 219 | name=weight_key, shape=[d_in, d_out], 220 | initializer=tf.contrib.layers.xavier_initializer( 221 | seed=self.random_seed))) 222 | 223 | self.biases_p.append(tf.get_variable( 224 | name=bias_key, shape=[d_out], 225 | initializer=tf.truncated_normal_initializer( 226 | stddev=0.001, seed=self.random_seed))) 227 | 228 | # add summary stats 229 | tf.summary.histogram(weight_key, self.weights_p[-1]) 230 | tf.summary.histogram(bias_key, self.biases_p[-1]) 231 | 232 | -------------------------------------------------------------------------------- /Dataset/Askubuntu_Sample/niche_items.txt: -------------------------------------------------------------------------------- 1 | 2796 2 | 77 3 | 43 4 | 145 5 | 78 6 | 452 7 | 230 8 | 345 9 | 576 10 | 28 11 | 154 12 | 269 13 | 27 14 | 456 15 | 85 16 | 165 17 | 601 18 | 319 19 | 42 20 | 51 21 | 418 22 | 1065 23 | 300 24 | 530 25 | 14 26 | 242 27 | 1393 28 | 229 29 | 185 30 | 54 31 | 566 32 | 125 33 | 997 34 | 444 35 | 2576 36 | 1733 37 | 363 38 | 285 39 | 215 40 | 809 41 | 324 42 | 326 43 | 289 44 | 102 45 | 201 46 | 9 47 | 662 48 | 1188 49 | 807 50 | 679 51 | 428 52 | 212 53 | 1242 54 | 288 55 | 0 56 | 302 57 | 1252 58 | 455 59 | 226 60 | 1398 61 | 613 62 | 760 63 | 557 64 | 939 65 | 254 66 | 675 67 | 2772 68 | 61 69 | 883 70 | 195 71 | 210 72 | 275 73 | 426 74 | 667 75 | 176 76 | 1221 77 | 29 78 | 857 79 | 281 80 | 343 81 | 609 82 | 828 83 | 48 84 | 314 85 | 404 86 | 10 87 | 594 88 | 998 89 | 562 90 | 202 91 | 579 92 | 405 93 | 50 94 | 105 95 | 519 96 | 87 97 | 403 98 | 350 99 | 99 100 | 1235 101 | 563 102 | 526 103 | 2462 104 | 24 105 | 122 106 | 4 107 | 259 108 | 205 109 | 595 110 | 173 111 | 922 112 | 246 113 | 685 114 | 134 115 | 543 116 | 198 117 | 2921 118 | 445 119 | 250 120 | 1 121 | 480 122 | 37 123 | 352 124 | 81 125 | 952 126 | 505 127 | 255 128 | 186 129 | 73 130 | 21 131 | 1849 132 | 698 133 | 7 134 | 690 135 | 709 136 | 2100 137 | 1327 138 | 1840 139 | 409 140 | 23 141 | 980 142 | 507 143 | 150 144 | 1059 145 | 321 146 | 49 147 | 182 148 | 2358 149 | 70 150 | 1181 151 | 1340 152 | 227 153 | 600 154 | 458 155 | 1804 156 | 1098 157 | 463 158 | 264 159 | 542 160 | 309 161 | 802 162 | 193 163 | 84 164 | 299 165 | 892 166 | 764 167 | 486 168 | 620 169 | 1259 170 | 375 171 | 1331 172 | 1079 173 | 631 174 | 732 175 | 340 176 | 252 177 | 1461 178 | 756 179 | 985 180 | 940 181 | 822 182 | 503 183 | 1254 184 | 763 185 | 330 186 | 1180 187 | 56 188 | 1110 189 | 489 190 | 979 191 | 96 192 | 1901 193 | 113 194 | 636 195 | 457 196 | 1114 197 | 1009 198 | 1063 199 | 684 200 | 1721 201 | 673 202 | 800 203 | 1289 204 | 1138 205 | 179 206 | 92 207 | 1036 208 | 1217 209 | 865 210 | 1356 211 | 15 212 | 1584 213 | 209 214 | 128 215 | 258 216 | 101 217 | 599 218 | 632 219 | 666 220 | 931 221 | 274 222 | 1067 223 | 2368 224 | 1608 225 | 1472 226 | 951 227 | 874 228 | 310 229 | 574 230 | 216 231 | 207 232 | 278 233 | 57 234 | 1861 235 | 796 236 | 67 237 | 243 238 | 867 239 | 347 240 | 83 241 | 788 242 | 1044 243 | 443 244 | 1442 245 | 45 246 | 590 247 | 1846 248 | 1363 249 | 1838 250 | 1196 251 | 1096 252 | 2283 253 | 424 254 | 695 255 | 1170 256 | 1521 257 | 41 258 | 2500 259 | 1265 260 | 494 261 | 192 262 | 338 263 | 2911 264 | 59 265 | 127 266 | 1280 267 | 547 268 | 498 269 | 1087 270 | 423 271 | 2482 272 | 767 273 | 854 274 | 470 275 | 461 276 | 1195 277 | 239 278 | 
181 279 | 437 280 | 315 281 | 518 282 | 1376 283 | 963 284 | 1014 285 | 749 286 | 1324 287 | 1135 288 | 950 289 | 1159 290 | 238 291 | 658 292 | 2038 293 | 1864 294 | 72 295 | 797 296 | 298 297 | 1129 298 | 1567 299 | 247 300 | 329 301 | 440 302 | 88 303 | 391 304 | 89 305 | 1194 306 | 417 307 | 1119 308 | 1055 309 | 1523 310 | 694 311 | 1117 312 | 1404 313 | 1182 314 | 473 315 | 627 316 | 1130 317 | 1070 318 | 191 319 | 31 320 | 851 321 | 63 322 | 569 323 | 107 324 | 353 325 | 121 326 | 1154 327 | 348 328 | 36 329 | 276 330 | 548 331 | 1111 332 | 222 333 | 104 334 | 640 335 | 1112 336 | 967 337 | 798 338 | 1123 339 | 1473 340 | 588 341 | 152 342 | 1802 343 | 71 344 | 97 345 | 651 346 | 327 347 | 1134 348 | 525 349 | 419 350 | 536 351 | 839 352 | 663 353 | 22 354 | 813 355 | 897 356 | 439 357 | 1839 358 | 545 359 | 555 360 | 593 361 | 80 362 | 1416 363 | 585 364 | 838 365 | 170 366 | 1889 367 | 811 368 | 647 369 | 1483 370 | 2124 371 | 2414 372 | 1896 373 | 1629 374 | 189 375 | 1002 376 | 1601 377 | 1555 378 | 517 379 | 1603 380 | 178 381 | 346 382 | 805 383 | 296 384 | 1440 385 | 1631 386 | 490 387 | 537 388 | 1401 389 | 58 390 | 1382 391 | 652 392 | 256 393 | 877 394 | 297 395 | 570 396 | 344 397 | 272 398 | 947 399 | 467 400 | 744 401 | 292 402 | 1176 403 | 406 404 | 156 405 | 271 406 | 1041 407 | 937 408 | 8 409 | 1345 410 | 1316 411 | 903 412 | 901 413 | 1058 414 | 413 415 | 626 416 | 1390 417 | 769 418 | 366 419 | 1020 420 | 1204 421 | 2058 422 | 1446 423 | 1748 424 | 351 425 | 1052 426 | 332 427 | 1671 428 | 2062 429 | 654 430 | 722 431 | 197 432 | 1223 433 | 1281 434 | 592 435 | 697 436 | 2489 437 | 313 438 | 1626 439 | 896 440 | 1734 441 | 1400 442 | 1225 443 | 2171 444 | 236 445 | 711 446 | 141 447 | 715 448 | 1365 449 | 944 450 | 723 451 | 75 452 | 304 453 | 397 454 | 514 455 | 153 456 | 1492 457 | 843 458 | 761 459 | 1454 460 | 2488 461 | 425 462 | 355 463 | 311 464 | 1688 465 | 86 466 | 1213 467 | 2285 468 | 265 469 | 1825 470 | 1046 471 | 520 472 | 369 473 | 1001 474 | 926 475 | 364 476 | 214 477 | 336 478 | 728 479 | 475 480 | 823 481 | 847 482 | 441 483 | 1575 484 | 777 485 | 493 486 | 1201 487 | 1132 488 | 371 489 | 112 490 | 199 491 | 1296 492 | 1676 493 | 970 494 | 1034 495 | 725 496 | 2405 497 | 1108 498 | 746 499 | 1240 500 | 1095 501 | 1035 502 | 1075 503 | 1051 504 | 1018 505 | 983 506 | 1684 507 | 482 508 | 1179 509 | 840 510 | 933 511 | 2912 512 | 814 513 | 44 514 | 367 515 | 923 516 | 762 517 | 1399 518 | 1714 519 | 1876 520 | 434 521 | 845 522 | 218 523 | 481 524 | 880 525 | 1384 526 | 1379 527 | 2287 528 | 2419 529 | 325 530 | 203 531 | 702 532 | 992 533 | 161 534 | 2051 535 | 623 536 | 1153 537 | 1520 538 | 1987 539 | 144 540 | 957 541 | 1392 542 | 552 543 | 307 544 | 2110 545 | 625 546 | 945 547 | 2841 548 | 818 549 | 571 550 | 1215 551 | 629 552 | 650 553 | 850 554 | 1192 555 | 385 556 | 700 557 | 680 558 | 2523 559 | 1553 560 | 40 561 | 758 562 | 2007 563 | 200 564 | 465 565 | 1796 566 | 2487 567 | 1786 568 | 591 569 | 1895 570 | 393 571 | 387 572 | 2861 573 | 1092 574 | 1156 575 | 894 576 | 1163 577 | 515 578 | 2039 579 | 1686 580 | 516 581 | 750 582 | 1073 583 | 1654 584 | 1209 585 | 739 586 | 2738 587 | 1649 588 | 1418 589 | 582 590 | 241 591 | 431 592 | 1099 593 | 916 594 | 341 595 | 934 596 | 1884 597 | 607 598 | 1475 599 | 540 600 | 334 601 | 618 602 | 804 603 | 1214 604 | 734 605 | 129 606 | 943 607 | 148 608 | 372 609 | 791 610 | 1500 611 | 1109 612 | 492 613 | 1047 614 | 1307 615 | 1906 616 | 1013 617 | 1423 618 | 306 619 | 1267 620 | 1512 621 | 553 
622 | 527 623 | 1699 624 | 1425 625 | 1506 626 | 743 627 | 699 628 | 1144 629 | 1212 630 | 862 631 | 678 632 | 768 633 | 1023 634 | 954 635 | 899 636 | 303 637 | 955 638 | 159 639 | 792 640 | 1495 641 | 860 642 | 160 643 | 793 644 | 1388 645 | 1544 646 | 1942 647 | 407 648 | 421 649 | 965 650 | 1467 651 | 1643 652 | 2065 653 | 108 654 | 930 655 | 126 656 | 100 657 | 1791 658 | 538 659 | 1923 660 | 1341 661 | 151 662 | 158 663 | 499 664 | 946 665 | 415 666 | 308 667 | 221 668 | 772 669 | 615 670 | 608 671 | 1980 672 | 1516 673 | 106 674 | 638 675 | 1693 676 | 1602 677 | 1773 678 | 496 679 | 479 680 | 1077 681 | 799 682 | 1115 683 | 266 684 | 466 685 | 378 686 | 1616 687 | 1136 688 | 301 689 | 935 690 | 1271 691 | 2475 692 | 757 693 | 604 694 | 1362 695 | 848 696 | 476 697 | 782 698 | 687 699 | 1357 700 | 245 701 | 1089 702 | 485 703 | 531 704 | 1972 705 | 567 706 | 1294 707 | 453 708 | 1650 709 | 1031 710 | 719 711 | 1292 712 | 462 713 | 986 714 | 1121 715 | 208 716 | 1886 717 | 384 718 | 286 719 | 2116 720 | 738 721 | 884 722 | 689 723 | 2495 724 | 116 725 | 373 726 | 1619 727 | 677 728 | 550 729 | 1185 730 | 1200 731 | 1450 732 | 1104 733 | 863 734 | 1183 735 | 1302 736 | 1871 737 | 1881 738 | 1445 739 | 622 740 | 66 741 | 2010 742 | 380 743 | 564 744 | 2115 745 | 780 746 | 1387 747 | 716 748 | 999 749 | 2319 750 | 1239 751 | 1162 752 | 1086 753 | 648 754 | 568 755 | 1033 756 | 251 757 | 713 758 | 139 759 | 1337 760 | 1010 761 | 786 762 | 870 763 | 1219 764 | 1177 765 | 2758 766 | 1383 767 | 956 768 | 1288 769 | 1522 770 | 1843 771 | 1694 772 | 2483 773 | 2385 774 | 688 775 | 754 776 | 322 777 | 2585 778 | 1554 779 | 1501 780 | 910 781 | 1005 782 | 1366 783 | 1057 784 | 280 785 | 2108 786 | 1275 787 | 1545 788 | 2470 789 | 891 790 | 2192 791 | 237 792 | 984 793 | 1964 794 | 1704 795 | 2394 796 | 1588 797 | 484 798 | 1246 799 | 948 800 | 337 801 | 1268 802 | 76 803 | 1060 804 | 1313 805 | 781 806 | 729 807 | 1250 808 | 721 809 | 528 810 | 1158 811 | 669 812 | 692 813 | 2106 814 | 427 815 | 1793 816 | 2369 817 | 1581 818 | 1006 819 | 1222 820 | 82 821 | 232 822 | 770 823 | 1042 824 | 1184 825 | 664 826 | 1171 827 | 1457 828 | 1614 829 | 506 830 | 388 831 | 263 832 | 2958 833 | 2524 834 | 1000 835 | 2611 836 | 1208 837 | 616 838 | 1081 839 | 2070 840 | 535 841 | 1318 842 | 1661 843 | 2003 844 | 1725 845 | 908 846 | 414 847 | 628 848 | 2024 849 | 2327 850 | 1459 851 | 1155 852 | 1604 853 | 1346 854 | 982 855 | 612 856 | 1263 857 | 1191 858 | 119 859 | 478 860 | 544 861 | 976 862 | 1766 863 | 2182 864 | 1166 865 | 2189 866 | 1105 867 | 1460 868 | 147 869 | 765 870 | 773 871 | 412 872 | 888 873 | 911 874 | 124 875 | 1353 876 | 2316 877 | 521 878 | 1754 879 | 2288 880 | 1873 881 | 1966 882 | 1776 883 | 534 884 | 1007 885 | 399 886 | 1286 887 | 1792 888 | 500 889 | 1249 890 | 1021 891 | 1828 892 | 491 893 | 936 894 | 819 895 | 2875 896 | 131 897 | 1756 898 | 1241 899 | 787 900 | 522 901 | -------------------------------------------------------------------------------- /Codes/data_processing.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from scipy import sparse 3 | import codecs 4 | import numpy as np 5 | 6 | def load_train_data(csv_file, n_items): 7 | tp = pd.read_csv(csv_file) 8 | n_users = tp['uid'].max() + 1 9 | 10 | rows, cols = tp['uid'], tp['sid'] 11 | 12 | 13 | data = sparse.csr_matrix((np.ones_like(rows), 14 | (rows, cols)), dtype='float32', 15 | shape=(n_users, n_items)) 16 | 17 | return data, tp['uid'].min() 18 | 19 | 20 
| def load_tr_te_data(csv_file_tr, csv_file_te, n_items): 21 | tp_tr = pd.read_csv(csv_file_tr) 22 | tp_te = pd.read_csv(csv_file_te) 23 | 24 | start_idx = min(tp_tr['uid'].min(), tp_te['uid'].min()) 25 | end_idx = max(tp_tr['uid'].max(), tp_te['uid'].max()) 26 | # print(start_idx, end_idx) 27 | 28 | rows_tr, cols_tr = tp_tr['uid'] - start_idx, tp_tr['sid'] 29 | rows_te, cols_te = tp_te['uid'] - start_idx, tp_te['sid'] 30 | 31 | data_tr = sparse.csr_matrix((np.ones_like(rows_tr), 32 | (rows_tr, cols_tr)), dtype='float64', shape=(end_idx - start_idx + 1, n_items)) 33 | data_te = sparse.csr_matrix((np.ones_like(rows_te), 34 | (rows_te, cols_te)), dtype='float64', shape=(end_idx - start_idx + 1, n_items)) 35 | 36 | 37 | return data_tr, data_te, start_idx 38 | 39 | 40 | def load_item_one_hot_features(item_list_path, SHOW2ID, n_items): 41 | # global FEATURE_LEN 42 | ITEM_OH_ARR = [] 43 | ITEM_OH_DICT = {} 44 | 45 | FEATURE_LEN = 0 46 | 47 | tag_feature_file = codecs.open(item_list_path, 'r', 'utf-8') 48 | 49 | for row in tag_feature_file: 50 | s = row.strip()#.split('\t') 51 | 52 | # li = eval(s[1]) 53 | 54 | try: 55 | curr_li = [0]*n_items 56 | curr_li[int(SHOW2ID[s])] = 1 57 | ITEM_OH_DICT[int(SHOW2ID[s])] = curr_li 58 | except: 59 | continue 60 | 61 | FEATURE_LEN = len(curr_li) 62 | 63 | tag_feature_file.close() 64 | for id1 in range(len(ITEM_OH_DICT)): 65 | try: 66 | ITEM_OH_ARR.append(ITEM_OH_DICT[id1]) 67 | except: 68 | continue 69 | # print("error") 70 | return ITEM_OH_DICT, FEATURE_LEN, np.array(ITEM_OH_ARR) 71 | 72 | def load_user_items(csv_file_path): 73 | csv_file = codecs.open(csv_file_path, 'r', 'utf-8') 74 | 75 | USER_FEATURES = {} 76 | 77 | idx = 0 78 | 79 | for row in csv_file: 80 | if idx == 0: 81 | idx = 1 82 | continue 83 | 84 | s = row.strip().split(',') 85 | 86 | user_id = int(s[0]) 87 | tag_id = int(s[1]) 88 | 89 | if user_id not in USER_FEATURES: 90 | USER_FEATURES[user_id] = [] 91 | 92 | USER_FEATURES[user_id].append(tag_id) 93 | 94 | csv_file.close() 95 | 96 | return USER_FEATURES 97 | 98 | 99 | 100 | def overlap_cofficient(x,y): 101 | num = len(list(x & y)) 102 | 103 | denom = min(len(x),len(y)) 104 | 105 | overlap = (num*1.0)/(1.0*denom) 106 | 107 | return overlap 108 | 109 | 110 | def load_overlap_coeff(show2id_path, user_tag_matrix_path): 111 | 112 | SHOW2ID = {} 113 | 114 | show2id_file = codecs.open(show2id_path, 'r', 'utf-8') 115 | 116 | for row in show2id_file: 117 | s = row.strip().split('\t') 118 | 119 | SHOW2ID[s[0]] = s[1] 120 | 121 | 122 | show2id_file.close() 123 | 124 | 125 | TAG_SETS = {} 126 | 127 | TAG_IDs = set() 128 | 129 | user_tag_matrix_file = codecs.open(user_tag_matrix_path, 'r', 'utf-8') 130 | 131 | i = 0 132 | for row in user_tag_matrix_file: 133 | if i == 0: 134 | i = 1 135 | continue 136 | 137 | s = row.strip().split(',') 138 | 139 | user_id = s[0] 140 | try: 141 | tag_id = SHOW2ID[s[1]] 142 | except: 143 | print('Error:', tag_id) 144 | continue 145 | 146 | if tag_id not in TAG_SETS: 147 | TAG_SETS[tag_id] = set() 148 | 149 | TAG_SETS[tag_id].add(user_id) 150 | 151 | TAG_IDs.add(tag_id) 152 | 153 | user_tag_matrix_file.close() 154 | 155 | #print('TAG SETS:', len(TAG_SETS), len(TAG_IDs)) 156 | 157 | TAG_IDs = list(TAG_IDs) 158 | 159 | OVERLAP_COEFFS = {} 160 | 161 | for idx in range(len(TAG_IDs)): 162 | OVERLAP_COEFFS[int(TAG_IDs[idx])] = {} 163 | for inner_idx in range(len(TAG_IDs)): 164 | OVERLAP_COEFFS[int(TAG_IDs[idx])][int(TAG_IDs[inner_idx])] = overlap_cofficient(TAG_SETS[TAG_IDs[idx]], TAG_SETS[TAG_IDs[inner_idx]]) 165 | 166 | # 
OVERLAP_COEFFS = pickle.load(open(pro_dir+'/OVERLAP_COEFFS.pkl', 'rb')) 167 | return OVERLAP_COEFFS 168 | 169 | 170 | def load_items_to_sample(user_popular_data, user_niche_data, NICHE_TAGS, OVERLAP_COEFFS, N): 171 | USER_TAGS_TO_SAMPLE = {} 172 | 173 | for user_idx in range(N): 174 | if user_idx not in user_popular_data or user_idx not in user_niche_data: 175 | continue 176 | 177 | curr_pop_vectors = user_popular_data[user_idx] 178 | curr_niche_vectors = user_niche_data[user_idx] 179 | 180 | num_niche_tags = len(curr_niche_vectors) 181 | 182 | num_sample_tags = max(2 * len(curr_niche_vectors), 10 - num_niche_tags) 183 | 184 | curr_niche_tags = set() 185 | 186 | curr_sampling_tags = [] 187 | 188 | for niche_tag in curr_niche_vectors: 189 | niche_tag_idx = niche_tag 190 | curr_niche_tags.add(niche_tag_idx) 191 | curr_sampling_tags.append(int(niche_tag_idx)) 192 | 193 | other_niche_tags = list(NICHE_TAGS - curr_niche_tags) 194 | 195 | other_tags_corr = {} 196 | 197 | for inner_idx in range(len(other_niche_tags)): 198 | 199 | other_tag_idx = other_niche_tags[inner_idx] 200 | 201 | max_coeff = -1.0 202 | 203 | for niche_tag in curr_niche_vectors: 204 | niche_tag_idx = niche_tag 205 | 206 | curr_coeff = OVERLAP_COEFFS[niche_tag_idx][other_tag_idx] 207 | 208 | if curr_coeff > max_coeff: 209 | max_coeff = curr_coeff 210 | 211 | other_tags_corr[other_tag_idx] = max_coeff 212 | 213 | 214 | sorted_other_tags = sorted(other_tags_corr.items(), key = lambda x: x[1] , reverse = True) 215 | 216 | for inner_idx in range(min(num_sample_tags, len(sorted_other_tags))): 217 | curr_sampling_tags.append(sorted_other_tags[inner_idx][0]) 218 | 219 | 220 | curr_sampling_tags.sort() 221 | 222 | USER_TAGS_TO_SAMPLE[user_idx] = np.asarray(curr_sampling_tags) 223 | 224 | return USER_TAGS_TO_SAMPLE 225 | 226 | 227 | def load_vectors(user_popular_data, user_niche_data, OVERLAP_COEFFS, ITEM_FEATURE_DICT, N): 228 | 229 | user_x_niche_vectors = {} 230 | user_x_popular_n_vectors = {} 231 | 232 | for user_idx in range(N): 233 | if user_idx not in user_popular_data or user_idx not in user_niche_data: 234 | continue 235 | 236 | curr_pop_vectors = user_popular_data[user_idx] 237 | curr_niche_vectors = user_niche_data[user_idx] 238 | 239 | curr_x_niche = [] 240 | curr_x_popular_n = [] 241 | 242 | for niche_tag in curr_niche_vectors: 243 | niche_tag_idx = niche_tag 244 | 245 | max_coeff = -1.0 246 | max_pop_tag_idx = -1 247 | 248 | 249 | for pop_tag in curr_pop_vectors: 250 | pop_tag_idx = pop_tag 251 | 252 | curr_coeff = OVERLAP_COEFFS[niche_tag_idx][pop_tag_idx] 253 | 254 | if curr_coeff > max_coeff: 255 | max_coeff = curr_coeff 256 | max_pop_tag_idx = pop_tag_idx 257 | 258 | if niche_tag_idx not in ITEM_FEATURE_DICT or max_pop_tag_idx not in ITEM_FEATURE_DICT: 259 | # print('Invalid Niche Tag Pair:', niche_tag_idx, max_pop_tag_idx) 260 | continue 261 | 262 | curr_x_niche.append(niche_tag_idx) 263 | curr_x_popular_n.append(max_pop_tag_idx) 264 | 265 | 266 | user_x_niche_vectors[user_idx] = curr_x_niche 267 | 268 | user_x_popular_n_vectors[user_idx] = curr_x_popular_n 269 | 270 | 271 | return user_x_niche_vectors, user_x_popular_n_vectors 272 | 273 | 274 | 275 | def load_pop_niche_tags(show2id_path, item_list_path, niche_tags_path, n_items): 276 | SHOW2ID = {} 277 | 278 | show2id_file = codecs.open(show2id_path, 'r', 'utf-8') 279 | 280 | for row in show2id_file: 281 | s = row.strip().split('\t') 282 | 283 | SHOW2ID[s[0]] = s[1] 284 | 285 | show2id_file.close() 286 | 287 | 288 | IDs_present = set() 289 | 290 | 
parsed_tag_vector_file = codecs.open(item_list_path, 'r', 'utf-8') 291 | 292 | for row in parsed_tag_vector_file: 293 | s = row.strip()#.split('\t') 294 | 295 | try: 296 | IDs_present.add(SHOW2ID[s]) 297 | except: 298 | pass 299 | 300 | parsed_tag_vector_file.close() 301 | 302 | 303 | NICHE_TAGS = set() 304 | 305 | niche_tags_file = codecs.open(niche_tags_path, 'r', 'utf-8') 306 | 307 | tag_idx = 0 308 | 309 | for row in niche_tags_file: 310 | s = row.strip() 311 | 312 | try: 313 | if SHOW2ID[s] in IDs_present: 314 | NICHE_TAGS.add(int(SHOW2ID[s])) 315 | 316 | except Exception as e: 317 | print('Error:', str(e)) 318 | 319 | niche_tags_file.close() 320 | 321 | ALL_TAGS = [] 322 | 323 | for i in range(n_items): 324 | ALL_TAGS.append(int(i)) 325 | 326 | # ALL_TAGS = range(n_items) 327 | 328 | OTHER_TAGS_1 = list(set(ALL_TAGS) - set(NICHE_TAGS)) 329 | 330 | OTHER_TAGS = [] 331 | for elem in OTHER_TAGS_1: 332 | OTHER_TAGS.append(int(elem)) 333 | 334 | OTHER_TAGS.sort() 335 | 336 | OTHER_TAGS = np.asarray(OTHER_TAGS) 337 | 338 | #print('OTHER_TAGS:', len(OTHER_TAGS), OTHER_TAGS) 339 | 340 | return SHOW2ID, IDs_present, NICHE_TAGS, ALL_TAGS, OTHER_TAGS 341 | -------------------------------------------------------------------------------- /Dataset/Askubuntu_Sample/unique_item_id.txt: -------------------------------------------------------------------------------- 1 | 0 2 | 1 3 | 2 4 | 3 5 | 4 6 | 5 7 | 6 8 | 7 9 | 8 10 | 9 11 | 10 12 | 11 13 | 12 14 | 13 15 | 18 16 | 19 17 | 20 18 | 22 19 | 23 20 | 24 21 | 25 22 | 26 23 | 27 24 | 29 25 | 33 26 | 34 27 | 36 28 | 40 29 | 41 30 | 42 31 | 43 32 | 44 33 | 46 34 | 48 35 | 49 36 | 50 37 | 51 38 | 54 39 | 55 40 | 56 41 | 59 42 | 60 43 | 61 44 | 62 45 | 63 46 | 64 47 | 66 48 | 67 49 | 68 50 | 72 51 | 74 52 | 77 53 | 78 54 | 80 55 | 84 56 | 85 57 | 87 58 | 89 59 | 92 60 | 95 61 | 96 62 | 97 63 | 98 64 | 101 65 | 102 66 | 103 67 | 104 68 | 105 69 | 106 70 | 108 71 | 110 72 | 111 73 | 112 74 | 115 75 | 117 76 | 118 77 | 120 78 | 122 79 | 127 80 | 128 81 | 129 82 | 132 83 | 133 84 | 134 85 | 135 86 | 136 87 | 137 88 | 138 89 | 139 90 | 141 91 | 145 92 | 146 93 | 152 94 | 153 95 | 154 96 | 167 97 | 170 98 | 172 99 | 173 100 | 174 101 | 176 102 | 177 103 | 178 104 | 179 105 | 180 106 | 181 107 | 184 108 | 186 109 | 189 110 | 192 111 | 193 112 | 196 113 | 198 114 | 199 115 | 201 116 | 204 117 | 205 118 | 207 119 | 208 120 | 209 121 | 210 122 | 211 123 | 215 124 | 216 125 | 221 126 | 222 127 | 223 128 | 226 129 | 228 130 | 230 131 | 231 132 | 236 133 | 238 134 | 240 135 | 242 136 | 243 137 | 245 138 | 246 139 | 247 140 | 249 141 | 250 142 | 251 143 | 252 144 | 253 145 | 254 146 | 255 147 | 258 148 | 259 149 | 262 150 | 264 151 | 269 152 | 270 153 | 272 154 | 274 155 | 275 156 | 276 157 | 278 158 | 279 159 | 281 160 | 283 161 | 285 162 | 286 163 | 288 164 | 300 165 | 302 166 | 305 167 | 310 168 | 318 169 | 323 170 | 326 171 | 330 172 | 333 173 | 336 174 | 337 175 | 338 176 | 339 177 | 343 178 | 344 179 | 345 180 | 346 181 | 348 182 | 351 183 | 352 184 | 353 185 | 362 186 | 364 187 | 367 188 | 369 189 | 373 190 | 377 191 | 383 192 | 387 193 | 388 194 | 391 195 | 393 196 | 403 197 | 409 198 | 411 199 | 414 200 | 417 201 | 424 202 | 425 203 | 434 204 | 443 205 | 455 206 | 463 207 | 467 208 | 488 209 | 494 210 | 499 211 | 505 212 | 506 213 | 523 214 | 529 215 | 540 216 | 542 217 | 543 218 | 555 219 | 576 220 | 578 221 | 579 222 | 582 223 | 585 224 | 587 225 | 594 226 | 599 227 | 601 228 | 623 229 | 627 230 | 631 231 | 644 232 | 657 233 | 662 234 | 675 235 | 679 236 | 
681 237 | 687 238 | 709 239 | 782 240 | 791 241 | 799 242 | 802 243 | 807 244 | 831 245 | 854 246 | 954 247 | 965 248 | 984 249 | 997 250 | 1002 251 | 1036 252 | 1059 253 | 1079 254 | 1081 255 | 1096 256 | 1111 257 | 1121 258 | 1194 259 | 1204 260 | 1221 261 | 1242 262 | 1283 263 | 1289 264 | 1327 265 | 1331 266 | 1501 267 | 1521 268 | 1555 269 | 1614 270 | 1619 271 | 1654 272 | 1721 273 | 1729 274 | 1820 275 | 2003 276 | 2007 277 | 2401 278 | 2403 279 | 2875 280 | 14 281 | 15 282 | 17 283 | 21 284 | 28 285 | 37 286 | 57 287 | 58 288 | 73 289 | 75 290 | 76 291 | 81 292 | 86 293 | 88 294 | 91 295 | 100 296 | 121 297 | 125 298 | 126 299 | 147 300 | 150 301 | 156 302 | 158 303 | 160 304 | 164 305 | 165 306 | 182 307 | 183 308 | 185 309 | 191 310 | 197 311 | 202 312 | 203 313 | 212 314 | 218 315 | 219 316 | 227 317 | 229 318 | 239 319 | 241 320 | 256 321 | 263 322 | 277 323 | 280 324 | 289 325 | 292 326 | 294 327 | 296 328 | 301 329 | 303 330 | 308 331 | 309 332 | 311 333 | 313 334 | 320 335 | 321 336 | 329 337 | 340 338 | 347 339 | 350 340 | 361 341 | 363 342 | 370 343 | 371 344 | 372 345 | 375 346 | 380 347 | 399 348 | 404 349 | 407 350 | 412 351 | 413 352 | 418 353 | 423 354 | 426 355 | 428 356 | 437 357 | 439 358 | 440 359 | 444 360 | 445 361 | 452 362 | 456 363 | 458 364 | 465 365 | 470 366 | 476 367 | 478 368 | 480 369 | 481 370 | 486 371 | 491 372 | 492 373 | 503 374 | 507 375 | 515 376 | 517 377 | 519 378 | 520 379 | 521 380 | 525 381 | 526 382 | 530 383 | 531 384 | 544 385 | 548 386 | 553 387 | 557 388 | 560 389 | 562 390 | 564 391 | 568 392 | 570 393 | 571 394 | 590 395 | 592 396 | 600 397 | 609 398 | 612 399 | 613 400 | 616 401 | 632 402 | 640 403 | 663 404 | 667 405 | 678 406 | 680 407 | 685 408 | 690 409 | 695 410 | 697 411 | 698 412 | 711 413 | 713 414 | 715 415 | 716 416 | 719 417 | 721 418 | 723 419 | 728 420 | 729 421 | 732 422 | 734 423 | 735 424 | 738 425 | 743 426 | 744 427 | 746 428 | 749 429 | 760 430 | 761 431 | 763 432 | 764 433 | 769 434 | 772 435 | 780 436 | 805 437 | 809 438 | 811 439 | 813 440 | 814 441 | 822 442 | 828 443 | 840 444 | 851 445 | 863 446 | 867 447 | 874 448 | 877 449 | 883 450 | 892 451 | 901 452 | 903 453 | 908 454 | 910 455 | 911 456 | 916 457 | 922 458 | 923 459 | 926 460 | 931 461 | 935 462 | 944 463 | 963 464 | 967 465 | 976 466 | 980 467 | 986 468 | 1010 469 | 1014 470 | 1020 471 | 1021 472 | 1041 473 | 1060 474 | 1065 475 | 1070 476 | 1075 477 | 1087 478 | 1098 479 | 1109 480 | 1117 481 | 1119 482 | 1129 483 | 1130 484 | 1135 485 | 1138 486 | 1144 487 | 1153 488 | 1159 489 | 1163 490 | 1182 491 | 1183 492 | 1184 493 | 1188 494 | 1192 495 | 1196 496 | 1213 497 | 1214 498 | 1217 499 | 1219 500 | 1235 501 | 1252 502 | 1259 503 | 1264 504 | 1281 505 | 1324 506 | 1353 507 | 1357 508 | 1376 509 | 1387 510 | 1393 511 | 1398 512 | 1401 513 | 1404 514 | 1415 515 | 1418 516 | 1440 517 | 1442 518 | 1446 519 | 1457 520 | 1473 521 | 1523 522 | 1575 523 | 1601 524 | 1608 525 | 1626 526 | 1643 527 | 1650 528 | 1714 529 | 1733 530 | 1766 531 | 1840 532 | 1876 533 | 1889 534 | 1895 535 | 1896 536 | 1901 537 | 1923 538 | 1991 539 | 2038 540 | 2039 541 | 2051 542 | 2065 543 | 2070 544 | 2124 545 | 2129 546 | 2171 547 | 2206 548 | 2368 549 | 2394 550 | 2460 551 | 2470 552 | 2482 553 | 2576 554 | 2796 555 | 2849 556 | 2911 557 | 2912 558 | 2958 559 | 99 560 | 689 561 | 82 562 | 83 563 | 473 564 | 566 565 | 1472 566 | 322 567 | 421 568 | 536 569 | 563 570 | 804 571 | 998 572 | 1018 573 | 1051 574 | 1055 575 | 1280 576 | 1629 577 | 45 578 | 159 579 | 237 580 | 304 
581 | 498 582 | 574 583 | 850 584 | 896 585 | 936 586 | 985 587 | 1170 588 | 1209 589 | 1704 590 | 1861 591 | 2642 592 | 518 593 | 31 594 | 1007 595 | 107 596 | 119 597 | 195 598 | 397 599 | 441 600 | 462 601 | 538 602 | 620 603 | 664 604 | 750 605 | 891 606 | 1034 607 | 1052 608 | 1112 609 | 1365 610 | 1802 611 | 951 612 | 992 613 | 1067 614 | 1136 615 | 1181 616 | 1302 617 | 1475 618 | 200 619 | 431 620 | 214 621 | 161 622 | 384 623 | 615 624 | 341 625 | 70 626 | 71 627 | 113 628 | 124 629 | 131 630 | 232 631 | 265 632 | 271 633 | 298 634 | 299 635 | 307 636 | 314 637 | 315 638 | 319 639 | 324 640 | 332 641 | 355 642 | 366 643 | 378 644 | 385 645 | 405 646 | 406 647 | 415 648 | 419 649 | 457 650 | 461 651 | 466 652 | 475 653 | 479 654 | 493 655 | 514 656 | 516 657 | 522 658 | 534 659 | 545 660 | 547 661 | 550 662 | 567 663 | 569 664 | 588 665 | 591 666 | 595 667 | 618 668 | 625 669 | 636 670 | 638 671 | 647 672 | 650 673 | 651 674 | 654 675 | 658 676 | 673 677 | 677 678 | 684 679 | 699 680 | 700 681 | 702 682 | 722 683 | 754 684 | 757 685 | 762 686 | 765 687 | 787 688 | 788 689 | 792 690 | 793 691 | 797 692 | 800 693 | 839 694 | 843 695 | 847 696 | 857 697 | 865 698 | 880 699 | 897 700 | 899 701 | 933 702 | 939 703 | 945 704 | 946 705 | 948 706 | 952 707 | 955 708 | 956 709 | 957 710 | 979 711 | 982 712 | 1001 713 | 1009 714 | 1013 715 | 1033 716 | 1046 717 | 1073 718 | 1099 719 | 1110 720 | 1114 721 | 1134 722 | 1171 723 | 1176 724 | 1180 725 | 1185 726 | 1191 727 | 1201 728 | 1208 729 | 1212 730 | 1215 731 | 1223 732 | 1225 733 | 1239 734 | 1250 735 | 1254 736 | 1265 737 | 1267 738 | 1275 739 | 1286 740 | 1292 741 | 1340 742 | 1356 743 | 1363 744 | 1383 745 | 1388 746 | 1392 747 | 1399 748 | 1454 749 | 1461 750 | 1492 751 | 1520 752 | 1553 753 | 1584 754 | 1649 755 | 1694 756 | 1776 757 | 1846 758 | 1987 759 | 2058 760 | 2283 761 | 2285 762 | 2319 763 | 2489 764 | 2524 765 | 2861 766 | 669 767 | 756 768 | 848 769 | 1154 770 | 1316 771 | 1495 772 | 1567 773 | 1684 774 | 1791 775 | 2100 776 | 2108 777 | 2316 778 | 2585 779 | 2772 780 | 2841 781 | 2921 782 | 148 783 | 1804 784 | 144 785 | 490 786 | 607 787 | 629 788 | 648 789 | 652 790 | 694 791 | 786 792 | 950 793 | 970 794 | 1362 795 | 1445 796 | 1483 797 | 1725 798 | 1838 799 | 2287 800 | 427 801 | 537 802 | 1241 803 | 818 804 | 1240 805 | 1249 806 | 1343 807 | 266 808 | 823 809 | 862 810 | 1077 811 | 1105 812 | 1307 813 | 1346 814 | 1379 815 | 1500 816 | 1616 817 | 608 818 | 297 819 | 666 820 | 688 821 | 943 822 | 999 823 | 1263 824 | 1268 825 | 1671 826 | 1158 827 | 1676 828 | 151 829 | 489 830 | 527 831 | 552 832 | 593 833 | 604 834 | 626 835 | 628 836 | 725 837 | 758 838 | 767 839 | 798 840 | 888 841 | 894 842 | 937 843 | 1006 844 | 1063 845 | 1450 846 | 1748 847 | 1884 848 | 306 849 | 870 850 | 485 851 | 777 852 | 1296 853 | 1425 854 | 334 855 | 496 856 | 622 857 | 739 858 | 773 859 | 884 860 | 1031 861 | 1086 862 | 1089 863 | 1095 864 | 2500 865 | 535 866 | 1177 867 | 983 868 | 325 869 | 940 870 | 1382 871 | 1839 872 | 2358 873 | 1057 874 | 528 875 | 819 876 | 930 877 | 1042 878 | 1123 879 | 1686 880 | 1693 881 | 484 882 | 1047 883 | 1132 884 | 1416 885 | 1581 886 | 1966 887 | 2182 888 | 1828 889 | 1871 890 | 1873 891 | 2405 892 | 1023 893 | 1864 894 | 2483 895 | 1179 896 | 1058 897 | 1288 898 | 2414 899 | 116 900 | 1044 901 | 2462 902 | 1467 903 | 768 904 | 838 905 | 1166 906 | 1603 907 | 2116 908 | 1843 909 | 1000 910 | 947 911 | 1271 912 | 1337 913 | 1964 914 | 327 915 | 500 916 | 692 917 | 770 918 | 796 919 | 1005 920 | 1035 
921 | 1155 922 | 1195 923 | 1200 924 | 1345 925 | 1384 926 | 1459 927 | 1460 928 | 1506 929 | 1631 930 | 1849 931 | 1980 932 | 2010 933 | 2115 934 | 2189 935 | 2488 936 | 2495 937 | 2523 938 | 453 939 | 482 940 | 781 941 | 845 942 | 860 943 | 934 944 | 1092 945 | 1104 946 | 1108 947 | 1156 948 | 1162 949 | 1222 950 | 1246 951 | 1294 952 | 1318 953 | 1341 954 | 1366 955 | 1390 956 | 1400 957 | 1423 958 | 1512 959 | 1516 960 | 1522 961 | 1544 962 | 1545 963 | 1588 964 | 1688 965 | 1734 966 | 1754 967 | 1773 968 | 1792 969 | 1825 970 | 1942 971 | 2106 972 | 2110 973 | 2327 974 | 2385 975 | 2419 976 | 2475 977 | 2611 978 | 1313 979 | 1886 980 | 1906 981 | 2062 982 | 2024 983 | 2288 984 | 1602 985 | 1972 986 | 2758 987 | 1604 988 | 1881 989 | 1115 990 | 1554 991 | 1699 992 | 1756 993 | 1786 994 | 1796 995 | 2369 996 | 1661 997 | 2738 998 | 1793 999 | 2487 1000 | 2192 1001 | -------------------------------------------------------------------------------- /Dataset/Askubuntu_Sample/item2id.txt: -------------------------------------------------------------------------------- 1 | 0 0 2 | 1 1 3 | 2 2 4 | 3 3 5 | 4 4 6 | 5 5 7 | 6 6 8 | 7 7 9 | 8 8 10 | 9 9 11 | 10 10 12 | 11 11 13 | 12 12 14 | 13 13 15 | 14 279 16 | 15 280 17 | 17 281 18 | 18 14 19 | 19 15 20 | 20 16 21 | 21 282 22 | 22 17 23 | 23 18 24 | 24 19 25 | 25 20 26 | 26 21 27 | 27 22 28 | 28 283 29 | 29 23 30 | 31 592 31 | 33 24 32 | 34 25 33 | 36 26 34 | 37 284 35 | 40 27 36 | 41 28 37 | 42 29 38 | 43 30 39 | 44 31 40 | 45 576 41 | 46 32 42 | 48 33 43 | 49 34 44 | 50 35 45 | 51 36 46 | 2100 774 47 | 54 37 48 | 55 38 49 | 56 39 50 | 57 285 51 | 58 286 52 | 59 40 53 | 60 41 54 | 61 42 55 | 62 43 56 | 63 44 57 | 64 45 58 | 66 46 59 | 67 47 60 | 68 48 61 | 70 624 62 | 353 183 63 | 72 49 64 | 73 287 65 | 74 50 66 | 75 288 67 | 76 289 68 | 77 51 69 | 78 52 70 | 80 53 71 | 81 290 72 | 82 560 73 | 83 561 74 | 84 54 75 | 85 55 76 | 86 291 77 | 87 56 78 | 88 292 79 | 89 57 80 | 91 293 81 | 92 58 82 | 95 59 83 | 96 60 84 | 97 61 85 | 98 62 86 | 99 558 87 | 100 294 88 | 101 63 89 | 102 64 90 | 103 65 91 | 104 66 92 | 105 67 93 | 106 68 94 | 107 594 95 | 108 69 96 | 110 70 97 | 111 71 98 | 112 72 99 | 113 626 100 | 115 73 101 | 116 898 102 | 117 74 103 | 118 75 104 | 119 595 105 | 120 76 106 | 121 295 107 | 122 77 108 | 2171 545 109 | 124 627 110 | 125 296 111 | 126 297 112 | 127 78 113 | 128 79 114 | 129 80 115 | 131 628 116 | 132 81 117 | 133 82 118 | 134 83 119 | 135 84 120 | 136 85 121 | 137 86 122 | 138 87 123 | 139 88 124 | 141 89 125 | 2524 763 126 | 144 783 127 | 145 90 128 | 146 91 129 | 147 298 130 | 148 781 131 | 2414 897 132 | 150 299 133 | 151 827 134 | 152 92 135 | 153 93 136 | 154 94 137 | 156 300 138 | 158 301 139 | 159 577 140 | 160 302 141 | 161 620 142 | 164 303 143 | 165 304 144 | 167 95 145 | 170 96 146 | 172 97 147 | 173 98 148 | 174 99 149 | 176 100 150 | 177 101 151 | 178 102 152 | 179 103 153 | 180 104 154 | 181 105 155 | 182 305 156 | 183 306 157 | 184 106 158 | 185 307 159 | 186 107 160 | 189 108 161 | 191 308 162 | 192 109 163 | 193 110 164 | 195 596 165 | 196 111 166 | 197 309 167 | 198 112 168 | 199 113 169 | 200 617 170 | 201 114 171 | 202 310 172 | 203 311 173 | 204 115 174 | 205 116 175 | 207 117 176 | 208 118 177 | 209 119 178 | 210 120 179 | 211 121 180 | 212 312 181 | 214 619 182 | 215 122 183 | 216 123 184 | 218 313 185 | 219 314 186 | 221 124 187 | 222 125 188 | 223 126 189 | 226 127 190 | 227 315 191 | 228 128 192 | 229 316 193 | 230 129 194 | 231 130 195 | 232 629 196 | 2283 759 197 | 236 131 198 | 237 578 199 | 
238 132 200 | 239 317 201 | 240 133 202 | 241 318 203 | 242 134 204 | 243 135 205 | 245 136 206 | 246 137 207 | 247 138 208 | 249 139 209 | 250 140 210 | 251 141 211 | 252 142 212 | 253 143 213 | 254 144 214 | 255 145 215 | 256 319 216 | 258 146 217 | 259 147 218 | 262 148 219 | 263 320 220 | 264 149 221 | 265 630 222 | 266 806 223 | 2316 776 224 | 269 150 225 | 270 151 226 | 271 631 227 | 272 152 228 | 274 153 229 | 275 154 230 | 276 155 231 | 277 321 232 | 278 156 233 | 279 157 234 | 280 322 235 | 281 158 236 | 283 159 237 | 285 160 238 | 286 161 239 | 288 162 240 | 289 323 241 | 292 324 242 | 294 325 243 | 296 326 244 | 297 817 245 | 298 632 246 | 299 633 247 | 300 163 248 | 301 327 249 | 302 164 250 | 303 328 251 | 304 579 252 | 305 165 253 | 306 847 254 | 307 634 255 | 308 329 256 | 309 330 257 | 310 166 258 | 311 331 259 | 313 332 260 | 314 635 261 | 315 636 262 | 318 167 263 | 319 637 264 | 320 333 265 | 321 334 266 | 322 565 267 | 323 168 268 | 324 638 269 | 325 867 270 | 326 169 271 | 327 913 272 | 329 335 273 | 330 170 274 | 332 639 275 | 333 171 276 | 334 853 277 | 336 172 278 | 337 173 279 | 338 174 280 | 339 175 281 | 340 336 282 | 341 623 283 | 343 176 284 | 344 177 285 | 345 178 286 | 2394 548 287 | 347 337 288 | 348 180 289 | 2106 970 290 | 350 338 291 | 351 181 292 | 352 182 293 | 2401 276 294 | 2403 277 295 | 2405 890 296 | 361 339 297 | 362 184 298 | 363 340 299 | 364 185 300 | 366 641 301 | 367 186 302 | 369 187 303 | 370 341 304 | 371 342 305 | 372 343 306 | 373 188 307 | 375 344 308 | 377 189 309 | 378 642 310 | 380 345 311 | 383 190 312 | 384 621 313 | 385 643 314 | 387 191 315 | 388 192 316 | 391 193 317 | 393 194 318 | 397 597 319 | 399 346 320 | 403 195 321 | 404 347 322 | 405 644 323 | 406 645 324 | 407 348 325 | 409 196 326 | 411 197 327 | 412 349 328 | 413 350 329 | 414 198 330 | 415 646 331 | 417 199 332 | 418 351 333 | 419 647 334 | 421 566 335 | 2470 550 336 | 423 352 337 | 424 200 338 | 425 201 339 | 426 353 340 | 71 625 341 | 428 354 342 | 431 618 343 | 434 202 344 | 2483 893 345 | 437 355 346 | 439 356 347 | 440 357 348 | 441 598 349 | 443 203 350 | 444 358 351 | 445 359 352 | 2495 935 353 | 452 360 354 | 453 937 355 | 455 204 356 | 456 361 357 | 2124 543 358 | 458 362 359 | 461 649 360 | 462 599 361 | 463 205 362 | 465 363 363 | 466 650 364 | 467 206 365 | 470 364 366 | 473 562 367 | 475 651 368 | 476 365 369 | 478 366 370 | 479 652 371 | 480 367 372 | 481 368 373 | 482 938 374 | 484 880 375 | 485 849 376 | 486 369 377 | 2129 544 378 | 488 207 379 | 489 828 380 | 490 784 381 | 491 370 382 | 492 371 383 | 493 653 384 | 494 208 385 | 496 854 386 | 498 580 387 | 499 209 388 | 500 914 389 | 503 372 390 | 505 210 391 | 506 211 392 | 507 373 393 | 514 654 394 | 515 374 395 | 516 655 396 | 517 375 397 | 518 591 398 | 519 376 399 | 520 377 400 | 521 378 401 | 522 656 402 | 523 212 403 | 525 379 404 | 526 380 405 | 527 829 406 | 2576 552 407 | 529 213 408 | 530 381 409 | 531 382 410 | 534 657 411 | 535 864 412 | 536 567 413 | 2585 777 414 | 538 600 415 | 540 214 416 | 427 799 417 | 542 215 418 | 543 216 419 | 544 383 420 | 545 658 421 | 547 659 422 | 548 384 423 | 550 660 424 | 552 830 425 | 553 385 426 | 555 217 427 | 557 386 428 | 560 387 429 | 562 388 430 | 563 568 431 | 564 389 432 | 566 563 433 | 567 661 434 | 568 390 435 | 569 662 436 | 570 391 437 | 571 392 438 | 574 581 439 | 576 218 440 | 578 219 441 | 579 220 442 | 2065 541 443 | 582 221 444 | 585 222 445 | 587 223 446 | 588 663 447 | 590 393 448 | 591 664 449 | 592 394 450 | 593 831 451 | 2642 590 452 
| 595 665 453 | 599 225 454 | 600 395 455 | 601 226 456 | 604 832 457 | 2475 975 458 | 607 785 459 | 608 816 460 | 609 396 461 | 612 397 462 | 613 398 463 | 615 622 464 | 616 399 465 | 618 666 466 | 620 601 467 | 622 855 468 | 623 227 469 | 625 667 470 | 626 833 471 | 627 228 472 | 628 834 473 | 629 786 474 | 631 229 475 | 632 400 476 | 636 668 477 | 638 669 478 | 2841 779 479 | 640 401 480 | 644 230 481 | 647 670 482 | 648 787 483 | 650 671 484 | 651 672 485 | 652 788 486 | 654 673 487 | 657 231 488 | 658 674 489 | 662 232 490 | 663 402 491 | 664 602 492 | 2500 863 493 | 666 818 494 | 667 403 495 | 669 765 496 | 673 675 497 | 675 233 498 | 677 676 499 | 678 404 500 | 679 234 501 | 680 405 502 | 681 235 503 | 684 677 504 | 685 406 505 | 687 236 506 | 688 819 507 | 689 559 508 | 690 407 509 | 773 857 510 | 694 789 511 | 695 408 512 | 697 409 513 | 698 410 514 | 699 678 515 | 700 679 516 | 702 680 517 | 709 237 518 | 2758 985 519 | 711 411 520 | 713 412 521 | 715 413 522 | 716 414 523 | 719 415 524 | 721 416 525 | 722 681 526 | 723 417 527 | 2772 778 528 | 725 835 529 | 728 418 530 | 729 419 531 | 2070 542 532 | 732 420 533 | 734 421 534 | 735 422 535 | 738 423 536 | 739 856 537 | 743 424 538 | 744 425 539 | 746 426 540 | 2796 553 541 | 749 427 542 | 750 603 543 | 754 682 544 | 756 766 545 | 757 683 546 | 758 836 547 | 760 428 548 | 761 429 549 | 762 684 550 | 763 430 551 | 764 431 552 | 765 685 553 | 767 837 554 | 768 902 555 | 769 432 556 | 770 916 557 | 772 433 558 | 2460 549 559 | 777 850 560 | 780 434 561 | 346 179 562 | 782 238 563 | 2861 764 564 | 786 790 565 | 787 686 566 | 788 687 567 | 791 239 568 | 792 688 569 | 793 689 570 | 796 917 571 | 797 690 572 | 798 838 573 | 799 240 574 | 800 691 575 | 2849 554 576 | 802 241 577 | 2051 540 578 | 804 569 579 | 805 435 580 | 807 242 581 | 809 436 582 | 811 437 583 | 813 438 584 | 814 439 585 | 2482 551 586 | 818 802 587 | 819 874 588 | 822 440 589 | 823 807 590 | 2875 278 591 | 828 441 592 | 831 243 593 | 838 903 594 | 839 692 595 | 840 442 596 | 843 693 597 | 845 940 598 | 847 694 599 | 848 767 600 | 850 582 601 | 851 443 602 | 854 244 603 | 857 695 604 | 860 941 605 | 862 808 606 | 2911 555 607 | 2912 556 608 | 865 696 609 | 2115 932 610 | 867 445 611 | 870 848 612 | 2921 780 613 | 874 446 614 | 877 447 615 | 880 697 616 | 883 448 617 | 884 858 618 | 888 839 619 | 891 604 620 | 892 449 621 | 894 840 622 | 896 583 623 | 897 698 624 | 899 699 625 | 901 450 626 | 903 451 627 | 908 452 628 | 2958 557 629 | 911 454 630 | 916 455 631 | 2611 976 632 | 922 456 633 | 923 457 634 | 926 458 635 | 930 875 636 | 931 459 637 | 781 939 638 | 933 700 639 | 934 942 640 | 935 460 641 | 936 584 642 | 937 841 643 | 939 701 644 | 940 868 645 | 943 820 646 | 944 461 647 | 945 702 648 | 946 703 649 | 947 909 650 | 948 704 651 | 2206 546 652 | 950 791 653 | 951 610 654 | 952 705 655 | 954 245 656 | 955 706 657 | 956 707 658 | 957 708 659 | 963 462 660 | 965 246 661 | 967 463 662 | 970 792 663 | 2419 974 664 | 976 464 665 | 979 709 666 | 980 465 667 | 982 710 668 | 983 866 669 | 984 247 670 | 985 585 671 | 986 466 672 | 1871 888 673 | 992 611 674 | 2488 934 675 | 997 248 676 | 998 570 677 | 999 821 678 | 1000 908 679 | 1001 711 680 | 1002 249 681 | 1005 918 682 | 1006 842 683 | 1007 593 684 | 1009 712 685 | 1010 467 686 | 1013 713 687 | 1014 468 688 | 1018 571 689 | 1020 469 690 | 1021 470 691 | 1023 891 692 | 2489 762 693 | 1031 859 694 | 1033 714 695 | 1034 605 696 | 1035 919 697 | 1036 250 698 | 1041 471 699 | 1042 876 700 | 2189 933 701 | 1044 899 702 | 1046 
715 703 | 1047 881 704 | 1051 572 705 | 1052 606 706 | 1055 573 707 | 1057 872 708 | 1058 895 709 | 1059 251 710 | 1060 472 711 | 1063 843 712 | 1065 473 713 | 1067 612 714 | 1070 474 715 | 1073 716 716 | 1075 475 717 | 1077 809 718 | 1079 252 719 | 1081 253 720 | 863 444 721 | 1086 860 722 | 1087 476 723 | 1089 861 724 | 1092 943 725 | 1095 862 726 | 1096 254 727 | 1098 477 728 | 1099 717 729 | 1104 944 730 | 1105 810 731 | 1108 945 732 | 1109 478 733 | 1110 718 734 | 1111 255 735 | 1112 607 736 | 1114 719 737 | 1115 988 738 | 1117 479 739 | 1119 480 740 | 1121 256 741 | 1123 877 742 | 1129 481 743 | 1130 482 744 | 1132 882 745 | 1134 720 746 | 1135 483 747 | 1136 613 748 | 1138 484 749 | 1144 485 750 | 1153 486 751 | 1154 768 752 | 1155 920 753 | 1156 946 754 | 1158 825 755 | 1159 487 756 | 1162 947 757 | 1163 488 758 | 1166 904 759 | 1170 586 760 | 1171 721 761 | 537 800 762 | 1176 722 763 | 1177 865 764 | 1179 894 765 | 1180 723 766 | 1181 614 767 | 1182 489 768 | 1183 490 769 | 1184 491 770 | 1185 724 771 | 2358 871 772 | 1188 492 773 | 1191 725 774 | 1192 493 775 | 1194 257 776 | 1195 921 777 | 1196 494 778 | 1200 922 779 | 1201 726 780 | 1204 258 781 | 1208 727 782 | 1209 587 783 | 1212 728 784 | 1213 495 785 | 1214 496 786 | 1215 729 787 | 1217 497 788 | 1219 498 789 | 1221 259 790 | 1222 948 791 | 1223 730 792 | 1225 731 793 | 1235 499 794 | 1239 732 795 | 457 648 796 | 1241 801 797 | 1242 260 798 | 1246 949 799 | 1249 804 800 | 1250 733 801 | 1252 500 802 | 1254 734 803 | 1259 501 804 | 1263 822 805 | 1264 502 806 | 1265 735 807 | 1267 736 808 | 1268 823 809 | 2462 900 810 | 1271 910 811 | 1275 737 812 | 1280 574 813 | 1281 503 814 | 1283 261 815 | 1286 738 816 | 1288 896 817 | 1289 262 818 | 1292 739 819 | 1294 950 820 | 1296 851 821 | 1240 803 822 | 1302 615 823 | 1307 811 824 | 1313 977 825 | 1316 769 826 | 1318 951 827 | 1324 504 828 | 1327 263 829 | 1331 264 830 | 1337 911 831 | 1340 740 832 | 1341 952 833 | 1343 805 834 | 1345 923 835 | 1346 812 836 | 1353 505 837 | 1356 741 838 | 1357 506 839 | 1362 793 840 | 1363 742 841 | 910 453 842 | 1366 953 843 | 1376 507 844 | 1379 813 845 | 1382 869 846 | 1383 743 847 | 1384 924 848 | 1387 508 849 | 1388 744 850 | 1390 954 851 | 1392 745 852 | 1393 509 853 | 1398 510 854 | 1399 746 855 | 1400 955 856 | 1401 511 857 | 1404 512 858 | 1415 513 859 | 1416 883 860 | 1418 514 861 | 2285 760 862 | 1425 852 863 | 2287 798 864 | 1440 515 865 | 2288 982 866 | 1442 516 867 | 1445 794 868 | 1446 517 869 | 1450 844 870 | 1454 747 871 | 1457 518 872 | 1459 925 873 | 1460 926 874 | 1461 748 875 | 1467 901 876 | 1472 564 877 | 1473 519 878 | 1475 616 879 | 1483 795 880 | 1492 749 881 | 2192 999 882 | 1495 770 883 | 1500 814 884 | 1501 265 885 | 1506 927 886 | 1512 957 887 | 1516 958 888 | 594 224 889 | 1520 750 890 | 1521 266 891 | 1522 959 892 | 1523 520 893 | 528 873 894 | 692 915 895 | 1544 960 896 | 1545 961 897 | 1553 751 898 | 1554 989 899 | 1555 267 900 | 2738 996 901 | 1567 771 902 | 1575 521 903 | 1581 884 904 | 1584 752 905 | 1588 962 906 | 1601 522 907 | 1602 983 908 | 1603 905 909 | 1604 986 910 | 1608 523 911 | 2487 998 912 | 1614 268 913 | 1616 815 914 | 1619 269 915 | 1626 524 916 | 2319 761 917 | 1629 575 918 | 1631 928 919 | 1643 525 920 | 1649 753 921 | 1650 526 922 | 1654 270 923 | 1661 995 924 | 1671 824 925 | 2327 972 926 | 1676 826 927 | 1684 772 928 | 1686 878 929 | 1688 963 930 | 1693 879 931 | 1694 754 932 | 1699 990 933 | 1704 588 934 | 1714 527 935 | 1721 271 936 | 1725 796 937 | 1729 272 938 | 1733 528 939 | 1734 964 
940 | 1748 845 941 | 1754 965 942 | 1756 991 943 | 1766 529 944 | 1773 966 945 | 1776 755 946 | 1423 956 947 | 1786 992 948 | 1791 773 949 | 1792 967 950 | 1793 997 951 | 1796 993 952 | 1802 609 953 | 1804 782 954 | 1820 273 955 | 1825 968 956 | 1828 887 957 | 1838 797 958 | 1839 870 959 | 1840 530 960 | 1843 907 961 | 1846 756 962 | 1849 929 963 | 2058 758 964 | 1861 589 965 | 1864 892 966 | 2108 775 967 | 1873 889 968 | 1876 531 969 | 1881 987 970 | 1884 846 971 | 1886 978 972 | 1889 532 973 | 1895 533 974 | 1896 534 975 | 1901 535 976 | 1906 979 977 | 355 640 978 | 2368 547 979 | 1923 536 980 | 2369 994 981 | 2110 971 982 | 2116 906 983 | 1942 969 984 | 1964 912 985 | 1966 885 986 | 1972 984 987 | 1980 930 988 | 1987 757 989 | 1991 537 990 | 2003 274 991 | 2007 275 992 | 2010 931 993 | 2385 973 994 | 2024 981 995 | 2062 980 996 | 2038 538 997 | 2039 539 998 | 2523 936 999 | 2182 886 1000 | 1365 608 1001 | -------------------------------------------------------------------------------- /Codes/train.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | import tensorflow as tf 3 | import numpy as np 4 | import os 5 | import shutil 6 | import codecs 7 | import psutil 8 | from scipy import sparse 9 | import pandas as pd 10 | import bottleneck as bn 11 | 12 | from math import isinf 13 | 14 | import sys 15 | import pickle 16 | import ConfigParser 17 | 18 | from data_processing import load_train_data, load_tr_te_data, load_user_items, load_overlap_coeff, load_pop_niche_tags, load_items_to_sample, load_vectors 19 | from data_processing import load_item_one_hot_features as load_item_features 20 | 21 | from generator import generator_VAECF as generator 22 | 23 | from sample import sample_from_generator_new 24 | 25 | from discriminator import discriminator 26 | 27 | from eval_functions import NDCG_binary_at_k_batch, Recall_at_k_batch 28 | 29 | 30 | def train_GAN(h0_size, h1_size, h2_size, h3_size, NUM_EPOCH, NUM_SUB_EPOCHS, BATCH_SIZE, DISPLAY_ITER, LEARNING_RATE, to_restore, model_name, dataset, GANLAMBDA): 31 | 32 | 33 | DATA_DIR = dataset+'/' 34 | 35 | show2id_path = DATA_DIR + "item2id.txt" 36 | niche_tags_path = DATA_DIR + "niche_items.txt" 37 | 38 | user_tag_matrix_path = DATA_DIR + "item_counts.csv" 39 | 40 | dataset_name = dataset.split('/')[-1].strip() 41 | 42 | if dataset_name == '': 43 | dataset_name = dataset.split('/')[-2].strip() 44 | 45 | output_path = "chkpt/"+dataset_name+"_"+model_name+"_"+str(GANLAMBDA)+"/" 46 | 47 | if not os.path.exists(output_path): 48 | os.makedirs(output_path) 49 | 50 | 51 | item_list_path = DATA_DIR + 'item_list.txt' 52 | 53 | pro_dir = DATA_DIR # os.path.join(DATA_DIR, 'pro_sg_tags_1k') 54 | 55 | 56 | unique_sid = list() 57 | with open(os.path.join(pro_dir, 'unique_item_id.txt'), 'r') as f: 58 | for line in f: 59 | unique_sid.append(line.strip()) 60 | 61 | n_items = len(unique_sid) 62 | 63 | 64 | print('Loading Items...', end = '') 65 | SHOW2ID, IDs_present, NICHE_TAGS, ALL_TAGS, OTHER_TAGS = load_pop_niche_tags(show2id_path, item_list_path, niche_tags_path, n_items) 66 | print('Done.') 67 | 68 | 69 | # One Hot Vectors for Items 70 | print('Loading Item Features...', end = '') 71 | ITEM_FEATURE_DICT, FEATURE_LEN, ITEM_FEATURE_ARR = load_item_features(item_list_path, SHOW2ID, n_items) 72 | print('Done.') 73 | 74 | 75 | # Load Binary Interaction Matrix X 76 | print('Loading Training Interaction Matrix...', end = '') 77 | train_data, uid_start_idx = 
load_train_data(os.path.join(pro_dir, 'train_GAN.csv'), n_items) 78 | print('Done.') 79 | 80 | 81 | # Load Data for Validation 82 | print('Loading Validation Matrix...', end = '') 83 | vad_data_tr, vad_data_te, uid_start_idx_vad = load_tr_te_data(os.path.join(pro_dir, 'validation_tr.csv'), 84 | os.path.join(pro_dir, 'validation_te.csv'), n_items) 85 | print('Done.') 86 | 87 | 88 | 89 | # Load User's Popular and Niche Items 90 | print("Loading User's Popular and Niche Items...", end = '') 91 | user_popular_data = load_user_items(os.path.join(pro_dir,'train_GAN_popular.csv')) 92 | user_niche_data = load_user_items(os.path.join(pro_dir,'train_GAN_niche.csv')) 93 | print("Done.") 94 | 95 | 96 | 97 | print('Loading item overlap coefficients....', end = '') 98 | OVERLAP_COEFFS = load_overlap_coeff(show2id_path, user_tag_matrix_path) 99 | print('Done.') 100 | 101 | 102 | 103 | N = train_data.shape[0] 104 | idxlist = range(N) 105 | 106 | 107 | user_x_niche_vectors, user_x_popular_n_vectors = load_vectors(user_popular_data, user_niche_data, OVERLAP_COEFFS, ITEM_FEATURE_DICT, N) 108 | print('Vectors Loaded') 109 | 110 | 111 | print('Loading Items to Sample....', end = '') 112 | USER_TAGS_TO_SAMPLE = load_items_to_sample(user_popular_data, user_niche_data, NICHE_TAGS, OVERLAP_COEFFS, N) 113 | print("Done") 114 | 115 | 116 | N_vad = vad_data_tr.shape[0] 117 | idxlist_vad = range(N_vad) 118 | 119 | print('Number of Users: ', N) 120 | 121 | batches_per_epoch = int(np.ceil(float(N) / BATCH_SIZE)) 122 | 123 | print('Batches Per Epoch: ', batches_per_epoch) 124 | 125 | global_step = tf.Variable(0, name="global_step", trainable=False) 126 | 127 | tf.reset_default_graph() 128 | 129 | # Generator 130 | generator_network, generator_out, g_vae_loss, g_params, p_dims, total_anneal_steps, anneal_cap = generator(pro_dir) 131 | 132 | generated_tags = tf.placeholder(tf.float32, [None, n_items], name = "generated_tags") 133 | 134 | 135 | # Discriminator 136 | y_data, y_generated, d_params, x_generated_id, x_popular_n_id, x_popular_g_id, x_niche_id, item_feature_arr, keep_prob = discriminator(n_items, FEATURE_LEN, h0_size, h1_size, h2_size, h3_size) 137 | 138 | zero = tf.constant(0, dtype=tf.float32) 139 | 140 | # Loss Function 141 | 142 | d_loss = - tf.reduce_sum(tf.log(y_data)) - tf.reduce_sum(tf.log(1 - y_generated)) 143 | d_loss_mean = tf.reduce_mean(d_loss) 144 | 145 | sampled_generator_out = tf.multiply(generator_out, generated_tags) 146 | 147 | sampled_generator_out = tf.reshape(sampled_generator_out, [-1]) 148 | 149 | sampled_generator_out_non_zero = tf.gather_nd(sampled_generator_out ,tf.where(tf.not_equal(sampled_generator_out, zero))) 150 | 151 | sampled_cnt = tf.placeholder_with_default(1., shape=None) 152 | gen_lambda = tf.placeholder_with_default(1.0, shape=None) 153 | 154 | 155 | g_loss = g_vae_loss - (1.0 * gen_lambda / sampled_cnt) * tf.reduce_sum(tf.multiply(sampled_generator_out_non_zero, y_generated)) 156 | g_loss_mean = tf.reduce_mean(g_loss) 157 | gan_loss = - (1.0 * gen_lambda / sampled_cnt) * tf.reduce_sum(tf.multiply(sampled_generator_out_non_zero, y_generated)) 158 | 159 | # optimizer : AdamOptimizer 160 | optimizer = tf.train.AdamOptimizer(LEARNING_RATE) 161 | 162 | # discriminator and generator loss 163 | d_trainer = optimizer.minimize(d_loss, var_list=d_params) 164 | g_trainer = optimizer.minimize(g_loss, var_list=g_params) 165 | 166 | 167 | init = tf.global_variables_initializer() 168 | 169 | saver = tf.train.Saver() 170 | 171 | gpu_options = 
tf.GPUOptions(per_process_gpu_memory_fraction=0.333, allow_growth=True) 172 | sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) 173 | 174 | sess.run(init) 175 | 176 | curr_gen_lamda = GANLAMBDA 177 | 178 | update_count = 0.0 179 | 180 | for i in range(NUM_EPOCH): 181 | 182 | batch_total_sampled_tags = [] 183 | batch_curr_x_generated = [] 184 | batch_curr_x_popular_g = [] 185 | batch_curr_x_popular_n = [] 186 | batch_curr_x_niche = [] 187 | batch_X = [] 188 | batch_total_sampled_cnt = [] 189 | 190 | # train for each epoch 191 | user_err_cnt = 0 192 | for bnum, st_idx in enumerate(range(0, N, BATCH_SIZE)): 193 | end_idx = min(st_idx + BATCH_SIZE, N) 194 | X = train_data[idxlist[st_idx:end_idx]] 195 | 196 | if sparse.isspmatrix(X): 197 | X = X.toarray() 198 | X = X.astype('float32') 199 | 200 | curr_generator_out = sess.run(generator_out, feed_dict = {generator_network.input_ph: X}) 201 | 202 | 203 | curr_x_popular_n = [] 204 | curr_x_niche = [] 205 | 206 | curr_x_popular_g = [] 207 | curr_x_generated = [] 208 | 209 | total_sampled_cnt = 0 210 | total_sampled_tags = [] 211 | 212 | for ii, user_idx in enumerate(idxlist[st_idx:end_idx]): 213 | if user_idx + uid_start_idx not in user_popular_data or user_idx + uid_start_idx not in user_niche_data: 214 | # Invalid User: user_idx + uid_start_idx 215 | user_err_cnt += 1 216 | total_sampled_tags.append([0]*n_items) 217 | continue 218 | 219 | curr_pop_vectors = user_popular_data[user_idx + uid_start_idx] 220 | curr_niche_vectors = user_niche_data[user_idx + uid_start_idx] 221 | 222 | 223 | curr_x_niche += user_x_niche_vectors[user_idx + uid_start_idx] 224 | curr_x_popular_n += user_x_popular_n_vectors[user_idx + uid_start_idx] 225 | 226 | 227 | curr_sampled_tags_bin, curr_sampled_tags = sample_from_generator_new(USER_TAGS_TO_SAMPLE[user_idx + uid_start_idx], np.asarray(curr_generator_out)[ii, USER_TAGS_TO_SAMPLE[user_idx + uid_start_idx]], len(curr_niche_vectors), n_items) 228 | 229 | curr_cnt = 0 230 | curr_sampled_tags.sort() 231 | 232 | for generated_tag_idx in curr_sampled_tags: 233 | 234 | max_coeff = -1.0 235 | 236 | max_pop_tag_idx = np.random.choice(range(len(curr_pop_vectors))) 237 | 238 | max_pop_tag_idx = curr_pop_vectors[max_pop_tag_idx] 239 | 240 | if generated_tag_idx not in ITEM_FEATURE_DICT or max_pop_tag_idx not in ITEM_FEATURE_DICT: 241 | # Invalid Generated Tag Pair: generated_tag_idx, max_pop_tag_idx 242 | curr_sampled_tags_bin[generated_tag_idx] = 0 243 | continue 244 | 245 | curr_x_generated.append(generated_tag_idx) 246 | curr_x_popular_g.append(max_pop_tag_idx) 247 | 248 | curr_cnt += 1 249 | 250 | total_sampled_tags.append(curr_sampled_tags_bin) 251 | total_sampled_cnt += curr_cnt 252 | 253 | 254 | if curr_x_generated == []: 255 | continue 256 | 257 | total_sampled_tags = np.asarray(total_sampled_tags) 258 | curr_x_generated = np.asarray(curr_x_generated) 259 | curr_x_popular_g = np.asarray(curr_x_popular_g) 260 | curr_x_popular_n = np.asarray(curr_x_popular_n) 261 | curr_x_niche = np.asarray(curr_x_niche) 262 | 263 | batch_total_sampled_tags.append(total_sampled_tags) 264 | batch_curr_x_generated.append(curr_x_generated) 265 | batch_curr_x_popular_g.append(curr_x_popular_g) 266 | batch_curr_x_popular_n.append(curr_x_popular_n) 267 | batch_curr_x_niche.append(curr_x_niche) 268 | batch_X.append(X) 269 | batch_total_sampled_cnt.append(total_sampled_cnt) 270 | 271 | 272 | batch_total_sampled_tags = np.asarray(batch_total_sampled_tags) 273 | batch_curr_x_generated = np.asarray(batch_curr_x_generated) 274 | 
batch_curr_x_popular_g = np.asarray(batch_curr_x_popular_g) 275 | batch_curr_x_popular_n = np.asarray(batch_curr_x_popular_n) 276 | batch_curr_x_niche = np.asarray(batch_curr_x_niche) 277 | batch_X = np.asarray(batch_X) 278 | batch_total_sampled_cnt = np.asarray(batch_total_sampled_cnt) 279 | 280 | print("global-epoch:", i, "Data Creation Finished", "user_err_cnt:", user_err_cnt) 281 | 282 | # print(batch_total_sampled_cnt.tolist()) 283 | 284 | indices = np.arange(batch_total_sampled_tags.shape[0]) 285 | np.random.shuffle(indices) 286 | 287 | for j_disc in range(NUM_SUB_EPOCHS): 288 | 289 | for disc_batch_idx in indices: 290 | 291 | X = batch_X[disc_batch_idx] 292 | curr_x_popular_id_n = batch_curr_x_popular_n[disc_batch_idx] 293 | curr_x_popular_id_g = batch_curr_x_popular_g[disc_batch_idx] 294 | curr_x_niche_id = batch_curr_x_niche[disc_batch_idx] 295 | curr_x_generated_id = batch_curr_x_generated[disc_batch_idx] 296 | total_sampled_tags = batch_total_sampled_tags[disc_batch_idx] 297 | total_sampled_cnt = batch_total_sampled_cnt[disc_batch_idx] 298 | 299 | 300 | _, curr_d_loss = sess.run([d_trainer, d_loss_mean], feed_dict={generator_network.input_ph: X, x_popular_n_id: curr_x_popular_id_n, x_popular_g_id: curr_x_popular_id_g , x_niche_id: curr_x_niche_id, x_generated_id: curr_x_generated_id, generated_tags: total_sampled_tags, sampled_cnt: total_sampled_cnt, keep_prob: np.sum(0.7).astype(np.float32), item_feature_arr: ITEM_FEATURE_ARR}) 301 | 302 | 303 | print("global-epoch:%s, discr-epoch:%s, d_loss:%.5f" % (i, j_disc, curr_d_loss)) 304 | 305 | print('') 306 | 307 | for j_gen in range(NUM_SUB_EPOCHS): 308 | 309 | for gen_batch_idx in indices: 310 | X = batch_X[gen_batch_idx] 311 | curr_x_popular_id_n = batch_curr_x_popular_n[gen_batch_idx] 312 | curr_x_popular_id_g = batch_curr_x_popular_g[gen_batch_idx] 313 | curr_x_niche_id = batch_curr_x_niche[gen_batch_idx] 314 | curr_x_generated_id = batch_curr_x_generated[gen_batch_idx] 315 | total_sampled_tags = batch_total_sampled_tags[gen_batch_idx] 316 | total_sampled_cnt = batch_total_sampled_cnt[gen_batch_idx] 317 | 318 | 319 | if total_anneal_steps > 0: 320 | anneal = min(anneal_cap, 1. 
* ((update_count) / total_anneal_steps)) 321 | else: 322 | anneal = anneal_cap 323 | 324 | update_count += 1 325 | 326 | _, curr_g_loss, curr_g_loss_term_1, curr_g_loss_term_2 = sess.run([g_trainer, g_loss_mean, g_vae_loss, gan_loss], feed_dict={generator_network.input_ph: X, x_popular_n_id: curr_x_popular_id_n, x_popular_g_id: curr_x_popular_id_g , x_niche_id: curr_x_niche_id, x_generated_id: curr_x_generated_id, generated_tags: total_sampled_tags, sampled_cnt: total_sampled_cnt, generator_network.keep_prob_ph: 0.75, generator_network.is_training_ph: 1, generator_network.anneal_ph: anneal, gen_lambda: curr_gen_lamda, keep_prob: np.sum(0.7).astype(np.float32)}) 327 | 328 | 329 | print("global-epoch:%s, generator-epoch:%s, g_loss:%.5f (vae_loss: %.5f + gan_loss: %.5f, anneal: %.5f)" % (i, j_gen, curr_g_loss, curr_g_loss_term_1, curr_g_loss_term_2, anneal)) 330 | 331 | print('') 332 | 333 | X_vad = vad_data_tr[idxlist_vad[0:N_vad]] 334 | 335 | if sparse.isspmatrix(X_vad): 336 | X_vad = X_vad.toarray() 337 | X_vad = X_vad.astype('float32') 338 | 339 | pred_vad = sess.run(generator_out, feed_dict={generator_network.input_ph: X_vad} ) 340 | # exclude examples from training and validation (if any) 341 | pred_vad[X_vad.nonzero()] = -np.inf 342 | ndcg_vad = NDCG_binary_at_k_batch(pred_vad, vad_data_te[idxlist_vad[0:N_vad]]) 343 | 344 | recall_at_20, not_found_20 = Recall_at_k_batch(pred_vad, vad_data_te[idxlist_vad[0:N_vad]], k=20) 345 | 346 | recall_at_50, not_found_50 = Recall_at_k_batch(pred_vad, vad_data_te[idxlist_vad[0:N_vad]], k=50) 347 | 348 | print('global-epoch:', i , 'gen-epoch:', j_gen, 'Vad: NDCG:', np.mean(ndcg_vad), 'Recall@20:', np.mean(recall_at_20), 'Recall@50:', np.mean(recall_at_50), 'Num_users:', len(ndcg_vad), len(recall_at_20), len(recall_at_50)) 349 | 350 | 351 | print('') 352 | 353 | 354 | saver.save(sess, os.path.join(output_path, "model_"+str(i))) 355 | 356 | print('Model saved at global-epoch', i) 357 | 358 | 359 | configParser = ConfigParser.RawConfigParser() 360 | configFilePath = r'config.ini' 361 | configParser.read(configFilePath) 362 | 363 | h0_size = int(configParser.get('Long-Tail-GAN', 'h0_size')) 364 | h1_size = int(configParser.get('Long-Tail-GAN', 'h1_size')) 365 | h2_size = int(configParser.get('Long-Tail-GAN', 'h2_size')) 366 | h3_size = int(configParser.get('Long-Tail-GAN', 'h3_size')) 367 | 368 | NUM_EPOCH = int(configParser.get('Long-Tail-GAN', 'NUM_EPOCH')) 369 | NUM_SUB_EPOCHS = int(NUM_EPOCH/8) 370 | BATCH_SIZE = int(configParser.get('Long-Tail-GAN', 'BATCH_SIZE')) 371 | 372 | DISPLAY_ITER = int(configParser.get('Long-Tail-GAN', 'DISPLAY_ITER')) 373 | LEARNING_RATE = float(configParser.get('Long-Tail-GAN', 'LEARNING_RATE')) 374 | to_restore = int(configParser.get('Long-Tail-GAN', 'to_restore')) 375 | GANLAMBDA = float(configParser.get('Long-Tail-GAN', 'GANLAMBDA')) 376 | 377 | model_name = configParser.get('Long-Tail-GAN', 'model_name') 378 | 379 | dataset = sys.argv[1] 380 | 381 | train_GAN(h0_size, h1_size, h2_size, h3_size, NUM_EPOCH, NUM_SUB_EPOCHS, BATCH_SIZE, DISPLAY_ITER, LEARNING_RATE, to_restore, model_name, dataset, GANLAMBDA) 382 | -------------------------------------------------------------------------------- /Dataset/Askubuntu_Sample/item_list.txt: -------------------------------------------------------------------------------- 1 | 901 2 | 917 3 | 1992 4 | 2888 5 | 409 6 | 2766 7 | 1077 8 | 724 9 | 1223 10 | 2232 11 | 1601 12 | 2103 13 | 1193 14 | 1832 15 | 1126 16 | 90 17 | 1652 18 | 2774 19 | 288 20 | 2330 21 | 2739 22 | 2147 23 | 
2204 24 | 2419 25 | 2225 26 | 2977 27 | 1730 28 | 560 29 | 1984 30 | 2783 31 | 1851 32 | 11 33 | 508 34 | 1025 35 | 2231 36 | 2188 37 | 890 38 | 2926 39 | 678 40 | 1650 41 | 2741 42 | 1810 43 | 1576 44 | 637 45 | 2185 46 | 1897 47 | 2274 48 | 611 49 | 2883 50 | 2359 51 | 1725 52 | 2692 53 | 2084 54 | 1581 55 | 854 56 | 2798 57 | 1227 58 | 2197 59 | 753 60 | 345 61 | 2680 62 | 720 63 | 132 64 | 410 65 | 1067 66 | 2282 67 | 1493 68 | 1276 69 | 1095 70 | 1945 71 | 2619 72 | 214 73 | 1071 74 | 618 75 | 859 76 | 392 77 | 911 78 | 1462 79 | 740 80 | 2252 81 | 191 82 | 1482 83 | 1319 84 | 897 85 | 2580 86 | 2020 87 | 100 88 | 792 89 | 1528 90 | 1029 91 | 1511 92 | 1181 93 | 1080 94 | 824 95 | 1226 96 | 915 97 | 2312 98 | 1299 99 | 148 100 | 817 101 | 735 102 | 1784 103 | 2575 104 | 120 105 | 1082 106 | 2694 107 | 276 108 | 1432 109 | 440 110 | 122 111 | 363 112 | 2851 113 | 2936 114 | 1195 115 | 23 116 | 307 117 | 4 118 | 1893 119 | 1107 120 | 604 121 | 461 122 | 1491 123 | 314 124 | 2528 125 | 285 126 | 2647 127 | 1740 128 | 1178 129 | 880 130 | 2620 131 | 360 132 | 787 133 | 289 134 | 2779 135 | 2005 136 | 2490 137 | 2940 138 | 2495 139 | 538 140 | 1574 141 | 2686 142 | 482 143 | 1892 144 | 1300 145 | 372 146 | 337 147 | 2333 148 | 700 149 | 1866 150 | 190 151 | 2792 152 | 2096 153 | 593 154 | 171 155 | 2007 156 | 1337 157 | 1102 158 | 1068 159 | 131 160 | 340 161 | 432 162 | 797 163 | 1222 164 | 2911 165 | 2400 166 | 961 167 | 2552 168 | 918 169 | 1962 170 | 2480 171 | 799 172 | 1366 173 | 2171 174 | 841 175 | 2583 176 | 1455 177 | 781 178 | 1862 179 | 417 180 | 1359 181 | 1398 182 | 434 183 | 172 184 | 1852 185 | 2820 186 | 2470 187 | 1413 188 | 1330 189 | 1845 190 | 485 191 | 164 192 | 2331 193 | 1562 194 | 1303 195 | 855 196 | 2264 197 | 147 198 | 2621 199 | 226 200 | 2554 201 | 2568 202 | 884 203 | 1322 204 | 1501 205 | 2455 206 | 2782 207 | 716 208 | 1707 209 | 1045 210 | 1735 211 | 456 212 | 2058 213 | 204 214 | 2865 215 | 1804 216 | 2113 217 | 1003 218 | 1727 219 | 251 220 | 2242 221 | 2927 222 | 2549 223 | 91 224 | 1546 225 | 256 226 | 1881 227 | 625 228 | 2738 229 | 2728 230 | 674 231 | 814 232 | 2642 233 | 2709 234 | 502 235 | 1142 236 | 2313 237 | 1803 238 | 2637 239 | 821 240 | 1094 241 | 2990 242 | 2923 243 | 470 244 | 2561 245 | 1512 246 | 1520 247 | 130 248 | 2416 249 | 1649 250 | 2071 251 | 2176 252 | 1000 253 | 923 254 | 946 255 | 2337 256 | 772 257 | 2800 258 | 694 259 | 835 260 | 114 261 | 2847 262 | 951 263 | 2501 264 | 2537 265 | 1401 266 | 362 267 | 2523 268 | 971 269 | 2762 270 | 557 271 | 671 272 | 582 273 | 1484 274 | 1345 275 | 1900 276 | 2192 277 | 1882 278 | 1752 279 | 1446 280 | 639 281 | 2809 282 | 1367 283 | 2622 284 | 2610 285 | 1502 286 | 838 287 | 1545 288 | 1383 289 | 387 290 | 1274 291 | 2321 292 | 2702 293 | 2041 294 | 2605 295 | 1335 296 | 2875 297 | 974 298 | 864 299 | 84 300 | 2474 301 | 1448 302 | 1913 303 | 2145 304 | 626 305 | 510 306 | 1853 307 | 319 308 | 970 309 | 585 310 | 2022 311 | 882 312 | 1896 313 | 973 314 | 2700 315 | 1098 316 | 2677 317 | 1991 318 | 589 319 | 2759 320 | 939 321 | 2432 322 | 1959 323 | 1451 324 | 2371 325 | 535 326 | 1365 327 | 477 328 | 2064 329 | 1350 330 | 2017 331 | 1791 332 | 200 333 | 1225 334 | 271 335 | 1592 336 | 2289 337 | 1261 338 | 2683 339 | 2479 340 | 236 341 | 1743 342 | 2033 343 | 47 344 | 196 345 | 1627 346 | 2557 347 | 595 348 | 513 349 | 2322 350 | 404 351 | 2937 352 | 1830 353 | 1030 354 | 1531 355 | 1584 356 | 1641 357 | 1825 358 | 421 359 | 1633 360 | 1146 361 | 2309 362 | 658 363 | 2736 364 | 1508 365 
| 1801 366 | 1358 367 | 642 368 | 49 369 | 2426 370 | 2367 371 | 1081 372 | 2329 373 | 1050 374 | 1585 375 | 246 376 | 195 377 | 1416 378 | 2812 379 | 607 380 | 1205 381 | 2818 382 | 40 383 | 341 384 | 906 385 | 1816 386 | 73 387 | 1035 388 | 2486 389 | 1477 390 | 1305 391 | 800 392 | 2155 393 | 1946 394 | 909 395 | 2663 396 | 2585 397 | 734 398 | 1764 399 | 2604 400 | 129 401 | 143 402 | 2695 403 | 2401 404 | 1229 405 | 1602 406 | 1057 407 | 996 408 | 324 409 | 1986 410 | 1879 411 | 2616 412 | 942 413 | 2615 414 | 82 415 | 2516 416 | 2586 417 | 2131 418 | 2948 419 | 652 420 | 1460 421 | 2790 422 | 1907 423 | 930 424 | 2494 425 | 2458 426 | 245 427 | 430 428 | 1293 429 | 2808 430 | 789 431 | 279 432 | 992 433 | 1127 434 | 619 435 | 1200 436 | 963 437 | 2796 438 | 1850 439 | 561 440 | 2843 441 | 1259 442 | 83 443 | 2009 444 | 1092 445 | 2513 446 | 928 447 | 602 448 | 1289 449 | 356 450 | 529 451 | 1182 452 | 940 453 | 682 454 | 1971 455 | 1481 456 | 298 457 | 488 458 | 50 459 | 647 460 | 521 461 | 780 462 | 2295 463 | 1877 464 | 2565 465 | 261 466 | 1023 467 | 249 468 | 2509 469 | 225 470 | 2850 471 | 2492 472 | 2639 473 | 1278 474 | 2640 475 | 2319 476 | 1456 477 | 2126 478 | 397 479 | 2746 480 | 367 481 | 2469 482 | 785 483 | 411 484 | 1943 485 | 1993 486 | 2643 487 | 390 488 | 2365 489 | 2090 490 | 853 491 | 1861 492 | 1864 493 | 998 494 | 104 495 | 1605 496 | 2044 497 | 1062 498 | 1974 499 | 17 500 | 449 501 | 62 502 | 2206 503 | 2703 504 | 1442 505 | 1022 506 | 1838 507 | 1594 508 | 2247 509 | 925 510 | 1352 511 | 857 512 | 2010 513 | 1291 514 | 405 515 | 2118 516 | 71 517 | 1128 518 | 2357 519 | 1420 520 | 701 521 | 568 522 | 2485 523 | 2717 524 | 10 525 | 2181 526 | 2588 527 | 1767 528 | 1854 529 | 1409 530 | 465 531 | 2631 532 | 836 533 | 141 534 | 1863 535 | 641 536 | 2190 537 | 2120 538 | 712 539 | 1060 540 | 318 541 | 1857 542 | 2761 543 | 540 544 | 2363 545 | 1490 546 | 1463 547 | 1557 548 | 546 549 | 1282 550 | 2129 551 | 1228 552 | 1189 553 | 648 554 | 871 555 | 1011 556 | 1461 557 | 1812 558 | 2399 559 | 1921 560 | 2596 561 | 1301 562 | 1109 563 | 1224 564 | 422 565 | 1177 566 | 2273 567 | 1912 568 | 1376 569 | 1968 570 | 1167 571 | 2015 572 | 2370 573 | 2139 574 | 2656 575 | 900 576 | 216 577 | 1710 578 | 549 579 | 74 580 | 2939 581 | 1215 582 | 1185 583 | 1007 584 | 3006 585 | 1673 586 | 1281 587 | 1099 588 | 1202 589 | 2855 590 | 1428 591 | 142 592 | 2813 593 | 2412 594 | 1286 595 | 2464 596 | 1 597 | 2414 598 | 234 599 | 2863 600 | 1467 601 | 686 602 | 1492 603 | 592 604 | 2901 605 | 558 606 | 567 607 | 2026 608 | 201 609 | 2767 610 | 1952 611 | 830 612 | 1497 613 | 2570 614 | 1674 615 | 1723 616 | 2308 617 | 1475 618 | 1612 619 | 1815 620 | 2427 621 | 2201 622 | 1969 623 | 754 624 | 1982 625 | 399 626 | 2706 627 | 9 628 | 2462 629 | 2473 630 | 1310 631 | 2558 632 | 2269 633 | 2840 634 | 1028 635 | 2827 636 | 750 637 | 1930 638 | 1798 639 | 2967 640 | 2531 641 | 1002 642 | 2714 643 | 2856 644 | 426 645 | 2060 646 | 2842 647 | 1216 648 | 76 649 | 2314 650 | 1870 651 | 1590 652 | 2114 653 | 843 654 | 86 655 | 2275 656 | 1908 657 | 1916 658 | 45 659 | 2472 660 | 2398 661 | 1515 662 | 1973 663 | 2187 664 | 1369 665 | 283 666 | 2092 667 | 1819 668 | 1509 669 | 938 670 | 845 671 | 1941 672 | 128 673 | 1119 674 | 2864 675 | 413 676 | 2829 677 | 274 678 | 1570 679 | 766 680 | 2499 681 | 2099 682 | 3000 683 | 227 684 | 126 685 | 1039 686 | 1648 687 | 350 688 | 1032 689 | 2444 690 | 2969 691 | 1695 692 | 1465 693 | 692 694 | 2332 695 | 2664 696 | 1843 697 | 2603 698 | 2154 699 | 
389 700 | 1756 701 | 1292 702 | 885 703 | 2296 704 | 2823 705 | 452 706 | 1865 707 | 1637 708 | 832 709 | 685 710 | 1155 711 | 1569 712 | 2023 713 | 1558 714 | 1412 715 | 278 716 | 1700 717 | 612 718 | 2186 719 | 956 720 | 2804 721 | 689 722 | 65 723 | 1706 724 | 1702 725 | 2933 726 | 1088 727 | 264 728 | 650 729 | 1619 730 | 77 731 | 1260 732 | 2720 733 | 913 734 | 2429 735 | 2928 736 | 272 737 | 2230 738 | 2056 739 | 2262 740 | 12 741 | 1768 742 | 2397 743 | 1532 744 | 1043 745 | 2112 746 | 2439 747 | 466 748 | 462 749 | 1963 750 | 2587 751 | 365 752 | 837 753 | 1479 754 | 1333 755 | 1174 756 | 2100 757 | 1678 758 | 436 759 | 3003 760 | 2325 761 | 42 762 | 1883 763 | 1553 764 | 2335 765 | 1990 766 | 1694 767 | 679 768 | 2143 769 | 1988 770 | 14 771 | 2930 772 | 2963 773 | 498 774 | 1744 775 | 2904 776 | 2086 777 | 2814 778 | 2740 779 | 571 780 | 1372 781 | 1926 782 | 2542 783 | 2900 784 | 1899 785 | 1653 786 | 181 787 | 3011 788 | 2250 789 | 1306 790 | 777 791 | 294 792 | 588 793 | 1415 794 | 2548 795 | 2459 796 | 1176 797 | 211 798 | 1534 799 | 1331 800 | 1449 801 | 1842 802 | 154 803 | 1876 804 | 2226 805 | 988 806 | 1608 807 | 2050 808 | 931 809 | 803 810 | 2690 811 | 303 812 | 1571 813 | 2042 814 | 221 815 | 982 816 | 168 817 | 2030 818 | 3007 819 | 490 820 | 910 821 | 425 822 | 1317 823 | 1610 824 | 926 825 | 379 826 | 121 827 | 1495 828 | 344 829 | 1964 830 | 2097 831 | 2169 832 | 222 833 | 2682 834 | 268 835 | 366 836 | 1017 837 | 2079 838 | 600 839 | 312 840 | 590 841 | 1716 842 | 1860 843 | 2788 844 | 286 845 | 2522 846 | 1302 847 | 374 848 | 1144 849 | 523 850 | 2978 851 | 815 852 | 495 853 | 687 854 | 1264 855 | 2115 856 | 467 857 | 1100 858 | 1103 859 | 2461 860 | 2611 861 | 1749 862 | 2355 863 | 2306 864 | 839 865 | 457 866 | 2407 867 | 578 868 | 929 869 | 811 870 | 1253 871 | 2786 872 | 1586 873 | 2770 874 | 2723 875 | 570 876 | 2477 877 | 61 878 | 1348 879 | 137 880 | 1132 881 | 2890 882 | 2893 883 | 59 884 | 2165 885 | 1689 886 | 310 887 | 1680 888 | 1312 889 | 2560 890 | 1382 891 | 2483 892 | 1566 893 | 2601 894 | 710 895 | 375 896 | 1543 897 | 587 898 | 2121 899 | 1470 900 | 1207 901 | 1436 902 | 2174 903 | 713 904 | 1536 905 | 1638 906 | 2195 907 | 1151 908 | 1742 909 | 562 910 | 1667 911 | 281 912 | 2446 913 | 2781 914 | 563 915 | 1238 916 | 89 917 | 459 918 | 2063 919 | 784 920 | 424 921 | 958 922 | 794 923 | 2878 924 | 2648 925 | 2742 926 | 524 927 | 2223 928 | 631 929 | 1130 930 | 499 931 | 2368 932 | 1268 933 | 69 934 | 969 935 | 1940 936 | 1183 937 | 2997 938 | 1049 939 | 2576 940 | 1232 941 | 2119 942 | 2443 943 | 217 944 | 616 945 | 270 946 | 240 947 | 2390 948 | 2080 949 | 175 950 | 1361 951 | 2418 952 | 2661 953 | 768 954 | 1759 955 | 2684 956 | 2873 957 | 2518 958 | 2268 959 | 361 960 | 1072 961 | 2440 962 | 254 963 | 2806 964 | 2032 965 | 820 966 | 1736 967 | 177 968 | 301 969 | 206 970 | 556 971 | 2921 972 | 967 973 | 609 974 | 547 975 | 606 976 | 2338 977 | 763 978 | 2630 979 | 1476 980 | 70 981 | 198 982 | 2287 983 | 1665 984 | 287 985 | 1856 986 | 2382 987 | 2564 988 | 1671 989 | 293 990 | 1262 991 | 2150 992 | 393 993 | 669 994 | 253 995 | 1814 996 | 507 997 | 893 998 | 2024 999 | 448 1000 | 2031 1001 | 2613 1002 | 1042 1003 | 2075 1004 | 566 1005 | 1503 1006 | 1626 1007 | 849 1008 | 349 1009 | 2681 1010 | 435 1011 | 1349 1012 | 2758 1013 | 1118 1014 | 1257 1015 | 1560 1016 | 818 1017 | 2500 1018 | 2658 1019 | 1074 1020 | 976 1021 | 243 1022 | 22 1023 | 1267 1024 | 2635 1025 | 848 1026 | 790 1027 | 304 1028 | 173 1029 | 904 1030 | 2163 1031 | 771 1032 
| 733 1033 | 1769 1034 | 1588 1035 | 1051 1036 | 138 1037 | 348 1038 | 1162 1039 | 866 1040 | 2685 1041 | 27 1042 | 1392 1043 | 1997 1044 | 1691 1045 | 545 1046 | 2109 1047 | 260 1048 | 1110 1049 | 1468 1050 | 2711 1051 | 1518 1052 | 2403 1053 | 192 1054 | 2219 1055 | 2733 1056 | 767 1057 | 1890 1058 | 2949 1059 | 1318 1060 | 1148 1061 | 1669 1062 | 1643 1063 | 1505 1064 | 2384 1065 | 605 1066 | 2983 1067 | 1069 1068 | 2719 1069 | 1353 1070 | 1141 1071 | 1591 1072 | 3 1073 | 736 1074 | 1014 1075 | 2957 1076 | 451 1077 | 1208 1078 | 773 1079 | 2545 1080 | 1868 1081 | 2276 1082 | 683 1083 | 1944 1084 | 380 1085 | 2659 1086 | 2133 1087 | 338 1088 | 6 1089 | 1790 1090 | 125 1091 | 1818 1092 | 1599 1093 | 517 1094 | 1989 1095 | 875 1096 | 1898 1097 | 250 1098 | 1214 1099 | 892 1100 | 197 1101 | 2707 1102 | 1496 1103 | 2146 1104 | 2520 1105 | 305 1106 | 850 1107 | 1059 1108 | 189 1109 | 2000 1110 | 1888 1111 | 1559 1112 | 290 1113 | 2931 1114 | 203 1115 | 1607 1116 | 649 1117 | 1904 1118 | 1065 1119 | 1751 1120 | 2539 1121 | 280 1122 | 2085 1123 | 2793 1124 | 2581 1125 | 1718 1126 | 2534 1127 | 19 1128 | 2193 1129 | 97 1130 | 2491 1131 | 2834 1132 | 719 1133 | 1053 1134 | 979 1135 | 2841 1136 | 205 1137 | 2166 1138 | 2563 1139 | 2408 1140 | 2430 1141 | 1662 1142 | 2466 1143 | 28 1144 | 1927 1145 | 2862 1146 | 2651 1147 | 1429 1148 | 2356 1149 | 693 1150 | 2253 1151 | 1194 1152 | 743 1153 | 2301 1154 | 1758 1155 | 2257 1156 | 2805 1157 | 2571 1158 | 67 1159 | 1405 1160 | 2951 1161 | 64 1162 | 945 1163 | 1891 1164 | 228 1165 | 688 1166 | 269 1167 | 2955 1168 | 2979 1169 | 1734 1170 | 1720 1171 | 2001 1172 | 808 1173 | 2405 1174 | 2449 1175 | 676 1176 | 966 1177 | 1547 1178 | 352 1179 | 705 1180 | 2701 1181 | 986 1182 | 696 1183 | 1631 1184 | 1426 1185 | 473 1186 | 178 1187 | 326 1188 | 3005 1189 | 259 1190 | 515 1191 | 984 1192 | 895 1193 | 1179 1194 | 1587 1195 | 1858 1196 | 788 1197 | 302 1198 | 2053 1199 | 2787 1200 | 1761 1201 | 2354 1202 | 1164 1203 | 2110 1204 | 2267 1205 | 2941 1206 | 384 1207 | 1234 1208 | 1548 1209 | 1685 1210 | 1474 1211 | 2182 1212 | 518 1213 | 542 1214 | 1388 1215 | 1872 1216 | 1198 1217 | 332 1218 | 2070 1219 | 1285 1220 | 2942 1221 | 2784 1222 | 903 1223 | 1419 1224 | 1041 1225 | 369 1226 | 476 1227 | 408 1228 | 1679 1229 | 1978 1230 | 778 1231 | 230 1232 | 2713 1233 | 681 1234 | 213 1235 | 311 1236 | 1277 1237 | 978 1238 | 1875 1239 | 1027 1240 | 368 1241 | 1247 1242 | 516 1243 | 1826 1244 | 1530 1245 | 2137 1246 | 33 1247 | 727 1248 | 2919 1249 | 2052 1250 | 2511 1251 | 1117 1252 | 891 1253 | 1658 1254 | 1220 1255 | 675 1256 | 2909 1257 | 2089 1258 | 1785 1259 | 2799 1260 | 5 1261 | 898 1262 | 829 1263 | 1654 1264 | 1855 1265 | 757 1266 | 2743 1267 | 943 1268 | 2959 1269 | 2132 1270 | 2732 1271 | 2675 1272 | 874 1273 | 2546 1274 | 1327 1275 | 509 1276 | 574 1277 | 1938 1278 | 2409 1279 | 783 1280 | 2884 1281 | 2345 1282 | 2947 1283 | 1037 1284 | 2718 1285 | 1953 1286 | 273 1287 | 1048 1288 | 219 1289 | 2507 1290 | 2369 1291 | 330 1292 | 739 1293 | 2665 1294 | 1249 1295 | 1922 1296 | 2423 1297 | 487 1298 | 698 1299 | 1731 1300 | 534 1301 | 2510 1302 | 414 1303 | 779 1304 | 2294 1305 | 1016 1306 | 2040 1307 | 2318 1308 | 1096 1309 | 1012 1310 | 810 1311 | 876 1312 | 184 1313 | 1308 1314 | 1693 1315 | 1188 1316 | 2671 1317 | 1411 1318 | 2487 1319 | 1124 1320 | 1833 1321 | 2502 1322 | 721 1323 | 2014 1324 | 1450 1325 | 2167 1326 | 185 1327 | 1139 1328 | 709 1329 | 1236 1330 | 632 1331 | 1273 1332 | 1776 1333 | 2481 1334 | 1813 1335 | 443 1336 | 238 1337 | 2858 1338 | 
75 1339 | 1746 1340 | 2254 1341 | 1739 1342 | 2144 1343 | 1015 1344 | 55 1345 | 662 1346 | 1378 1347 | 1332 1348 | 527 1349 | 1422 1350 | 2170 1351 | 3001 1352 | 1775 1353 | 1821 1354 | 1524 1355 | 127 1356 | 231 1357 | 2870 1358 | 1255 1359 | 1800 1360 | 99 1361 | 2832 1362 | 239 1363 | 2311 1364 | 1514 1365 | 1513 1366 | 1356 1367 | 2191 1368 | 2636 1369 | 920 1370 | 1464 1371 | 66 1372 | 2283 1373 | 1329 1374 | 1143 1375 | 159 1376 | 1288 1377 | 579 1378 | 1822 1379 | 769 1380 | 989 1381 | 633 1382 | 1788 1383 | 1820 1384 | 2920 1385 | 265 1386 | 1779 1387 | 357 1388 | 620 1389 | 31 1390 | 706 1391 | 569 1392 | 643 1393 | 2346 1394 | 657 1395 | 182 1396 | 1355 1397 | 113 1398 | 1417 1399 | 353 1400 | 1704 1401 | 1036 1402 | 13 1403 | 445 1404 | 1661 1405 | 423 1406 | 232 1407 | 364 1408 | 624 1409 | 1575 1410 | 2149 1411 | 576 1412 | 229 1413 | 883 1414 | 1423 1415 | 1539 1416 | 1713 1417 | 2592 1418 | 1078 1419 | 1918 1420 | 383 1421 | 2744 1422 | 237 1423 | 983 1424 | 1434 1425 | 673 1426 | 2243 1427 | 729 1428 | 343 1429 | 2905 1430 | 2046 1431 | 1956 1432 | 2566 1433 | 2992 1434 | 851 1435 | 78 1436 | 2292 1437 | 2654 1438 | 1203 1439 | 1690 1440 | 635 1441 | 1777 1442 | 896 1443 | 1173 1444 | 94 1445 | 2803 1446 | 1076 1447 | 2151 1448 | 1958 1449 | 1797 1450 | 252 1451 | 2161 1452 | 297 1453 | 2670 1454 | 2794 1455 | 2757 1456 | 2958 1457 | 2454 1458 | 1905 1459 | 2536 1460 | 1580 1461 | 2526 1462 | 2215 1463 | 2061 1464 | 2708 1465 | 1266 1466 | 1909 1467 | 2772 1468 | 1923 1469 | 860 1470 | 2173 1471 | 861 1472 | 2016 1473 | 322 1474 | 2051 1475 | 309 1476 | 1549 1477 | 1828 1478 | 1796 1479 | 670 1480 | 355 1481 | 44 1482 | 1961 1483 | 1686 1484 | 1780 1485 | 1957 1486 | 1354 1487 | 371 1488 | 1156 1489 | 1466 1490 | 2972 1491 | 1967 1492 | 402 1493 | 2383 1494 | 1494 1495 | 419 1496 | 1979 1497 | 2317 1498 | 1827 1499 | 2748 1500 | 458 1501 | 834 1502 | 1437 1503 | 1578 1504 | 2598 1505 | 1363 1506 | 1538 1507 | 1942 1508 | 2693 1509 | 1047 1510 | 48 1511 | 1252 1512 | 484 1513 | 2213 1514 | 2773 1515 | 1620 1516 | 1699 1517 | 2811 1518 | 1473 1519 | 2538 1520 | 822 1521 | 2929 1522 | 395 1523 | 993 1524 | 2421 1525 | 1364 1526 | 2727 1527 | 2045 1528 | 2752 1529 | 2710 1530 | 1373 1531 | 868 1532 | 2389 1533 | 1114 1534 | 774 1535 | 2073 1536 | 881 1537 | 1368 1538 | 505 1539 | 2094 1540 | 991 1541 | 2641 1542 | 2879 1543 | 828 1544 | 1878 1545 | 1121 1546 | 194 1547 | 2471 1548 | 331 1549 | 690 1550 | 950 1551 | 183 1552 | 1628 1553 | 2524 1554 | 2802 1555 | 2795 1556 | 2589 1557 | 2961 1558 | 962 1559 | 446 1560 | 2839 1561 | 2996 1562 | 2422 1563 | 282 1564 | 695 1565 | 2290 1566 | 1271 1567 | 2395 1568 | 2013 1569 | 2175 1570 | 478 1571 | 2988 1572 | 559 1573 | 1894 1574 | 1507 1575 | 2360 1576 | 2646 1577 | 2349 1578 | 2106 1579 | 2582 1580 | 1771 1581 | 193 1582 | 1315 1583 | 1184 1584 | 1104 1585 | 2925 1586 | 2866 1587 | 313 1588 | 908 1589 | 1105 1590 | 1817 1591 | 2885 1592 | 135 1593 | 879 1594 | 2751 1595 | 1834 1596 | 1533 1597 | 68 1598 | 306 1599 | 1498 1600 | 1955 1601 | 2869 1602 | 608 1603 | 1911 1604 | 2816 1605 | 1995 1606 | 865 1607 | 2831 1608 | 2107 1609 | 342 1610 | 2057 1611 | 2456 1612 | 1554 1613 | 1120 1614 | 703 1615 | 474 1616 | 1977 1617 | 2270 1618 | 2777 1619 | 512 1620 | 2036 1621 | 2512 1622 | 2908 1623 | 1400 1624 | 400 1625 | 2697 1626 | 539 1627 | 1598 1628 | 2776 1629 | 2974 1630 | 1568 1631 | 2506 1632 | 816 1633 | 1129 1634 | 1404 1635 | 1642 1636 | 2810 1637 | 1246 1638 | 134 1639 | 995 1640 | 1328 1641 | 139 1642 | 2881 1643 | 2434 
1644 | 233 1645 | 2871 1646 | 2298 1647 | 1840 1648 | 672 1649 | 2255 1650 | 1623 1651 | 2003 1652 | 2544 1653 | 1163 1654 | 707 1655 | 1806 1656 | 224 1657 | 2551 1658 | 266 1659 | 1629 1660 | 599 1661 | 109 1662 | 2168 1663 | 2342 1664 | 2573 1665 | 2624 1666 | 1112 1667 | 2217 1668 | 1577 1669 | 455 1670 | 553 1671 | 2934 1672 | 2836 1673 | 2634 1674 | 388 1675 | 1357 1676 | 1618 1677 | 1218 1678 | 1160 1679 | 1675 1680 | 2852 1681 | 1722 1682 | 514 1683 | 2336 1684 | 699 1685 | 2104 1686 | 140 1687 | 1433 1688 | 919 1689 | 504 1690 | 1399 1691 | 1038 1692 | 1537 1693 | 2887 1694 | 819 1695 | 796 1696 | 1595 1697 | 2375 1698 | 615 1699 | 1765 1700 | 1604 1701 | 2341 1702 | 1555 1703 | 1324 1704 | 2807 1705 | 2771 1706 | 284 1707 | 1243 1708 | 1211 1709 | 1031 1710 | 2995 1711 | 1763 1712 | 2012 1713 | 1799 1714 | 1849 1715 | 491 1716 | 1889 1717 | 1427 1718 | 1454 1719 | 136 1720 | 554 1721 | 2848 1722 | 636 1723 | 486 1724 | 1210 1725 | 1410 1726 | 2180 1727 | 2211 1728 | 394 1729 | 3010 1730 | 1766 1731 | 536 1732 | 677 1733 | 2208 1734 | 87 1735 | 2224 1736 | 2763 1737 | 382 1738 | 2037 1739 | 381 1740 | 525 1741 | 680 1742 | 1839 1743 | 1297 1744 | 248 1745 | 809 1746 | 2724 1747 | 1197 1748 | 7 1749 | 2263 1750 | 1001 1751 | 2088 1752 | 2556 1753 | 2424 1754 | 1692 1755 | 1762 1756 | 997 1757 | 1593 1758 | 1624 1759 | 2468 1760 | 2628 1761 | 1717 1762 | 2789 1763 | 2962 1764 | 2261 1765 | 1987 1766 | 1709 1767 | 1242 1768 | 2105 1769 | 1351 1770 | 378 1771 | 2687 1772 | 1196 1773 | 2025 1774 | 2259 1775 | 161 1776 | 36 1777 | 999 1778 | 812 1779 | 167 1780 | 2976 1781 | 492 1782 | 2293 1783 | 43 1784 | 316 1785 | 335 1786 | 2379 1787 | 2567 1788 | 2910 1789 | 1026 1790 | 2248 1791 | 503 1792 | 1750 1793 | 2579 1794 | 2436 1795 | 2721 1796 | 2673 1797 | 1044 1798 | 664 1799 | 831 1800 | 1040 1801 | 668 1802 | 460 1803 | 2577 1804 | 1663 1805 | 2404 1806 | 1079 1807 | 1275 1808 | 1609 1809 | 493 1810 | 528 1811 | 166 1812 | 1004 1813 | 2986 1814 | 2555 1815 | 715 1816 | 2914 1817 | 742 1818 | 2638 1819 | 531 1820 | 2833 1821 | 2433 1822 | 555 1823 | 2134 1824 | 2067 1825 | 802 1826 | 46 1827 | 325 1828 | 1529 1829 | 2279 1830 | 1111 1831 | 153 1832 | 2156 1833 | 2205 1834 | 1407 1835 | 2392 1836 | 2835 1837 | 520 1838 | 1341 1839 | 663 1840 | 1999 1841 | 2754 1842 | 2725 1843 | 101 1844 | 1217 1845 | 1245 1846 | 1919 1847 | 2897 1848 | 2157 1849 | 1240 1850 | 2260 1851 | 1445 1852 | 1020 1853 | 3014 1854 | 2644 1855 | 1375 1856 | 1058 1857 | 651 1858 | 2882 1859 | 2484 1860 | 526 1861 | 1994 1862 | 1453 1863 | 2530 1864 | 749 1865 | 108 1866 | 1966 1867 | 952 1868 | 1682 1869 | 300 1870 | 1064 1871 | 2351 1872 | 2838 1873 | 2340 1874 | 2722 1875 | 1251 1876 | 1970 1877 | 0 1878 | 81 1879 | 8 1880 | 1280 1881 | 180 1882 | 2004 1883 | 1244 1884 | 1668 1885 | 1248 1886 | 1256 1887 | 1981 1888 | 377 1889 | 1431 1890 | 596 1891 | 2791 1892 | 1931 1893 | 1807 1894 | 160 1895 | 2221 1896 | 469 1897 | 2452 1898 | 1782 1899 | 1457 1900 | 2240 1901 | 1527 1902 | 275 1903 | 1542 1904 | 1408 1905 | 1269 1906 | 1844 1907 | 1823 1908 | 2098 1909 | 601 1910 | 1831 1911 | 2238 1912 | 158 1913 | 2362 1914 | 2669 1915 | 2503 1916 | 1384 1917 | 2591 1918 | 1640 1919 | 522 1920 | 1975 1921 | 2633 1922 | 2627 1923 | 1647 1924 | 146 1925 | 1377 1926 | 1478 1927 | 1670 1928 | 2514 1929 | 2148 1930 | 1021 1931 | 2943 1932 | 511 1933 | 2691 1934 | 149 1935 | 2059 1936 | 418 1937 | 207 1938 | 1696 1939 | 2457 1940 | 738 1941 | 1387 1942 | 2696 1943 | 1233 1944 | 941 1945 | 1209 1946 | 741 1947 | 2845 1948 | 
2394 1949 | 2286 1950 | 2128 1951 | 2889 1952 | 2535 1953 | 552 1954 | 1848 1955 | 209 1956 | 1874 1957 | 1294 1958 | 1808 1959 | 2435 1960 | 1440 1961 | 806 1962 | 1213 1963 | 2095 1964 | 2272 1965 | 496 1966 | 960 1967 | 1781 1968 | 1265 1969 | 1133 1970 | 1859 1971 | 2216 1972 | 1772 1973 | 1917 1974 | 2617 1975 | 1770 1976 | 2618 1977 | 2482 1978 | 257 1979 | 1414 1980 | 2666 1981 | 2008 1982 | 2668 1983 | 1867 1984 | 1802 1985 | 1552 1986 | 2688 1987 | 2819 1988 | 1954 1989 | 25 1990 | 1521 1991 | 156 1992 | 162 1993 | 2347 1994 | 1902 1995 | 2072 1996 | 1344 1997 | 1309 1998 | 157 1999 | 1639 2000 | 1212 2001 | 2323 2002 | 2019 2003 | 1089 2004 | 2912 2005 | 954 2006 | 2227 2007 | 1439 2008 | 220 2009 | 2519 2010 | 2612 2011 | 2652 2012 | 2194 2013 | 1108 2014 | 1116 2015 | 684 2016 | 2271 2017 | 2944 2018 | 1005 2019 | 144 2020 | 2350 2021 | 124 2022 | 1660 2023 | 2780 2024 | 1526 2025 | 1321 2026 | 1386 2027 | 1925 2028 | 468 2029 | 2006 2030 | 1054 2031 | 1600 2032 | 833 2033 | 1135 2034 | 483 2035 | 16 2036 | 936 2037 | 1009 2038 | 1154 2039 | 2508 2040 | 2755 2041 | 1313 2042 | 2034 2043 | 1336 2044 | 1279 2045 | 479 2046 | 2235 2047 | 1573 2048 | 2202 2049 | 1084 2050 | 1920 2051 | 2698 2052 | 2960 2053 | 2950 2054 | 334 2055 | 1556 2056 | 775 2057 | 1837 2058 | 1753 2059 | 1934 2060 | 801 2061 | 2277 2062 | 347 2063 | 1145 2064 | 1175 2065 | 1928 2066 | 2420 2067 | 433 2068 | 1087 2069 | 267 2070 | 2101 2071 | 2417 2072 | 2824 2073 | 627 2074 | 2245 2075 | 34 2076 | 2857 2077 | 623 2078 | 1998 2079 | 442 2080 | 550 2081 | 1935 2082 | 1924 2083 | 564 2084 | 2413 2085 | 2953 2086 | 506 2087 | 622 2088 | 2764 2089 | 935 2090 | 782 2091 | 1745 2092 | 1681 2093 | 199 2094 | 2854 2095 | 965 2096 | 2860 2097 | 959 2098 | 2334 2099 | 255 2100 | 2778 2101 | 537 2102 | 1444 2103 | 968 2104 | 370 2105 | 1237 2106 | 1522 2107 | 644 2108 | 964 2109 | 551 2110 | 581 2111 | 2082 2112 | 2768 2113 | 1421 2114 | 145 2115 | 2607 2116 | 1424 2117 | 886 2118 | 2540 2119 | 1168 2120 | 117 2121 | 2594 2122 | 573 2123 | 2569 2124 | 629 2125 | 1846 2126 | 133 2127 | 1441 2128 | 2373 2129 | 2093 2130 | 2674 2131 | 726 2132 | 2935 2133 | 862 2134 | 403 2135 | 653 2136 | 447 2137 | 798 2138 | 401 2139 | 2316 2140 | 2288 2141 | 1150 2142 | 1406 2143 | 1296 2144 | 1385 2145 | 2867 2146 | 1683 2147 | 2946 2148 | 2039 2149 | 933 2150 | 2504 2151 | 2876 2152 | 725 2153 | 2645 2154 | 1425 2155 | 2437 2156 | 1263 2157 | 1550 2158 | 2735 2159 | 2625 2160 | 1295 2161 | 2906 2162 | 530 2163 | 1390 2164 | 53 2165 | 2339 2166 | 1113 2167 | 437 2168 | 2 2169 | 188 2170 | 1932 2171 | 944 2172 | 2872 2173 | 1715 2174 | 2653 2175 | 2679 2176 | 1603 2177 | 396 2178 | 431 2179 | 2460 2180 | 1008 2181 | 2448 2182 | 1488 2183 | 1729 2184 | 1138 2185 | 2922 2186 | 2442 2187 | 2667 2188 | 614 2189 | 2214 2190 | 1153 2191 | 2284 2192 | 746 2193 | 1929 2194 | 1793 2195 | 2002 2196 | 32 2197 | 315 2198 | 2078 2199 | 2111 2200 | 826 2201 | 2324 2202 | 1469 2203 | 463 2204 | 1646 2205 | 1965 2206 | 2348 2207 | 842 2208 | 2915 2209 | 428 2210 | 1721 2211 | 1483 2212 | 2916 2213 | 406 2214 | 2874 2215 | 1621 2216 | 791 2217 | 1086 2218 | 2203 2219 | 2411 2220 | 708 2221 | 1362 2222 | 2821 2223 | 373 2224 | 2894 2225 | 747 2226 | 2970 2227 | 646 2228 | 2902 2229 | 1052 2230 | 1346 2231 | 2178 2232 | 2898 2233 | 873 2234 | 2517 2235 | 93 2236 | 2228 2237 | 242 2238 | 543 2239 | 1307 2240 | 103 2241 | 2387 2242 | 212 2243 | 1091 2244 | 2177 2245 | 1565 2246 | 63 2247 | 666 2248 | 751 2249 | 1516 2250 | 2837 2251 | 2130 2252 | 1471 2253 
[... item-ID list continues: entries 2254–2966, one integer ID per line, omitted here for brevity ...]
--------------------------------------------------------------------------------