├── .gitignore ├── Extraction ├── CroW │ ├── crow.m │ ├── crow_act.m │ ├── extract_resnet_crow.m │ └── extract_vgg_crow.m ├── DELF │ ├── aggre_delf_fv.m │ ├── aggre_delf_vlad.m │ ├── convert.py │ ├── delf_config_example.pbtxt │ ├── extract_delf.py │ └── imagelist.py ├── GeM │ ├── extract_resnet_gem.m │ ├── extract_resnet_rgem.m │ ├── extract_vgg_gem.m │ └── extract_vgg_rgem.m ├── MAC │ ├── extract_resnet_mac.m │ ├── extract_resnet_rmac.m │ ├── extract_vgg_mac.m │ ├── extract_vgg_rmac.m │ ├── mac.m │ ├── mac_act.m │ ├── rmac_regionvec.m │ └── rmac_regionvec_act.m ├── README.md ├── SIFT │ ├── aggre_sift_fv.m │ ├── aggre_sift_vlad.m │ └── extract_sift.m ├── SPoC │ ├── spoc.m │ ├── spoc_act.m │ ├── test_resnetspoc.m │ └── test_vggspoc.m ├── test_Holidays.py ├── test_Oxford5k.py ├── test_Paris6k.py └── utils │ ├── apply_whiten.m │ ├── compute_map.m │ ├── crop_qim.m │ ├── vecpostproc.m │ ├── vecs_normalize.m │ └── yael_pca.m ├── Figures ├── Framework.png ├── MST.png ├── Retrieval_examples.png └── mAP_difference.png ├── README.md ├── Relation ├── README.md └── relation_mining.m └── Translation ├── README.md ├── models.py ├── run_0_0.sh ├── run_0_1.sh ├── run_0_2.sh ├── run_1_0.sh ├── run_1_1.sh ├── run_1_2.sh ├── run_2_0.sh ├── run_2_1.sh ├── run_2_2.sh ├── test_Dis.py ├── test_mAP.py ├── train.py └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /Extraction/CroW/crow.m: -------------------------------------------------------------------------------- 1 | 2 | function [x, X] = crow(I, net, flag) 3 | 4 | if size(I,3) == 1 5 | I = repmat(I, [1 1 3]); 6 | end 7 | 8 | sz = size(I); 9 | if sz(1) >= 1500 || sz(2) >= 1500 10 | I = imresize(I, [1000, 1000]); 11 | end 12 | if sz(1) < 224 || sz(2) < 224 13 | I = imresize(I, [224, 224]); 14 | end 15 | 16 | I = single(I) - mean(net.meta.normalization.averageImage(:)); 17 | % vgg 18 | if flag == 1 19 | if ~isa(net.layers{1}.weights{1}, 'gpuArray') 20 | rnet = vl_simplenn(net, I); 21 | X = max(rnet(end).x, 0); 22 | else 23 | rnet = vl_simplenn(net, gpuArray(I)); 24 | X = gather(max(rnet(end).x, 0)); 25 | end 26 | end 27 | 28 | %%%%%%%%%%%%%%%%%%%%%%%%%%%% 29 | % resnet 30 | if flag == 0 31 | I = gpuArray(I); 32 | net.eval({'data', I}); 33 | X = gather(max(net.vars(net.getVarIndex('pool5')).value, 0)); 34 | end 35 | %%%%%%%%%%%%%%%%%%%%%%%%%%%% 36 | x = crow_act(X); 37 | -------------------------------------------------------------------------------- /Extraction/CroW/crow_act.m: -------------------------------------------------------------------------------- 1 | function x = crow_act(x) 2 | 3 | if ~max(size(x, 1), size(x, 2)) 4 | x = zeros(size(x, 3), 1, class(x)); 5 | return; 6 | end 7 | 8 | % compute_crow_spatial_weight S 9 | a = 2; 10 | b = 2; 11 | 12 | S = sum(x, 3); 13 | z = (sum(sum(S.^a)))^(1.0/a); 14 | S = (S ./ z).^(1.0/b); 15 | 16 | % compute_crow_channel_weight C 17 | sz = size(x); 18 | w = sz(1); 19 | h = sz(2); 20 | K = sz(3); 21 | area = 1.0 * w * h; 22 | C = zeros(K, 1); 23 | 24 | for i = 1 : K 25 | tmp = x(:, :, i); 26 | C(i, 1) = sum(sum(tmp ~= 0)) / area; 27 | end 28 | 29 | C_sum = sum(C); 30 | for i = 1 : K 31 | d = C(i, 1); 32 | if d > 0 33 | C(i, 1) = log(C_sum / d); 34 | else 35 | C(i, 1) = 0; 36 | end 37 | end 38 | 39 | for i = 1 : K 40 | x(:, :, i) = x(:, :, i) .* S; 41 | end 42 | x = reshape(sum(sum(x, 1), 2), [size(x,3) 1]); 43 | x = x .* C; -------------------------------------------------------------------------------- /Extraction/CroW/extract_resnet_crow.m: -------------------------------------------------------------------------------- 1 | addpath('../utils'); 2 | 3 | g = gpuDevice(1) 4 | reset(g) 5 | 6 | 
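crow_act.m above is the core of CroW: it builds a spatial weight S by summing activations over channels, L2-normalizing and taking the square root (a = b = 2), and a channel weight C(k) = log(sum_j Q_j / Q_k) from each channel's non-zero ratio Q_k, then returns the weighted channel sums. A minimal toy call, assuming crow_act.m and the utils folder are on the path, with made-up tensor sizes:

X = single(rand(14, 14, 512));   % hypothetical H x W x K conv activation map
v = crow_act(X);                 % 512 x 1 CroW-weighted channel aggregation
v = vecpostproc(v);              % post-process, as the extraction scripts in this folder do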
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 7 | 8 | if exist('aml') ~= 3 9 | mex -compatibleArrayDims aml.c 10 | end 11 | 12 | % matconvnet is a prerequisite 13 | % run vl_setupnn for your installation to avoid downloading and compiling again 14 | if exist('vl_nnconv') ~= 3 15 | cd matconvnet-1.0-beta25/matlab/ 16 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 17 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 18 | end 19 | vl_setupnn; 20 | cd ../../ 21 | end 22 | 23 | net = dagnn.DagNN.loadobj(load('imagenet-resnet-101-dag.mat')); 24 | net.mode = 'test'; 25 | net.conserveMemory = false; 26 | move(net, 'gpu') 27 | 28 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 29 | 30 | data_dir = '../dvsd/extractD/data/'; 31 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 32 | desc_name = 'resnetcrow'; 33 | 34 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 35 | % extract features 36 | fprintf('Extracting features...\n'); 37 | 38 | % Whiting 39 | fprintf('Leaning PCA-whitening features...\n'); 40 | white_dir = [data_dir, 'Landmarks/query/']; 41 | white_list = dir([white_dir '*.jpg']); 42 | vecs = {}; 43 | for i = 1 : size(white_list, 1) 44 | img_name = [white_dir white_list(i).name]; 45 | img = imread(img_name); 46 | resnetcrow = vecpostproc(crow(img, net, 0)); 47 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 48 | % load(load_name); 49 | vecs{i} = resnetcrow; 50 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 51 | save(save_name, 'resnetcrow'); 52 | end 53 | 54 | % Learn PCA 55 | fprintf('Learning PCA-whitening\n'); 56 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 57 | 58 | for i = 1 : size(datasets, 2) 59 | dataset = datasets{i}; 60 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 61 | if dataset(1) == 'L' 62 | % image files are expected under each dataset's folder %'test 63 | base_dir = [data_dir, dataset, '/base/']; 64 | base_list = dir([base_dir '*.jpg']); 65 | for i = 1 : size(base_list, 1) 66 | img_name = [base_dir base_list(i).name]; 67 | img = imread(img_name); 68 | resnetcrow = vecpostproc(crow(img, net, 0)); 69 | resnetcrow = vecpostproc(apply_whiten(resnetcrow, Xm, eigvec, eigval)); % 70 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 71 | save(save_name, 'resnetcrow'); 72 | end 73 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 74 | else 75 | % image files are expected under each dataset's folder %'test 76 | base_dir = [data_dir, dataset, '/base/']; 77 | query_dir = [data_dir, dataset, '/query/']; 78 | 79 | base_list = dir([base_dir '*.jpg']); 80 | query_list = dir([query_dir '*.jpg']); 81 | 82 | % Base 83 | for i = 1 : size(base_list, 1) 84 | img_name = [base_dir base_list(i).name]; 85 | img = imread(img_name); 86 | resnetcrow = vecpostproc(crow(img, net, 0)); 87 | resnetcrow = vecpostproc(apply_whiten(resnetcrow, Xm, eigvec, eigval)); % 88 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 89 | save(save_name, 'resnetcrow'); 90 | end 91 | 92 | % Query 93 | for i = 1 : size(query_list, 1) 94 | img_name = [query_dir query_list(i).name]; 95 | img = imread(img_name); 96 | resnetcrow = vecpostproc(crow(img, net, 0)); 97 | resnetcrow = 
vecpostproc(apply_whiten(resnetcrow, Xm, eigvec, eigval)); % 98 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 99 | save(save_name, 'resnetcrow'); 100 | end 101 | end 102 | end 103 | -------------------------------------------------------------------------------- /Extraction/CroW/extract_vgg_crow.m: -------------------------------------------------------------------------------- 1 | addpath('../utils'); 2 | 3 | g = gpuDevice(4) 4 | reset(g) 5 | 6 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 7 | 8 | if exist('aml') ~= 3 9 | mex -compatibleArrayDims aml.c 10 | end 11 | 12 | % matconvnet is a prerequisite 13 | % run vl_setupnn for your installation to avoid downloading and compiling again 14 | if exist('vl_nnconv') ~= 3 15 | cd matconvnet-1.0-beta25/matlab/ 16 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 17 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 18 | end 19 | vl_setupnn; 20 | cd ../../ 21 | end 22 | 23 | % choose pre-trained CNN model 24 | modelfn = 'imagenet-vgg-verydeep-16.mat'; 25 | lid = 31; % use VGG 26 | % parameters of the method 27 | use_gpu = 1; 28 | % matconvnet is a prerequisite 29 | % run vl_setupnn for your installation to avoid downloading and compiling again 30 | if exist('vl_nnconv') ~= 3 31 | cd matconvnet-1.0-beta25/matlab/ 32 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 33 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 34 | end 35 | vl_setupnn; 36 | cd ../../ 37 | end 38 | 39 | net = load(modelfn); 40 | net.layers = {net.layers{1:lid}}; % remove fully connected layers 41 | 42 | if use_gpu 43 | net = vl_simplenn_move(net, 'gpu'); 44 | end 45 | 46 | % compatibility with matconvnet-1.0-beta25 (otherwise tested with matconvnet-1.0-beta15) 47 | for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'conv'), net.layers{i}.dilate=[1 1]; net.layers{i}.opts={}; end, end 48 | for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'relu'), net.layers{i}.leak=0; end, end 49 | for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'pool'), net.layers{i}.opts={}; end, end 50 | 51 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 52 | 53 | data_dir = '../dvsd/extractD/data/'; 54 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 55 | desc_name = 'vggcrow'; 56 | 57 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 58 | % extract features 59 | fprintf('Extracting features...\n'); 60 | 61 | % Whiting 62 | fprintf('Leaning PCA-whitening features...\n'); 63 | white_dir = [data_dir, 'Landmarks/query/']; 64 | white_list = dir([white_dir '*.jpg']); 65 | vecs = {}; 66 | for i = 1 : size(white_list, 1) 67 | img_name = [white_dir white_list(i).name]; 68 | img = imread(img_name); 69 | vggcrow = vecpostproc(crow(img, net, 1)); 70 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 71 | % load(load_name); 72 | vecs{i} = vggcrow; 73 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 74 | save(save_name, 'vggcrow'); 75 | end 76 | 77 | % Learn PCA 78 | fprintf('Learning PCA-whitening\n'); 79 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 80 | 81 | for i = 1 : size(datasets, 2) 82 | dataset = datasets{i}; 83 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 84 | if dataset(1) == 'L' 85 | % image 
files are expected under each dataset's folder %'test 86 | base_dir = [data_dir, dataset, '/base/']; 87 | base_list = dir([base_dir '*.jpg']); 88 | for i = 1 : size(base_list, 1) 89 | img_name = [base_dir base_list(i).name]; 90 | img = imread(img_name); 91 | vggcrow = vecpostproc(crow(img, net, 1)); 92 | vggcrow = vecpostproc(apply_whiten(vggcrow, Xm, eigvec, eigval)); % 93 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 94 | save(save_name, 'vggcrow'); 95 | end 96 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 97 | else 98 | % image files are expected under each dataset's folder %'test 99 | base_dir = [data_dir, dataset, '/base/']; 100 | query_dir = [data_dir, dataset, '/query/']; 101 | 102 | base_list = dir([base_dir '*.jpg']); 103 | query_list = dir([query_dir '*.jpg']); 104 | 105 | % Base 106 | for i = 1 : size(base_list, 1) 107 | img_name = [base_dir base_list(i).name]; 108 | img = imread(img_name); 109 | vggcrow = vecpostproc(crow(img, net, 1)); 110 | vggcrow = vecpostproc(apply_whiten(vggcrow, Xm, eigvec, eigval)); % 111 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 112 | save(save_name, 'vggcrow'); 113 | end 114 | 115 | % Query 116 | for i = 1 : size(query_list, 1) 117 | img_name = [query_dir query_list(i).name]; 118 | img = imread(img_name); 119 | vggcrow = vecpostproc(crow(img, net, 1)); 120 | vggcrow = vecpostproc(apply_whiten(vggcrow, Xm, eigvec, eigval)); % 121 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 122 | save(save_name, 'vggcrow'); 123 | end 124 | end 125 | end 126 | -------------------------------------------------------------------------------- /Extraction/DELF/aggre_delf_fv.m: -------------------------------------------------------------------------------- 1 | %% 2 | clc;clear; 3 | 4 | %% 5 | run('../vlfeat/toolbox/vl_setup'); 6 | vl_version verbose 7 | addpath('../utils'); 8 | 9 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 10 | train_num = 4000; 11 | 12 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 13 | 14 | train_dir = ['../../sdd/dtod/data/train/Landmarks/query/delf/']; 15 | train_list = dir([train_dir '*.mat']); 16 | 17 | % Train VLAD|FV & PCA 18 | train_delf_descriptors = {}; 19 | fprintf('Loading train delf...\n'); 20 | for i = 1 : train_num 21 | delf_name = [train_dir train_list(i).name]; 22 | load(delf_name) 23 | train_delf_descriptors{i} = delf';%' 24 | end 25 | fprintf('Loading done, num: %d...\n', train_num); 26 | 27 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 28 | 29 | % Train GMM 30 | k = 32; % number of GMMs 31 | fprintf('Training GMM, k = %d...\n', k); 32 | all_descriptors = [train_delf_descriptors{:}]; 33 | [means, covariances, priors] = vl_gmm(all_descriptors, k); 34 | 35 | fprintf('Aggregation train...\n'); 36 | vecs = {}; 37 | for i = 1 : numel(train_delf_descriptors) 38 | % Encode using delffv 39 | delffv = vecpostproc(vl_fisher(train_delf_descriptors{i}, means, covariances, priors)); 40 | delffv = delffv';%' 41 | img_name = [train_dir train_list(i).name]; 42 | fprintf('Encoding delffv %d: %s\n', i, img_name); 43 | fv_name = ['../../sdd/dtod/data/train/Landmarks/query/delffv/' train_list(i).name]; 44 | save(fv_name, 'delffv'); 45 | % load(fv_name); 46 | vecs{i} = delffv; 47 | end 48 | 49 | clear train_delf_descriptors; 50 | 51 | 
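aggre_delf_fv.m boils down to two VLFeat calls: vl_gmm fits a k = 32 Gaussian mixture on the pooled training DELF descriptors, and vl_fisher encodes each image's descriptor set against that mixture. A self-contained toy run of the same two calls, with invented sizes (DELF descriptors are 128-D after the PCA configured in delf_config_example.pbtxt):

descs = single(rand(128, 5000));                        % toy pool of training descriptors
[means, covariances, priors] = vl_gmm(descs, 32);       % 32-component GMM
img_descs = single(rand(128, 800));                     % local descriptors of one image
fv = vl_fisher(img_descs, means, covariances, priors);  % 2*128*32 = 8192-D Fisher vector

The script then PCA-whitens these Fisher vectors with yael_pca/apply_whiten before saving them.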
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 52 | 53 | fprintf('Learning PCA-whitening\n'); 54 | [~, eigvec, eigval, Xm] = yael_pca(cell2mat(vecs')'); 55 | 56 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 57 | 58 | for i = 1 : size(datasets, 2) 59 | dataset = datasets{i}; 60 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 61 | if dataset(1) == 'L' 62 | base_dir = ['../../sdd/dtod/data/train/Landmarks/base/delf/']; 63 | base_list = dir([base_dir '*.mat']); 64 | 65 | % Base 66 | fprintf('Loading test base delf %s...\n', dataset); 67 | base_delf_descriptors = {}; 68 | for i = 1 : size(base_list, 1) 69 | delf_name = [base_dir base_list(i).name]; 70 | load(delf_name) 71 | base_delf_descriptors{i} = delf'; 72 | end 73 | fprintf('Loading done, num: %d...\n', size(base_list, 1)); 74 | 75 | % Aggre Base 76 | fprintf('Aggregation test base...\n'); 77 | for i = 1 : numel(base_delf_descriptors) 78 | delffv = vecpostproc(vl_fisher(base_delf_descriptors{i}, means, covariances, priors)); 79 | delffv = vecpostproc(apply_whiten(delffv, Xm, eigvec, eigval, 2048)); 80 | delffv = delffv'; 81 | img_name = [base_dir base_list(i).name]; 82 | fprintf('Encoding delffv %d: %s\n', i, img_name); 83 | fv_name = ['../../sdd/dtod/data/train/Landmarks/base/delffv/' base_list(i).name]; 84 | save(fv_name, 'delffv'); 85 | end 86 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 87 | else 88 | base_dir = ['../../sdd/dtod/data/test/', dataset, '/base/delf/']; 89 | base_list = dir([base_dir '*.mat']); 90 | 91 | query_dir = ['../../sdd/dtod/data/test/', dataset, '/query/delf/']; 92 | query_list = dir([query_dir '*.mat']); 93 | 94 | % Base 95 | fprintf('Loading test base delf %s...\n', dataset); 96 | base_delf_descriptors = {}; 97 | for i = 1 : size(base_list, 1) 98 | delf_name = [base_dir base_list(i).name]; 99 | load(delf_name) 100 | base_delf_descriptors{i} = delf'; 101 | end 102 | fprintf('Loading done, num: %d...\n', size(base_list, 1)); 103 | 104 | % Query 105 | fprintf('Loading test query delf %s...\n', dataset); 106 | query_delf_descriptors = {}; 107 | for i = 1 : size(query_list, 1) 108 | delf_name = [query_dir query_list(i).name]; 109 | load(delf_name) 110 | query_delf_descriptors{i} = delf'; 111 | end 112 | fprintf('Loading done, num: %d...\n', size(query_list, 1)); 113 | 114 | % Aggre Base 115 | fprintf('Aggregation test base...\n'); 116 | for i = 1 : numel(base_delf_descriptors) 117 | delffv = vecpostproc(vl_fisher(base_delf_descriptors{i}, means, covariances, priors)); 118 | delffv = vecpostproc(apply_whiten (delffv, Xm, eigvec, eigval, 2048)); 119 | delffv = delffv'; 120 | img_name = [base_dir base_list(i).name]; 121 | fprintf('Encoding delffv %d: %s\n', i, img_name); 122 | fv_name = ['../../sdd/dtod/data/test/', dataset, '/base/delffv/' base_list(i).name]; 123 | save(fv_name, 'delffv'); 124 | end 125 | 126 | % Aggre Query 127 | fprintf('Aggregation test query...\n'); 128 | for i = 1 : numel(query_delf_descriptors) 129 | delffv = vecpostproc(vl_fisher(query_delf_descriptors{i}, means, covariances, priors)); 130 | delffv = vecpostproc(apply_whiten (delffv, Xm, eigvec, eigval, 2048)); 131 | delffv = delffv'; 132 | img_name = [query_dir query_list(i).name]; 133 | fprintf('Encoding delffv %d: %s\n', i, img_name); 134 | fv_name = ['../../sdd/dtod/data/test/', dataset, '/query/delffv/' query_list(i).name]; 135 | save(fv_name, 'delffv'); 136 | end 137 | clear base_delf_descriptors, 
query_delf_descriptors; 138 | end 139 | end 140 | 141 | -------------------------------------------------------------------------------- /Extraction/DELF/aggre_delf_vlad.m: -------------------------------------------------------------------------------- 1 | %% 2 | clc;clear; 3 | 4 | %% 5 | run('../vlfeat/toolbox/vl_setup'); 6 | vl_version verbose 7 | addpath('../utils'); 8 | 9 | datasets = {'Landmarks', 'Holidays', 'Paris6k', 'Oxford5k'}; 10 | train_num = 4000; 11 | 12 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 13 | 14 | train_dir = ['../../sdd/dtod/data/train/Landmarks/query/delf/']; 15 | train_list = dir([train_dir '*.mat']); 16 | 17 | % Train VLAD|FV & PCA 18 | train_delf_descriptors = {}; 19 | fprintf('Loading train delf...\n'); 20 | for i = 1 : train_num 21 | delf_name = [train_dir train_list(i).name]; 22 | load(delf_name) 23 | train_delf_descriptors{i} = delf';%' 24 | end 25 | fprintf('Loading done, num: %d...\n', train_num); 26 | 27 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 28 | 29 | %% Train K-MEANS 30 | k = 64; 31 | fprintf('Training K-MEANS, k = %d...\n', k); 32 | all_descriptors = [train_delf_descriptors{:}]; 33 | centroids = vl_kmeans(all_descriptors, k); 34 | kdtree = vl_kdtreebuild(centroids); 35 | 36 | fprintf('Aggregation train...\n'); 37 | vecs = {}; 38 | for i = 1 : numel(train_delf_descriptors) 39 | nn = vl_kdtreequery(kdtree, centroids, train_delf_descriptors{i}); 40 | assignments = zeros(k, numel(nn)); 41 | assignments(sub2ind(size(assignments), nn, 1:numel(nn))) = 1; 42 | 43 | delfvlad = vecpostproc(vl_vlad(train_delf_descriptors{i}, centroids, assignments)); 44 | delfvlad = delfvlad';%' 45 | 46 | img_name = [train_dir train_list(i).name]; 47 | fprintf('Encoding delfvlad %d: %s\n', i, img_name); 48 | vlad_name = ['../../sdd/dtod/data/train/Landmarks/query/delfvlad/' train_list(i).name]; 49 | save(vlad_name, 'delfvlad'); 50 | % load(vlad_name); 51 | vecs{i} = delfvlad; 52 | end 53 | 54 | clear train_delf_descriptors; 55 | 56 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 57 | 58 | fprintf('Learning PCA-whitening\n'); 59 | [~, eigvec, eigval, Xm] = yael_pca(single(cell2mat(vecs')'));% 60 | 61 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 62 | 63 | for i = 1 : size(datasets, 2) 64 | dataset = datasets{i}; 65 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 66 | if dataset(1) == 'L' 67 | base_dir = ['../../sdd/dtod/data/train/Landmarks/base/delf/']; 68 | base_list = dir([base_dir '*.mat']); 69 | 70 | % Base 71 | fprintf('Loading test base delf %s...\n', dataset); 72 | base_delf_descriptors = {}; 73 | for i = 1 : size(base_list, 1) 74 | delf_name = [base_dir base_list(i).name]; 75 | load(delf_name) 76 | base_delf_descriptors{i} = delf'; 77 | end 78 | fprintf('Loading done, num: %d...\n', size(base_list, 1)); 79 | 80 | % Aggre Base 81 | fprintf('Aggregation test base...\n'); 82 | for i = 1 : numel(base_delf_descriptors) 83 | nn = vl_kdtreequery(kdtree, centroids, base_delf_descriptors{i}); 84 | assignments = zeros(k, numel(nn)); 85 | assignments(sub2ind(size(assignments), nn, 1:numel(nn))) = 1; 86 | 87 | delfvlad = vecpostproc(vl_vlad(base_delf_descriptors{i}, centroids, assignments)); 88 | delfvlad = vecpostproc(apply_whiten(delfvlad, Xm, eigvec, eigval, 2048)); % 89 | delfvlad = delfvlad';%' 90 | 91 | img_name = [base_dir base_list(i).name]; 92 | fprintf('Encoding delfvlad %d: %s\n', i, img_name); 
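The VLAD encoding used throughout this script is the standard VLFeat recipe: k-means centroids, a kd-tree for nearest-centroid lookup, a one-hot assignment matrix, then vl_vlad. Pulled out of the loops, with toy 128-D descriptors (sizes are illustrative only):

k = 64;
descs = single(rand(128, 5000));               % toy training descriptors
centroids = vl_kmeans(descs, k);
kdtree = vl_kdtreebuild(centroids);
x = single(rand(128, 700));                    % local descriptors of one image
nn = vl_kdtreequery(kdtree, centroids, x);     % nearest centroid per descriptor
assign = zeros(k, numel(nn));
assign(sub2ind(size(assign), double(nn), 1:numel(nn))) = 1;
v = vl_vlad(x, centroids, assign);             % 64*128 = 8192-D VLAD vector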
93 | vlad_name = ['../../sdd/dtod/data/train/Landmarks/base/delfvlad/' base_list(i).name]; 94 | save(vlad_name, 'delfvlad'); 95 | end 96 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 97 | else 98 | base_dir = ['../../sdd/dtod/data/test/', dataset, '/base/delf/']; 99 | base_list = dir([base_dir '*.mat']); 100 | 101 | query_dir = ['../../sdd/dtod/data/test/', dataset, '/query/delf/']; 102 | query_list = dir([query_dir '*.mat']); 103 | 104 | % Base 105 | fprintf('Loading test base delf %s...\n', dataset); 106 | base_delf_descriptors = {}; 107 | for i = 1 : size(base_list, 1) 108 | delf_name = [base_dir base_list(i).name]; 109 | load(delf_name) 110 | base_delf_descriptors{i} = delf'; 111 | end 112 | fprintf('Loading done, num: %d...\n', size(base_list, 1)); 113 | 114 | % Query 115 | fprintf('Loading test query delf %s...\n', dataset); 116 | query_delf_descriptors = {}; 117 | for i = 1 : size(query_list, 1) 118 | delf_name = [query_dir query_list(i).name]; 119 | load(delf_name) 120 | query_delf_descriptors{i} = delf'; 121 | end 122 | fprintf('Loading done, num: %d...\n', size(query_list, 1)); 123 | 124 | % Aggre Base 125 | fprintf('Aggregation test base...\n'); 126 | for i = 1 : numel(base_delf_descriptors) 127 | nn = vl_kdtreequery(kdtree, centroids, base_delf_descriptors{i}); 128 | assignments = zeros(k, numel(nn)); 129 | assignments(sub2ind(size(assignments), nn, 1:numel(nn))) = 1; 130 | 131 | delfvlad = vecpostproc(vl_vlad(base_delf_descriptors{i}, centroids, assignments)); 132 | delfvlad = vecpostproc(apply_whiten(delfvlad, Xm, eigvec, eigval, 2048)); % 133 | delfvlad = delfvlad';%' 134 | 135 | img_name = [base_dir base_list(i).name]; 136 | fprintf('Encoding delfvlad %d: %s\n', i, img_name); 137 | vlad_name = ['../../sdd/dtod/data/test/', dataset, '/base/delfvlad/' base_list(i).name]; 138 | save(vlad_name, 'delfvlad'); 139 | end 140 | 141 | % Aggre Query 142 | fprintf('Aggregation test query...\n'); 143 | for i = 1 : numel(query_delf_descriptors) 144 | nn = vl_kdtreequery(kdtree, centroids, query_delf_descriptors{i}); 145 | assignments = zeros(k, numel(nn)); 146 | assignments(sub2ind(size(assignments), nn, 1:numel(nn))) = 1; 147 | 148 | delfvlad = vecpostproc(vl_vlad(query_delf_descriptors{i}, centroids, assignments)); 149 | delfvlad = vecpostproc(apply_whiten(delfvlad, Xm, eigvec, eigval, 2048)); % 150 | delfvlad = delfvlad';%' 151 | 152 | img_name = [query_dir query_list(i).name]; 153 | fprintf('Encoding delfvlad %d: %s\n', i, img_name); 154 | vlad_name = ['../../sdd/dtod/data/test/', dataset, '/query/delfvlad/' query_list(i).name]; 155 | save(vlad_name, 'delfvlad'); 156 | end 157 | 158 | clear base_delf_descriptors, query_delf_descriptors; 159 | 160 | end 161 | end 162 | 163 | -------------------------------------------------------------------------------- /Extraction/DELF/convert.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import os 3 | import scipy.io as sio 4 | import natsort 5 | from delf import feature_io 6 | # from match_images import * 7 | # import argparse 8 | import pdb 9 | 10 | datasets = ['Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'] 11 | 12 | for dataset in datasets: 13 | 14 | query_dir = './result/' + dataset + '/query' 15 | query_list = natsort.natsorted(os.listdir(query_dir)) 16 | result_dir = './delf/' + dataset + '/query' 17 | 18 | for q in query_list: 19 | # a = feature_io.ReadFromFile(query_dir + '/' + q) 20 | # print(len(a)) 21 | # print(a[0].shape) 22 | # 
print(a[1].shape) 23 | # print(a[2].shape) 24 | # print(a[3].shape) 25 | query_l, _, query_d, _, _ = feature_io.ReadFromFile(query_dir + '/' + q) 26 | mat_name = result_dir + '/' + q 27 | sio.savemat(mat_name, {'delf' : query_d}) 28 | print(mat_name) 29 | # break 30 | 31 | base_dir = './result/' + dataset + '/base' 32 | base_list = natsort.natsorted(os.listdir(base_dir)) 33 | result_dir = './delf/' + dataset + '/base' 34 | 35 | for b in base_list: 36 | # a = feature_io.ReadFromFile(base_dir + '/' + b) 37 | # print(len(a)) 38 | # print(a[0].shape) 39 | # print(a[1].shape) 40 | # print(a[2].shape) 41 | # print(a[3].shape) 42 | # print(b) 43 | base_l, _, base_d, _, _ = feature_io.ReadFromFile(base_dir + '/' + b) 44 | mat_name = result_dir + '/' + b 45 | sio.savemat(mat_name, {'delf' : base_d}) 46 | print(mat_name) 47 | # break 48 | # break -------------------------------------------------------------------------------- /Extraction/DELF/delf_config_example.pbtxt: -------------------------------------------------------------------------------- 1 | model_path: "parameters/delf_v1_20171026/model/" 2 | image_scales: .25 3 | image_scales: .3536 4 | image_scales: .5 5 | image_scales: .7071 6 | image_scales: 1.0 7 | image_scales: 1.4142 8 | image_scales: 2.0 9 | delf_local_config { 10 | use_pca: true 11 | # Note that for the exported model provided as an example, layer_name and 12 | # iou_threshold are hard-coded in the checkpoint. So, the layer_name and 13 | # iou_threshold variables here have no effect on the provided 14 | # extract_features.py script. 15 | layer_name: "resnet_v1_50/block3" 16 | iou_threshold: 1.0 17 | max_feature_num: 1000 18 | score_threshold: 100.0 19 | pca_parameters { 20 | mean_path: "parameters/delf_v1_20171026/pca/mean.datum" 21 | projection_matrix_path: "parameters/delf_v1_20171026/pca/pca_proj_mat.datum" 22 | pca_dim: 128 23 | use_whitening: false 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /Extraction/DELF/imagelist.py: -------------------------------------------------------------------------------- 1 | # from PIL import Image 2 | import os 3 | import natsort 4 | import argparse 5 | ap = argparse.ArgumentParser() 6 | ap.add_argument( 7 | "-d", "--dir", required=True, help="data dir") 8 | args = vars(ap.parse_args()) 9 | path=args["dir"] 10 | files = natsort.natsorted(os.listdir(path)) 11 | f=open('./list_images.txt','w') 12 | i=0 13 | for file in files: 14 | f.write(path+'/'+file+'\n') 15 | # try: 16 | # im=Image.open(path+'/'+file) 17 | # f.write(file+'\n') 18 | # im=im.convert('RGB') 19 | # im.save(path+'_bmp/'+(file.replace('.JPEG','.bmp'))) 20 | # i=i+1 21 | # print(i) 22 | # except: 23 | # i=i+1 24 | # print('error:',i) 25 | # continue; 26 | f.close() 27 | -------------------------------------------------------------------------------- /Extraction/GeM/extract_resnet_gem.m: -------------------------------------------------------------------------------- 1 | [root] = fileparts(mfilename('fullpath')) ; 2 | % add paths from this package 3 | addpath(fullfile(root, 'cnnblocks')); 4 | addpath(fullfile(root, 'cnninit')); 5 | addpath(fullfile(root, 'cnntrain')); 6 | addpath(fullfile(root, 'cnnvecs')); 7 | addpath(fullfile(root, 'examples')); 8 | addpath(fullfile(root, 'whiten')); 9 | addpath(fullfile(root, 'utils')); 10 | addpath(fullfile(root, 'yael')); 11 | addpath(fullfile(root, 'helpers')); 12 | 13 | g = gpuDevice(4) 14 | reset(g) 15 | 16 | 
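The descriptors extracted below come from a GeM (generalized-mean) pooling network: activations are raised to a power p, averaged over the spatial grid, and the p-th root is taken, so p = 1 gives average pooling (SPoC) and large p approaches max pooling (MAC). A one-line sketch of the pooling itself on a toy activation map (p = 3 is a typical value; sizes are illustrative):

p = 3;
X = single(rand(14, 14, 2048));                  % toy H x W x K activation map
gv = (mean(mean(max(X, 0).^p, 1), 2)).^(1/p);    % generalized mean per channel
gv = vecpostproc(reshape(gv, [], 1));            % 2048 x 1 GeM descriptor

In this script the pooling is performed inside the pre-trained retrievalSfM120k-gem-resnet101 network via cnn_vecms/cnn_vecrms, not by hand.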
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 17 | 18 | % matconvnet is a prerequisite 19 | % run vl_setupnn for your installation to avoid downloading and compiling again 20 | if exist('vl_nnconv') ~= 3 21 | cd matconvnet-1.0-beta25/matlab/ 22 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 23 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 24 | end 25 | vl_setupnn; 26 | cd ../../ 27 | end 28 | 29 | use_ms = 1; % use multi-scale representation, otherwise use single-scale 30 | use_rvec = 0; % use regional representation (R-MAC, R-GeM), otherwise use global (MAC, GeM) 31 | use_gpu = [1]; % use GPUs (array of GPUIDs), if empty use CPU 32 | 33 | network_file = './data/retrievalSfM120k-gem-resnet101.mat'; 34 | 35 | % Prepare function for desc extraction 36 | if ~use_rvec 37 | if ~use_ms 38 | descfun = @(x, y) cnn_vecms (x, y, 1); 39 | else 40 | descfun = @(x, y) cnn_vecms (x, y, [1, 1/sqrt(2), 1/2]); 41 | end 42 | else 43 | if ~use_ms 44 | descfun = @(x, y) cnn_vecrms (x, y, 3, 1); 45 | else 46 | descfun = @(x, y) cnn_vecrms (x, y, 3, [1, 1/sqrt(2), 1/2]); 47 | end 48 | end 49 | 50 | [~, network_name, ~] = fileparts(network_file); 51 | fprintf('>> %s: Evaluating CNN image retrieval...\n', network_name); 52 | 53 | load(network_file); 54 | net = dagnn.DagNN.loadobj(net); 55 | net.mode = 'test'; 56 | move(net, 'gpu') 57 | 58 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 59 | 60 | data_dir = '../dvsd/extractD/data/'; 61 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 62 | desc_name = 'resnetgem'; 63 | 64 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 65 | % extract features 66 | fprintf('Extracting features...\n'); 67 | 68 | % Whiting 69 | fprintf('Leaning PCA-whitening features...\n'); 70 | white_dir = [data_dir, 'Landmarks/query/']; 71 | white_list = dir([white_dir '*.jpg']); 72 | vecs = {}; 73 | for i = 1 : size(white_list, 1) 74 | img_name = [white_dir white_list(i).name]; 75 | img = imread(img_name); 76 | resnetgem = vecpostproc(descfun(img, net)); 77 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 78 | % load(load_name); 79 | vecs{i} = resnetgem; 80 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 81 | save(save_name, 'resnetgem'); 82 | end 83 | 84 | % Learn PCA 85 | fprintf('Learning PCA-whitening\n'); 86 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 87 | 88 | for i = 1 : size(datasets, 2) 89 | dataset = datasets{i}; 90 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 91 | if dataset(1) == 'L' 92 | % image files are expected under each dataset's folder %'test 93 | base_dir = [data_dir, dataset, '/base/']; 94 | base_list = dir([base_dir '*.jpg']); 95 | for i = 1 : size(base_list, 1) 96 | img_name = [base_dir base_list(i).name]; 97 | img = imread(img_name); 98 | resnetgem = vecpostproc(descfun(img, net)); 99 | resnetgem = vecpostproc(apply_whiten(resnetgem, Xm, eigvec, eigval)); % 100 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 101 | save(save_name, 'resnetgem'); 102 | end 103 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 104 | else 105 | % image files are expected under each dataset's folder %'test 106 | base_dir = [data_dir, dataset, '/base/']; 107 | query_dir = [data_dir, dataset, 
'/query/']; 108 | 109 | base_list = dir([base_dir '*.jpg']); 110 | query_list = dir([query_dir '*.jpg']); 111 | 112 | % Base 113 | for i = 1 : size(base_list, 1) 114 | img_name = [base_dir base_list(i).name]; 115 | img = imread(img_name); 116 | resnetgem = vecpostproc(descfun(img, net)); 117 | resnetgem = vecpostproc(apply_whiten(resnetgem, Xm, eigvec, eigval)); % 118 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 119 | save(save_name, 'resnetgem'); 120 | end 121 | 122 | % Query 123 | for i = 1 : size(query_list, 1) 124 | img_name = [query_dir query_list(i).name]; 125 | img = imread(img_name); 126 | resnetgem = vecpostproc(descfun(img, net)); 127 | resnetgem = vecpostproc(apply_whiten(resnetgem, Xm, eigvec, eigval)); % 128 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 129 | save(save_name, 'resnetgem'); 130 | end 131 | end 132 | end -------------------------------------------------------------------------------- /Extraction/GeM/extract_resnet_rgem.m: -------------------------------------------------------------------------------- 1 | [root] = fileparts(mfilename('fullpath')) ; 2 | % add paths from this package 3 | addpath(fullfile(root, 'cnnblocks')); 4 | addpath(fullfile(root, 'cnninit')); 5 | addpath(fullfile(root, 'cnntrain')); 6 | addpath(fullfile(root, 'cnnvecs')); 7 | addpath(fullfile(root, 'examples')); 8 | addpath(fullfile(root, 'whiten')); 9 | addpath(fullfile(root, 'utils')); 10 | addpath(fullfile(root, 'yael')); 11 | addpath(fullfile(root, 'helpers')); 12 | 13 | g = gpuDevice(4) 14 | reset(g) 15 | 16 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 17 | % matconvnet is a prerequisite 18 | % run vl_setupnn for your installation to avoid downloading and compiling again 19 | if exist('vl_nnconv') ~= 3 20 | cd matconvnet-1.0-beta25/matlab/ 21 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 22 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 23 | end 24 | vl_setupnn; 25 | cd ../../ 26 | end 27 | 28 | use_ms = 1; % use multi-scale representation, otherwise use single-scale 29 | use_rvec = 1; % use regional representation (R-MAC, R-GeM), otherwise use global (MAC, GeM) 30 | use_gpu = [1]; % use GPUs (array of GPUIDs), if empty use CPU 31 | 32 | network_file = './data/retrievalSfM120k-gem-resnet101.mat'; 33 | 34 | % Prepare function for desc extraction 35 | if ~use_rvec 36 | if ~use_ms 37 | descfun = @(x, y) cnn_vecms (x, y, 1); 38 | else 39 | descfun = @(x, y) cnn_vecms (x, y, [1, 1/sqrt(2), 1/2]); 40 | end 41 | else 42 | if ~use_ms 43 | descfun = @(x, y) cnn_vecrms (x, y, 3, 1); 44 | else 45 | descfun = @(x, y) cnn_vecrms (x, y, 3, [1, 1/sqrt(2), 1/2]); 46 | end 47 | end 48 | 49 | [~, network_name, ~] = fileparts(network_file); 50 | fprintf('>> %s: Evaluating CNN image retrieval...\n', network_name); 51 | 52 | load(network_file); 53 | net = dagnn.DagNN.loadobj(net); 54 | net.mode = 'test'; 55 | move(net, 'gpu') 56 | 57 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 58 | 59 | data_dir = '../dvsd/extractD/data/'; 60 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 61 | desc_name = 'resnetrgem'; 62 | 63 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 64 | % extract features 65 | fprintf('Extracting features...\n'); 66 | 67 | % Whiting 68 | fprintf('Leaning PCA-whitening features...\n'); 69 | white_dir = [data_dir, 
'Landmarks/query/']; 70 | white_list = dir([white_dir '*.jpg']); 71 | vecs = {}; 72 | for i = 1 : size(white_list, 1) 73 | img_name = [white_dir white_list(i).name]; 74 | img = imread(img_name); 75 | resnetrgem = vecpostproc(descfun(img, net)); 76 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 77 | % load(load_name); 78 | vecs{i} = resnetrgem; 79 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 80 | save(save_name, 'resnetrgem'); 81 | end 82 | 83 | % Learn PCA 84 | fprintf('Learning PCA-whitening\n'); 85 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 86 | 87 | for i = 1 : size(datasets, 2) 88 | dataset = datasets{i}; 89 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 90 | if dataset(1) == 'L' 91 | % image files are expected under each dataset's folder %'test 92 | base_dir = [data_dir, dataset, '/base/']; 93 | base_list = dir([base_dir '*.jpg']); 94 | for i = 1 : size(base_list, 1) 95 | img_name = [base_dir base_list(i).name]; 96 | img = imread(img_name); 97 | resnetrgem = vecpostproc(descfun(img, net)); 98 | resnetrgem = vecpostproc(apply_whiten(resnetrgem, Xm, eigvec, eigval)); % 99 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 100 | save(save_name, 'resnetrgem'); 101 | end 102 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 103 | else 104 | % image files are expected under each dataset's folder %'test 105 | base_dir = [data_dir, dataset, '/base/']; 106 | query_dir = [data_dir, dataset, '/query/']; 107 | 108 | base_list = dir([base_dir '*.jpg']); 109 | query_list = dir([query_dir '*.jpg']); 110 | 111 | % Base 112 | for i = 1 : size(base_list, 1) 113 | img_name = [base_dir base_list(i).name]; 114 | img = imread(img_name); 115 | resnetrgem = vecpostproc(descfun(img, net)); 116 | resnetrgem = vecpostproc(apply_whiten(resnetrgem, Xm, eigvec, eigval)); % 117 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 118 | save(save_name, 'resnetrgem'); 119 | end 120 | 121 | % Query 122 | for i = 1 : size(query_list, 1) 123 | img_name = [query_dir query_list(i).name]; 124 | img = imread(img_name); 125 | resnetrgem = vecpostproc(descfun(img, net)); 126 | resnetrgem = vecpostproc(apply_whiten(resnetrgem, Xm, eigvec, eigval)); % 127 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 128 | save(save_name, 'resnetrgem'); 129 | end 130 | end 131 | end -------------------------------------------------------------------------------- /Extraction/GeM/extract_vgg_gem.m: -------------------------------------------------------------------------------- 1 | [root] = fileparts(mfilename('fullpath')) ; 2 | % add paths from this package 3 | addpath(fullfile(root, 'cnnblocks')); 4 | addpath(fullfile(root, 'cnninit')); 5 | addpath(fullfile(root, 'cnntrain')); 6 | addpath(fullfile(root, 'cnnvecs')); 7 | addpath(fullfile(root, 'examples')); 8 | addpath(fullfile(root, 'whiten')); 9 | addpath(fullfile(root, 'utils')); 10 | addpath(fullfile(root, 'yael')); 11 | addpath(fullfile(root, 'helpers')); 12 | 13 | g = gpuDevice(3) 14 | reset(g) 15 | 16 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 17 | % matconvnet is a prerequisite 18 | % run vl_setupnn for your installation to avoid downloading and compiling again 19 | if exist('vl_nnconv') ~= 3 20 
| cd matconvnet-1.0-beta25/matlab/ 21 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 22 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 23 | end 24 | vl_setupnn; 25 | cd ../../ 26 | end 27 | 28 | use_ms = 1; % use multi-scale representation, otherwise use single-scale 29 | use_rvec = 0; % use regional representation (R-MAC, R-GeM), otherwise use global (MAC, GeM) 30 | use_gpu = [1]; % use GPUs (array of GPUIDs), if empty use CPU 31 | 32 | network_file = './data/retrievalSfM120k-gem-vgg.mat'; 33 | 34 | % Prepare function for desc extraction 35 | if ~use_rvec 36 | if ~use_ms 37 | descfun = @(x, y) cnn_vecms (x, y, 1); 38 | else 39 | descfun = @(x, y) cnn_vecms (x, y, [1, 1/sqrt(2), 1/2]); 40 | end 41 | else 42 | if ~use_ms 43 | descfun = @(x, y) cnn_vecrms (x, y, 3, 1); 44 | else 45 | descfun = @(x, y) cnn_vecrms (x, y, 3, [1, 1/sqrt(2), 1/2]); 46 | end 47 | end 48 | 49 | [~, network_name, ~] = fileparts(network_file); 50 | fprintf('>> %s: Evaluating CNN image retrieval...\n', network_name); 51 | 52 | load(network_file); 53 | net = dagnn.DagNN.loadobj(net); 54 | net.mode = 'test'; 55 | move(net, 'gpu') 56 | 57 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 58 | 59 | data_dir = '../dvsd/extractD/data/'; 60 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 61 | desc_name = 'vgggem'; 62 | 63 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 64 | % extract features 65 | fprintf('Extracting features...\n'); 66 | 67 | % Whiting 68 | fprintf('Leaning PCA-whitening features...\n'); 69 | white_dir = [data_dir, 'Landmarks/query/']; 70 | white_list = dir([white_dir '*.jpg']); 71 | vecs = {}; 72 | for i = 1 : size(white_list, 1) 73 | img_name = [white_dir white_list(i).name]; 74 | img = imread(img_name); 75 | vgggem = vecpostproc(descfun(img, net)); 76 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 77 | % load(load_name); 78 | vecs{i} = vgggem; 79 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 80 | save(save_name, 'vgggem'); 81 | end 82 | 83 | % Learn PCA 84 | fprintf('Learning PCA-whitening\n'); 85 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 86 | 87 | for i = 1 : size(datasets, 2) 88 | dataset = datasets{i}; 89 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 90 | if dataset(1) == 'L' 91 | % image files are expected under each dataset's folder %'test 92 | base_dir = [data_dir, dataset, '/base/']; 93 | base_list = dir([base_dir '*.jpg']); 94 | for i = 1 : size(base_list, 1) 95 | img_name = [base_dir base_list(i).name]; 96 | img = imread(img_name); 97 | vgggem = vecpostproc(descfun(img, net)); 98 | vgggem = vecpostproc(apply_whiten(vgggem, Xm, eigvec, eigval)); % 99 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 100 | save(save_name, 'vgggem'); 101 | end 102 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 103 | else 104 | % image files are expected under each dataset's folder %'test 105 | base_dir = [data_dir, dataset, '/base/']; 106 | query_dir = [data_dir, dataset, '/query/']; 107 | 108 | base_list = dir([base_dir '*.jpg']); 109 | query_list = dir([query_dir '*.jpg']); 110 | 111 | % Base 112 | for i = 1 : size(base_list, 1) 113 | img_name = [base_dir base_list(i).name]; 114 | img = imread(img_name); 115 | vgggem = 
vecpostproc(descfun(img, net)); 116 | vgggem = vecpostproc(apply_whiten(vgggem, Xm, eigvec, eigval)); % 117 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 118 | save(save_name, 'vgggem'); 119 | end 120 | 121 | % Query 122 | for i = 1 : size(query_list, 1) 123 | img_name = [query_dir query_list(i).name]; 124 | img = imread(img_name); 125 | vgggem = vecpostproc(descfun(img, net)); 126 | vgggem = vecpostproc(apply_whiten(vgggem, Xm, eigvec, eigval)); % 127 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 128 | save(save_name, 'vgggem'); 129 | end 130 | end 131 | end -------------------------------------------------------------------------------- /Extraction/GeM/extract_vgg_rgem.m: -------------------------------------------------------------------------------- 1 | [root] = fileparts(mfilename('fullpath')) ; 2 | % add paths from this package 3 | addpath(fullfile(root, 'cnnblocks')); 4 | addpath(fullfile(root, 'cnninit')); 5 | addpath(fullfile(root, 'cnntrain')); 6 | addpath(fullfile(root, 'cnnvecs')); 7 | addpath(fullfile(root, 'examples')); 8 | addpath(fullfile(root, 'whiten')); 9 | addpath(fullfile(root, 'utils')); 10 | addpath(fullfile(root, 'yael')); 11 | addpath(fullfile(root, 'helpers')); 12 | 13 | g = gpuDevice(3) 14 | reset(g) 15 | 16 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 17 | % matconvnet is a prerequisite 18 | % run vl_setupnn for your installation to avoid downloading and compiling again 19 | if exist('vl_nnconv') ~= 3 20 | cd matconvnet-1.0-beta25/matlab/ 21 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 22 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 23 | end 24 | vl_setupnn; 25 | cd ../../ 26 | end 27 | 28 | use_ms = 1; % use multi-scale representation, otherwise use single-scale 29 | use_rvec = 1; % use regional representation (R-MAC, R-GeM), otherwise use global (MAC, GeM) 30 | use_gpu = [1]; % use GPUs (array of GPUIDs), if empty use CPU 31 | 32 | network_file = './data/retrievalSfM120k-gem-vgg.mat'; 33 | 34 | % Prepare function for desc extraction 35 | if ~use_rvec 36 | if ~use_ms 37 | descfun = @(x, y) cnn_vecms (x, y, 1); 38 | else 39 | descfun = @(x, y) cnn_vecms (x, y, [1, 1/sqrt(2), 1/2]); 40 | end 41 | else 42 | if ~use_ms 43 | descfun = @(x, y) cnn_vecrms (x, y, 3, 1); 44 | else 45 | descfun = @(x, y) cnn_vecrms (x, y, 3, [1, 1/sqrt(2), 1/2]); 46 | end 47 | end 48 | 49 | [~, network_name, ~] = fileparts(network_file); 50 | fprintf('>> %s: Evaluating CNN image retrieval...\n', network_name); 51 | 52 | load(network_file); 53 | net = dagnn.DagNN.loadobj(net); 54 | net.mode = 'test'; 55 | move(net, 'gpu') 56 | 57 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 58 | 59 | data_dir = '../dvsd/extractD/data/'; 60 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 61 | desc_name = 'vggrgem'; 62 | 63 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 64 | % extract features 65 | fprintf('Extracting features...\n'); 66 | 67 | % Whiting 68 | fprintf('Leaning PCA-whitening features...\n'); 69 | white_dir = [data_dir, 'Landmarks/query/']; 70 | white_list = dir([white_dir '*.jpg']); 71 | vecs = {}; 72 | for i = 1 : size(white_list, 1) 73 | img_name = [white_dir white_list(i).name]; 74 | img = imread(img_name); 75 | vggrgem = vecpostproc(descfun(img, net)); 76 | % load_name = 
['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 77 | % load(load_name); 78 | vecs{i} = vggrgem; 79 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 80 | save(save_name, 'vggrgem'); 81 | end 82 | 83 | % Learn PCA 84 | fprintf('Learning PCA-whitening\n'); 85 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 86 | 87 | for i = 1 : size(datasets, 2) 88 | dataset = datasets{i}; 89 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 90 | if dataset(1) == 'L' 91 | % image files are expected under each dataset's folder %'test 92 | base_dir = [data_dir, dataset, '/base/']; 93 | base_list = dir([base_dir '*.jpg']); 94 | for i = 1 : size(base_list, 1) 95 | img_name = [base_dir base_list(i).name]; 96 | img = imread(img_name); 97 | vggrgem = vecpostproc(descfun(img, net)); 98 | vggrgem = vecpostproc(apply_whiten(vggrgem, Xm, eigvec, eigval)); % 99 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 100 | save(save_name, 'vggrgem'); 101 | end 102 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 103 | else 104 | % image files are expected under each dataset's folder %'test 105 | base_dir = [data_dir, dataset, '/base/']; 106 | query_dir = [data_dir, dataset, '/query/']; 107 | 108 | base_list = dir([base_dir '*.jpg']); 109 | query_list = dir([query_dir '*.jpg']); 110 | 111 | % Base 112 | for i = 1 : size(base_list, 1) 113 | img_name = [base_dir base_list(i).name]; 114 | img = imread(img_name); 115 | vggrgem = vecpostproc(descfun(img, net)); 116 | vggrgem = vecpostproc(apply_whiten(vggrgem, Xm, eigvec, eigval)); % 117 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 118 | save(save_name, 'vggrgem'); 119 | end 120 | 121 | % Query 122 | for i = 1 : size(query_list, 1) 123 | img_name = [query_dir query_list(i).name]; 124 | img = imread(img_name); 125 | vggrgem = vecpostproc(descfun(img, net)); 126 | vggrgem = vecpostproc(apply_whiten(vggrgem, Xm, eigvec, eigval)); % 127 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 128 | save(save_name, 'vggrgem'); 129 | end 130 | end 131 | end -------------------------------------------------------------------------------- /Extraction/MAC/extract_resnet_mac.m: -------------------------------------------------------------------------------- 1 | % Code for the methods presented in the paper 2 | % G. Tolias, R. Sicre and H. Jegou, Particular object retrieval with integral max-pooling of CNN activations, ICLR 2016. 3 | % This version of the code is not optimized to run efficiently 4 | % but to be easily readable and to reproduce the results of the paper 5 | % 6 | % Authored by G. Tolias, 2015. 
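MAC, as defined in the paper cited above, is channel-wise global max pooling of the (ReLU-ed) convolutional activations; mac.m / mac_act.m in this folder wrap that pooling around the network forward pass. The pooling itself, sketched on a toy activation map:

X = single(rand(14, 14, 2048));                         % toy H x W x K activation map
m = reshape(max(max(max(X, 0), [], 1), [], 2), [], 1);  % 2048 x 1 MAC descriptor
m = vecpostproc(m);                                     % post-process as the script below does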
7 | 8 | % if exist(save_name,'file') 9 | % fprintf('Pass '); 10 | % save_name 11 | % continue 12 | % end 13 | 14 | addpath('../utils'); 15 | 16 | g = gpuDevice(4) 17 | reset(g) 18 | 19 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 20 | 21 | if exist('aml') ~= 3 22 | mex -compatibleArrayDims aml.c 23 | end 24 | 25 | % matconvnet is a prerequisite 26 | % run vl_setupnn for your installation to avoid downloading and compiling again 27 | if exist('vl_nnconv') ~= 3 28 | cd matconvnet-1.0-beta25/matlab/ 29 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 30 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 31 | end 32 | vl_setupnn; 33 | cd ../../ 34 | end 35 | 36 | net = dagnn.DagNN.loadobj(load('imagenet-resnet-101-dag.mat')); 37 | net.mode = 'test'; 38 | net.conserveMemory = false; 39 | move(net, 'gpu') 40 | 41 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 42 | 43 | data_dir = '../dvsd/extractD/data/'; 44 | datasets = {'Landmarks'};%{'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 45 | desc_name = 'resnetmac'; 46 | 47 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 48 | % extract features 49 | fprintf('Extracting features...\n'); 50 | 51 | % Whiting 52 | fprintf('Leaning PCA-whitening features...\n'); 53 | white_dir = [data_dir, 'Landmarks/query/']; 54 | white_list = dir([white_dir '*.jpg']); 55 | vecs = {}; 56 | for i = 1 : size(white_list, 1) 57 | img_name = [white_dir white_list(i).name]; 58 | img = imread(img_name); 59 | resnetmac = vecpostproc(mac(img, net, 0)); 60 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 61 | % load(load_name); 62 | vecs{i} = resnetmac; 63 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 64 | save(save_name, 'resnetmac'); 65 | end 66 | 67 | % Learn PCA 68 | fprintf('Learning PCA-whitening\n'); 69 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 70 | 71 | for i = 1 : size(datasets, 2) 72 | dataset = datasets{i}; 73 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 74 | if dataset(1) == 'L' 75 | % image files are expected under each dataset's folder %'test 76 | base_dir = [data_dir, dataset, '/base/']; 77 | base_list = dir([base_dir '*.jpg']); 78 | for i = 1 : size(base_list, 1) 79 | img_name = [base_dir base_list(i).name]; 80 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 81 | img = imread(img_name); 82 | resnetmac = vecpostproc(mac(img, net, 0)); 83 | resnetmac = vecpostproc(apply_whiten(resnetmac, Xm, eigvec, eigval)); % 84 | save(save_name, 'resnetmac'); 85 | end 86 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 87 | else 88 | % image files are expected under each dataset's folder %'test 89 | base_dir = [data_dir, dataset, '/base/']; 90 | query_dir = [data_dir, dataset, '/query/']; 91 | 92 | base_list = dir([base_dir '*.jpg']); 93 | query_list = dir([query_dir '*.jpg']); 94 | 95 | % Base 96 | for i = 1 : size(base_list, 1) 97 | img_name = [base_dir base_list(i).name]; 98 | img = imread(img_name); 99 | resnetmac = vecpostproc(mac(img, net, 0)); 100 | resnetmac = vecpostproc(apply_whiten(resnetmac, Xm, eigvec, eigval)); % 101 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 102 | save(save_name, 'resnetmac'); 103 | end 104 | 105 | 
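Every extraction script in this repository repeats the same post-processing recipe seen here: learn PCA-whitening with yael_pca on the Landmarks query descriptors, then project each new descriptor with apply_whiten and re-normalize. Stripped of the file handling, the recurring pattern is simply:

[~, eigvec, eigval, Xm] = yael_pca(single(cell2mat(vecs)));   % vecs: cell array of column descriptors
v = vecpostproc(apply_whiten(v, Xm, eigvec, eigval));         % whiten + re-normalize one descriptor v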
% Query 106 | for i = 1 : size(query_list, 1) 107 | img_name = [query_dir query_list(i).name]; 108 | img = imread(img_name); 109 | resnetmac = vecpostproc(mac(img, net, 0)); 110 | resnetmac = vecpostproc(apply_whiten(resnetmac, Xm, eigvec, eigval)); % 111 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 112 | save(save_name, 'resnetmac'); 113 | end 114 | end 115 | end 116 | -------------------------------------------------------------------------------- /Extraction/MAC/extract_resnet_rmac.m: -------------------------------------------------------------------------------- 1 | % Code for the methods presented in the paper 2 | % G. Tolias, R. Sicre and H. Jegou, Particular object retrieval with integral max-pooling of CNN activations, ICLR 2016. 3 | % This version of the code is not optimized to run efficiently 4 | % but to be easily readable and to reproduce the results of the paper 5 | % 6 | % Authored by G. Tolias, 2015. 7 | 8 | addpath('../utils'); 9 | 10 | % g = gpuDevice(2) 11 | % reset(g) 12 | 13 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 14 | 15 | % if exist('aml') ~= 3 16 | % mex -compatibleArrayDims aml.c 17 | % end 18 | 19 | % % matconvnet is a prerequisite 20 | % % run vl_setupnn for your installation to avoid downloading and compiling again 21 | % if exist('vl_nnconv') ~= 3 22 | % cd matconvnet-1.0-beta25/matlab/ 23 | % if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 24 | % vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 25 | % end 26 | % vl_setupnn; 27 | % cd ../../ 28 | % end 29 | 30 | % net = dagnn.DagNN.loadobj(load('imagenet-resnet-101-dag.mat')); 31 | % net.mode = 'test'; 32 | % net.conserveMemory = false; 33 | % move(net, 'gpu') 34 | 35 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 36 | 37 | data_dir = '../dvsd/extractD/data/'; 38 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 39 | desc_name = 'resnetrmac'; 40 | 41 | % number of levels in the region pyramid of R-MAC 42 | L = 3; 43 | 44 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 45 | % extract features 46 | fprintf('Extracting features...\n'); 47 | 48 | % Whiting 49 | fprintf('Leaning PCA-whitening features...\n'); 50 | white_dir = [data_dir, 'Landmarks/query/']; 51 | white_list = dir([white_dir '*.jpg']); 52 | vecs = {}; 53 | for i = 1 : size(white_list, 1) 54 | img_name = [white_dir white_list(i).name]; 55 | img = imread(img_name); 56 | resnetrmac = vecpostproc(rmac_regionvec(img, net, L, 0)); 57 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 58 | % load(load_name); 59 | vecs{i} = resnetrmac; 60 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 61 | save(save_name, 'resnetrmac'); 62 | end 63 | 64 | % Learn PCA 65 | fprintf('Learning PCA-whitening\n'); 66 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 67 | 68 | for i = 1 : size(datasets, 2) 69 | dataset = datasets{i}; 70 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 71 | if dataset(1) == 'L' 72 | % image files are expected under each dataset's folder %'test 73 | base_dir = [data_dir, dataset, '/base/']; 74 | base_list = dir([base_dir '*.jpg']); 75 | for i = 1 : size(base_list, 1) 76 | %%%%%%%% 77 | img_name = [base_dir base_list(i).name]; 78 | img = imread(img_name); 79 | resnetrmac = 
vecpostproc(rmac_regionvec(img, net, L, 0)); 80 | resnetrmac = vecpostproc(apply_whiten(resnetrmac, Xm, eigvec, eigval)); % 81 | %%%%%%%% 82 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 83 | 84 | %%%%%%%% 85 | % load(save_name); 86 | resnetrmac = vecpostproc(sum(resnetrmac, 2)); 87 | %%%%%%%% 88 | 89 | save(save_name, 'resnetrmac'); 90 | end 91 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 92 | else 93 | % image files are expected under each dataset's folder %'test 94 | base_dir = [data_dir, dataset, '/base/']; 95 | query_dir = [data_dir, dataset, '/query/']; 96 | 97 | base_list = dir([base_dir '*.jpg']); 98 | query_list = dir([query_dir '*.jpg']); 99 | 100 | % Base 101 | for i = 1 : size(base_list, 1) 102 | %%%%%%%% 103 | img_name = [base_dir base_list(i).name]; 104 | img = imread(img_name); 105 | resnetrmac = vecpostproc(rmac_regionvec(img, net, L, 0)); 106 | resnetrmac = vecpostproc(apply_whiten(resnetrmac, Xm, eigvec, eigval)); % 107 | %%%%%%%% 108 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 109 | 110 | %%%%%%%% 111 | % load(save_name); 112 | resnetrmac = vecpostproc(sum(resnetrmac, 2)); 113 | %%%%%%%% 114 | 115 | save(save_name, 'resnetrmac'); 116 | end 117 | 118 | % Query 119 | for i = 1 : size(query_list, 1) 120 | %%%%%%%% 121 | img_name = [query_dir query_list(i).name]; 122 | img = imread(img_name); 123 | resnetrmac = vecpostproc(rmac_regionvec(img, net, L, 0)); 124 | resnetrmac = vecpostproc(apply_whiten(resnetrmac, Xm, eigvec, eigval)); % 125 | %%%%%%%% 126 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 127 | 128 | %%%%%%%% 129 | % load(save_name); 130 | resnetrmac = vecpostproc(sum(resnetrmac, 2)); 131 | %%%%%%%% 132 | 133 | save(save_name, 'resnetrmac'); 134 | end 135 | end 136 | end 137 | -------------------------------------------------------------------------------- /Extraction/MAC/extract_vgg_mac.m: -------------------------------------------------------------------------------- 1 | % Code for the methods presented in the paper 2 | % G. Tolias, R. Sicre and H. Jegou, Particular object retrieval with integral max-pooling of CNN activations, ICLR 2016. 3 | % This version of the code is not optimized to run efficiently 4 | % but to be easily readable and to reproduce the results of the paper 5 | % 6 | % Authored by G. Tolias, 2015. 
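For the R-MAC variants, the per-image output of `rmac_regionvec` is a matrix with one column per region, and the scripts reduce it to a single descriptor by summing the whitened region vectors and re-normalizing (`vecpostproc(sum(..., 2))`). Below is a minimal NumPy sketch of that aggregation, with hypothetical names and an optional whitening callback (see the whitening sketch earlier); it is illustrative, not the repository's code.

```python
import numpy as np

def l2n(X, eps=1e-12):
    """Column-wise L2 normalization, roughly what vecpostproc does by default."""
    return X / (np.linalg.norm(X, axis=0, keepdims=True) + eps)

def aggregate_rmac(region_vecs, whiten=None):
    """region_vecs: d x R matrix of regional MAC vectors for one image.
    Mirrors the script: normalize regions -> whiten -> sum over regions -> normalize."""
    V = l2n(region_vecs)
    if whiten is not None:                      # e.g. the learned PCA-whitening
        V = l2n(whiten(V))
    return l2n(V.sum(axis=1, keepdims=True))    # single d x 1 R-MAC descriptor

# toy usage: 20 regions of 2048-D vectors from one image
desc = aggregate_rmac(np.random.rand(2048, 20))
```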
7 | 8 | addpath('../utils'); 9 | 10 | g = gpuDevice(2) 11 | reset(g) 12 | 13 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 14 | 15 | if exist('aml') ~= 3 16 | mex -compatibleArrayDims aml.c 17 | end 18 | 19 | % matconvnet is a prerequisite 20 | % run vl_setupnn for your installation to avoid downloading and compiling again 21 | if exist('vl_nnconv') ~= 3 22 | cd matconvnet-1.0-beta25/matlab/ 23 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 24 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 25 | end 26 | vl_setupnn; 27 | cd ../../ 28 | end 29 | 30 | % choose pre-trained CNN model 31 | modelfn = 'imagenet-vgg-verydeep-16.mat'; 32 | lid = 31; % use VGG 33 | % parameters of the method 34 | use_gpu = 1; 35 | % matconvnet is a prerequisite 36 | % run vl_setupnn for your installation to avoid downloading and compiling again 37 | if exist('vl_nnconv') ~= 3 38 | cd matconvnet-1.0-beta25/matlab/ 39 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 40 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 41 | end 42 | vl_setupnn; 43 | cd ../../ 44 | end 45 | 46 | net = load(modelfn); 47 | net.layers = {net.layers{1:lid}}; % remove fully connected layers 48 | 49 | if use_gpu 50 | net = vl_simplenn_move(net, 'gpu'); 51 | end 52 | 53 | % compatibility with matconvnet-1.0-beta25 (otherwise tested with matconvnet-1.0-beta15) 54 | for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'conv'), net.layers{i}.dilate=[1 1]; net.layers{i}.opts={}; end, end 55 | for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'relu'), net.layers{i}.leak=0; end, end 56 | for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'pool'), net.layers{i}.opts={}; end, end 57 | 58 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 59 | 60 | data_dir = '../dvsd/extractD/data/'; 61 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 62 | desc_name = 'vggmac'; 63 | 64 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 65 | % extract features 66 | fprintf('Extracting features...\n'); 67 | 68 | % Whiting 69 | fprintf('Leaning PCA-whitening features...\n'); 70 | white_dir = [data_dir, 'Landmarks/query/']; 71 | white_list = dir([white_dir '*.jpg']); 72 | vecs = {}; 73 | for i = 1 : size(white_list, 1) 74 | img_name = [white_dir white_list(i).name]; 75 | img = imread(img_name); 76 | vggmac = vecpostproc(mac(img, net, 1)); 77 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 78 | % load(load_name); 79 | vecs{i} = vggmac; 80 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 81 | save(save_name, 'vggmac'); 82 | end 83 | 84 | % Learn PCA 85 | fprintf('Learning PCA-whitening\n'); 86 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 87 | 88 | for i = 1 : size(datasets, 2) 89 | dataset = datasets{i}; 90 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 91 | if dataset(1) == 'L' 92 | % image files are expected under each dataset's folder %'test 93 | base_dir = [data_dir, dataset, '/base/']; 94 | base_list = dir([base_dir '*.jpg']); 95 | for i = 1 : size(base_list, 1) 96 | img_name = [base_dir base_list(i).name]; 97 | img = imread(img_name); 98 | vggmac = vecpostproc(mac(img, net, 1)); 99 | vggmac = vecpostproc(apply_whiten(vggmac, Xm, eigvec, eigval)); % 100 | save_name = ['../sdd/dtod/data/train/', dataset, 
'/base/', desc_name, '/', base_list(i).name, '.mat'] 101 | save(save_name, 'vggmac'); 102 | end 103 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 104 | else 105 | % image files are expected under each dataset's folder %'test 106 | base_dir = [data_dir, dataset, '/base/']; 107 | query_dir = [data_dir, dataset, '/query/']; 108 | 109 | base_list = dir([base_dir '*.jpg']); 110 | query_list = dir([query_dir '*.jpg']); 111 | 112 | % Base 113 | for i = 1 : size(base_list, 1) 114 | img_name = [base_dir base_list(i).name]; 115 | img = imread(img_name); 116 | vggmac = vecpostproc(mac(img, net, 1)); 117 | vggmac = vecpostproc(apply_whiten(vggmac, Xm, eigvec, eigval)); % 118 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 119 | save(save_name, 'vggmac'); 120 | end 121 | 122 | % Query 123 | for i = 1 : size(query_list, 1) 124 | img_name = [query_dir query_list(i).name]; 125 | img = imread(img_name); 126 | vggmac = vecpostproc(mac(img, net, 1)); 127 | vggmac = vecpostproc(apply_whiten(vggmac, Xm, eigvec, eigval)); % 128 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 129 | save(save_name, 'vggmac'); 130 | end 131 | end 132 | end 133 | -------------------------------------------------------------------------------- /Extraction/MAC/extract_vgg_rmac.m: -------------------------------------------------------------------------------- 1 | % Code for the methods presented in the paper 2 | % G. Tolias, R. Sicre and H. Jegou, Particular object retrieval with integral max-pooling of CNN activations, ICLR 2016. 3 | % This version of the code is not optimized to run efficiently 4 | % but to be easily readable and to reproduce the results of the paper 5 | % 6 | % Authored by G. Tolias, 2015. 
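The `mac(img, net, flag)` helper used by these scripts (defined further down in `mac.m`/`mac_act.m`) turns the last convolutional feature map into a global descriptor by taking, for every channel, the maximum activation over all spatial positions. A one-function NumPy sketch of that pooling, assuming an H x W x K activation array; names are illustrative only.

```python
import numpy as np

def mac_pool(X):
    """MAC pooling: per-channel spatial maximum of an H x W x K activation tensor."""
    assert X.ndim == 3, "expects a single feature map, H x W x K"
    return X.reshape(-1, X.shape[2]).max(axis=0)     # K-dimensional descriptor

# toy usage on a random stand-in for a conv5 feature map
desc = mac_pool(np.random.rand(30, 40, 512))         # shape (512,)
```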
7 | 8 | addpath('../utils'); 9 | 10 | % g = gpuDevice(4) 11 | % reset(g) 12 | 13 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 14 | 15 | % if exist('aml') ~= 3 16 | % mex -compatibleArrayDims aml.c 17 | % end 18 | 19 | % % matconvnet is a prerequisite 20 | % % run vl_setupnn for your installation to avoid downloading and compiling again 21 | % if exist('vl_nnconv') ~= 3 22 | % cd matconvnet-1.0-beta25/matlab/ 23 | % if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 24 | % vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 25 | % end 26 | % vl_setupnn; 27 | % cd ../../ 28 | % end 29 | 30 | % % choose pre-trained CNN model 31 | % modelfn = 'imagenet-vgg-verydeep-16.mat'; 32 | % lid = 31; % use VGG 33 | % % parameters of the method 34 | % use_gpu = 1; 35 | % % matconvnet is a prerequisite 36 | % % run vl_setupnn for your installation to avoid downloading and compiling again 37 | % if exist('vl_nnconv') ~= 3 38 | % cd matconvnet-1.0-beta25/matlab/ 39 | % if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 40 | % vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 41 | % end 42 | % vl_setupnn; 43 | % cd ../../ 44 | % end 45 | 46 | % net = load(modelfn); 47 | % net.layers = {net.layers{1:lid}}; % remove fully connected layers 48 | 49 | % if use_gpu 50 | % net = vl_simplenn_move(net, 'gpu'); 51 | % end 52 | 53 | % % compatibility with matconvnet-1.0-beta25 (otherwise tested with matconvnet-1.0-beta15) 54 | % for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'conv'), net.layers{i}.dilate=[1 1]; net.layers{i}.opts={}; end, end 55 | % for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'relu'), net.layers{i}.leak=0; end, end 56 | % for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'pool'), net.layers{i}.opts={}; end, end 57 | 58 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 59 | 60 | data_dir = '../dvsd/extractD/data/'; 61 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 62 | desc_name = 'vggrmac'; 63 | 64 | % number of levels in the region pyramid of R-MAC 65 | L = 3; 66 | 67 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 68 | % extract features 69 | fprintf('Extracting features...\n'); 70 | 71 | % Whiting 72 | fprintf('Leaning PCA-whitening features...\n'); 73 | white_dir = [data_dir, 'Landmarks/query/']; 74 | white_list = dir([white_dir '*.jpg']); 75 | vecs = {}; 76 | for i = 1 : size(white_list, 1) 77 | img_name = [white_dir white_list(i).name]; 78 | img = imread(img_name); 79 | vggrmac = vecpostproc(rmac_regionvec(img, net, L, 1)); 80 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 81 | % load(load_name); 82 | vecs{i} = vggrmac; 83 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 84 | save(save_name, 'vggrmac'); 85 | end 86 | 87 | % Learn PCA 88 | fprintf('Learning PCA-whitening\n'); 89 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 90 | 91 | for i = 1 : size(datasets, 2) 92 | dataset = datasets{i}; 93 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 94 | if dataset(1) == 'L' 95 | % image files are expected under each dataset's folder %'test 96 | base_dir = [data_dir, dataset, '/base/']; 97 | base_list = dir([base_dir '*.jpg']); 98 | for i = 1 : size(base_list, 1) 99 | img_name = [base_dir base_list(i).name]; 100 | img = imread(img_name); 101 | 
vggrmac = vecpostproc(rmac_regionvec(img, net, L, 1)); 102 | vggrmac = vecpostproc(apply_whiten(vggrmac, Xm, eigvec, eigval)); % 103 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 104 | 105 | %%%%%%%% 106 | % load(save_name); 107 | vggrmac = vecpostproc(sum(vggrmac, 2)); 108 | % size(vggrmac) 109 | %%%%%%%% 110 | 111 | save(save_name, 'vggrmac'); 112 | end 113 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 114 | else 115 | % image files are expected under each dataset's folder %'test 116 | base_dir = [data_dir, dataset, '/base/']; 117 | query_dir = [data_dir, dataset, '/query/']; 118 | 119 | base_list = dir([base_dir '*.jpg']); 120 | query_list = dir([query_dir '*.jpg']); 121 | 122 | % Base 123 | for i = 1 : size(base_list, 1) 124 | img_name = [base_dir base_list(i).name]; 125 | img = imread(img_name); 126 | vggrmac = vecpostproc(rmac_regionvec(img, net, L, 1)); 127 | vggrmac = vecpostproc(apply_whiten(vggrmac, Xm, eigvec, eigval)); % 128 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 129 | 130 | %%%%%%%% 131 | % load(save_name); 132 | vggrmac = vecpostproc(sum(vggrmac, 2)); 133 | %%%%%%%% 134 | 135 | save(save_name, 'vggrmac'); 136 | end 137 | 138 | % Query 139 | for i = 1 : size(query_list, 1) 140 | img_name = [query_dir query_list(i).name]; 141 | img = imread(img_name); 142 | vggrmac = vecpostproc(rmac_regionvec(img, net, L, 1)); 143 | vggrmac = vecpostproc(apply_whiten(vggrmac, Xm, eigvec, eigval)); % 144 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 145 | 146 | %%%%%%%% 147 | % load(save_name); 148 | vggrmac = vecpostproc(sum(vggrmac, 2)); 149 | %%%%%%%% 150 | 151 | save(save_name, 'vggrmac'); 152 | end 153 | end 154 | end 155 | -------------------------------------------------------------------------------- /Extraction/MAC/mac.m: -------------------------------------------------------------------------------- 1 | % 2 | % Authored by G. Tolias, 2015. 3 | % 4 | function [x, X] = MAC(I, net, flag) 5 | 6 | if size(I,3) == 1 7 | I = repmat(I, [1 1 3]); 8 | end 9 | 10 | sz = size(I); 11 | if sz(1) >= 1500 || sz(2) >= 1500 12 | I = imresize(I, [1000, 1000]); 13 | end 14 | if sz(1) < 224 || sz(2) < 224 15 | I = imresize(I, [224, 224]); 16 | end 17 | 18 | I = single(I) - mean(net.meta.normalization.averageImage(:)); 19 | % I = single(ones(1000, 1000, 3)); 20 | % sz = size(I) 21 | % vgg 22 | if flag == 1 23 | if ~isa(net.layers{1}.weights{1}, 'gpuArray') 24 | rnet = vl_simplenn(net, I); 25 | X = max(rnet(end).x, 0); 26 | else 27 | rnet = vl_simplenn(net, gpuArray(I)); 28 | X = gather(max(rnet(end).x, 0)); 29 | end 30 | end 31 | 32 | %%%%%%%%%%%%%%%%%%%%%%%%%%%% 33 | % resnet 34 | if flag == 0 35 | I = gpuArray(I); 36 | net.eval({'data', I}); 37 | X = gather(max(net.vars(net.getVarIndex('pool5')).value, 0)); 38 | end 39 | %%%%%%%%%%%%%%%%%%%%%%%%%%%% 40 | % X 41 | % size(X) 42 | x = mac_act(X); 43 | 44 | -------------------------------------------------------------------------------- /Extraction/MAC/mac_act.m: -------------------------------------------------------------------------------- 1 | % 2 | % Authored by G. Tolias, 2015. 
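`mac.m` above, like the other extraction helpers in this repository (`rmac_regionvec.m`, `spoc.m`), prepares the input image the same way before the forward pass: grayscale images are replicated to three channels, images with a side of 1500 pixels or more are shrunk to 1000 x 1000, images with a side under 224 pixels are upscaled to 224 x 224 (both checks made against the original size), and the scalar mean pixel of the model is subtracted. A NumPy sketch of those rules; the helper names and the mean value are illustrative only.

```python
import numpy as np

def target_size(h, w):
    """Resize rule shared by the extraction helpers; None keeps the original size."""
    target = None
    if h >= 1500 or w >= 1500:       # very large images are shrunk
        target = (1000, 1000)
    if h < 224 or w < 224:           # tiny images are upscaled (checked on original size)
        target = (224, 224)
    return target

def to_network_input(img, mean_pixel):
    """Grayscale -> 3 channels, cast to float, subtract the scalar mean pixel
    (mean(net.meta.normalization.averageImage(:)) in the MATLAB scripts)."""
    if img.ndim == 2:
        img = np.repeat(img[:, :, None], 3, axis=2)
    return img.astype(np.float32) - float(mean_pixel)

# toy usage with an illustrative mean value
print(target_size(2000, 800))                                    # -> (1000, 1000)
x = to_network_input(np.zeros((300, 400), dtype=np.uint8), 122.0)
```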
3 | % 4 | function x = MAC_act(x) 5 | 6 | if ~max(size(x, 1), size(x, 2)) 7 | x = zeros(size(x, 3), 1, class(x)); 8 | return; 9 | end 10 | 11 | x = reshape(max(max(x, [], 1), [], 2), [size(x,3) 1]); -------------------------------------------------------------------------------- /Extraction/MAC/rmac_regionvec.m: -------------------------------------------------------------------------------- 1 | % 2 | % Authored by G. Tolias, 2015. 3 | % 4 | function [vecs, X] = rmac_regionvec(I, net, L, flag) 5 | 6 | if size(I,3) == 1 7 | I = repmat(I, [1 1 3]); 8 | end 9 | 10 | sz = size(I); 11 | if sz(1) >= 1500 || sz(2) >= 1500 12 | I = imresize(I, [1000, 1000]); 13 | end 14 | if sz(1) < 224 || sz(2) < 224 15 | I = imresize(I, [224, 224]); 16 | end 17 | 18 | I = single(I) - mean(net.meta.normalization.averageImage(:)); 19 | 20 | % vgg 21 | if flag == 1 22 | if ~isa(net.layers{1}.weights{1}, 'gpuArray') 23 | rnet = vl_simplenn(net, I); 24 | X = max(rnet(end).x, 0); 25 | else 26 | rnet = vl_simplenn(net, gpuArray(I)); 27 | X = gather(max(rnet(end).x, 0)); 28 | end 29 | end 30 | 31 | %%%%%%%%%%%%%%%%%%%%%%%%%%%% 32 | % resnet 33 | if flag == 0 34 | I = gpuArray(I); 35 | net.eval({'data', I}); 36 | X = gather(max(net.vars(net.getVarIndex('pool5')).value, 0)); 37 | end 38 | %%%%%%%%%%%%%%%%%%%%%%%%%%%% 39 | 40 | vecs = rmac_regionvec_act(X, L); 41 | -------------------------------------------------------------------------------- /Extraction/MAC/rmac_regionvec_act.m: -------------------------------------------------------------------------------- 1 | % 2 | % Authored by G. Tolias, 2015. 3 | % 4 | function vecs = rmac_regionvec_act(X, L) 5 | 6 | ovr = 0.4; % desired overlap of neighboring regions 7 | steps = [2 3 4 5 6 7]; % possible regions for the long dimension 8 | 9 | W = size(X, 2); 10 | H = size(X, 1); 11 | 12 | w = min([W H]); 13 | w2 = floor(w/2 -1); 14 | 15 | b = (max(H, W)-w)./(steps-1); 16 | [~, idx] = min(abs(((w.^2 - w.*b)./w.^2)-ovr)); % steps(idx) regions for long dimension 17 | 18 | % region overplus per dimension 19 | Wd = 0; 20 | Hd = 0; 21 | if H < W 22 | Wd = idx; 23 | elseif H > W 24 | Hd = idx; 25 | end 26 | 27 | vecs = []; 28 | 29 | for l = 1:L 30 | 31 | wl = floor(2*w./(l+1)); 32 | wl2 = floor(wl/2 - 1); 33 | 34 | b = (W-wl)./(l+Wd-1); 35 | if isnan(b), b = 0; end % for the first level 36 | cenW = floor(wl2 + [0:l-1+Wd]*b) -wl2; % center coordinates 37 | b = (H-wl)./(l+Hd-1); 38 | if isnan(b), b = 0; end % for the first level 39 | cenH = floor(wl2 + [0:l-1+Hd]*b) - wl2; % center coordinates 40 | 41 | for i_ = cenH 42 | for j_ = cenW 43 | R = X(i_+[1:wl],j_+[1:wl],:); 44 | if ~min(size(R)) 45 | continue; 46 | end 47 | x = mac_act(R); % get mac per region 48 | vecs = [vecs, x]; 49 | end 50 | end 51 | 52 | end 53 | 54 | -------------------------------------------------------------------------------- /Extraction/README.md: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | - matlab (for extracting and aggregating features) 3 | - python2, python3, and g++ (for mAP evaluation) 4 | - tensorflow (for extracting DELF) 5 | - matconvnet (for extracting features) 6 | 7 | # Extraction 8 | 1. Download the images of test datasets ([Holidays](http://lear.inrialpes.fr/people/jegou/data.php#holidays), [Oxford5k](http://www.robots.ox.ac.uk/~vgg/data/oxbuildings/), [Paris6k](http://www.robots.ox.ac.uk/~vgg/data/parisbuildings/)). And each dataset is seperated to `galary` for searching and `query` for querying. 9 | 2. 
Extract different types of features by the following steps. 10 | 11 | ## SIFT 12 | 1. Set up [VLFeat](http://www.vlfeat.org/install-matlab.html) for matlab: download [VLFeat binary package](http://www.vlfeat.org/download/vlfeat-0.9.21-bin.tar.gz) and unzip it. 13 | 2. Run `extract_sift.m` by assigning the **image_dir**, **save_dir** in the code. The **image_dir** is the dataset's abosolute path, and **save_dir** is the path of extrected features. 14 | > matlab 15 | > \>\> extract_sift 16 | 3. After extracting the local features, revise the data path and save path in `aggre_sift_fv.m` and run it for aggregating SIFT by FV. Also `aggre_sift_vlad.m` by VLAD. 17 | > \>\> aggre_sift_fv 18 | > \>\> aggre_sift_vlad 19 | 20 | ## DELF 21 | 1. Set up [DELF](https://github.com/tensorflow/models/tree/master/research/delf). 22 | 2. Generate the text file for **list_images_path**. 23 | > python imagelist.py -dir x 24 | 3. Run `extract_delf.py` for extracting. 25 | > python extract_delf.py --list_images_path x.txt --output_dir x 26 | 4. Run `aggre_delf_fv.m` for aggregating DELF by FV, and `aggre_delf_vlad.m` by VLAD. 27 | > matlab 28 | > \>\> aggre_delf_fv 29 | > \>\> aggre_delf_vlad 30 | 5. Convert the files to .mat. 31 | > python convert.py 32 | 33 | ## MAC 34 | 1. Set up [Matconvnet](http://www.vlfeat.org/matconvnet/). 35 | 2. Download the pre-trained models from the Matconvnet site: [vgg-16](http://www.vlfeat.org/matconvnet/models/imagenet-vgg-verydeep-16.mat) and [resnet-101](http://www.vlfeat.org/matconvnet/models/imagenet-resnet-101-dag.mat). 36 | 3. Run the extraction for V-MAC, V-rMAC, R-MAC and R-rMAC. 37 | > matlab 38 | > \>\> extract_vgg_mac 39 | > \>\> extract_vgg_rmac 40 | > \>\> extract_resnet_mac 41 | > \>\> extract_resnet_rmac 42 | 43 | ## CroW 44 | 1. The same as Step 1 above. 45 | 2. The same as Step 2 above. 46 | 3. Run the extraction for V-CroW and R-CroW. 47 | > matlab 48 | > \>\> extract_vgg_crow 49 | > \>\> extract_resnet_crow 50 | 51 | ## SPoC 52 | 1. The same as Step 1 above. 53 | 2. The same as Step 2 above. 54 | 3. Run the extraction for V-SPoC and R-SPoC. 55 | > matlab 56 | > \>\> extract_vgg_spoc 57 | > \>\> extract_resnet_spoc 58 | 59 | ## GeM 60 | 1. Download and set up the official implementation of [GeM](https://github.com/filipradenovic/cnnimageretrieval). 61 | 2. Run the extraction for V-GeM, V-rGeM, R-GeM and R-rGeM. 62 | > matlab 63 | > \>\> extract_vgg_gem 64 | > \>\> extract_vgg_rgem 65 | > \>\> extract_resnet_gem 66 | > \>\> extract_resnet_rgem 67 | 68 | # Evaluation 69 | After extraction, we use the official implementation to evaluate the mAP of test datasets ([Holidays](http://lear.inrialpes.fr/people/jegou/data.php#holidays), [Oxford5k](http://www.robots.ox.ac.uk/~vgg/data/oxbuildings/), [Paris6k](http://www.robots.ox.ac.uk/~vgg/data/parisbuildings/)). 70 | 71 | ## Holidays 72 | 1. Extract the features for the galary data and query data. 73 | 2. Download the [Evaluation Package](https://lear.inrialpes.fr/~jegou/code/eval_holidays.tgz) from the official site and unzip it. Then, move the `holidays_images.dat` and `holidays_map.py` to the same path with file `test_Holidays.py`. 74 | 3. Run the brute-force retrieval for features by assigning the **feature_name**, **feature_dim**, **galary_path**, **query_path**. 
The **feature_name** is the type of features to be tested, the **feature_dim** is the dimension of the feature, the **galary_path** is the feature path of extracted features for the images to be retrieved, and the **query_path** is the feature path of extracted features for the query images. 75 | > python test_Holidays.py --feature_name x --feature_dim x --galary_path x --query_path x 76 | 77 | ## Oxford5k 78 | 1. Extract the features for the galary data and query data. 79 | 2. Download the [Groundtruth](http://www.robots.ox.ac.uk/~vgg/data/oxbuildings/gt_files_170407.tgz) from the official site, and unzip this file to the same path with `test_Oxford5k.py`. Then, rename the file by using `mv gt_files_170407 Oxford5k_gnd`. 80 | 3. Download [C++ code](http://www.robots.ox.ac.uk/~vgg/data/oxbuildings/compute_ap.cpp) from the official site. 81 | 4. Compile the `compute_ap.cpp` file by using `g++ -O compute_ap.cpp -o compute_ap`. 82 | 5. Run the brute-force retrieval for features by assigning the **feature_name**, **feature_dim**, **galary_path**, **query_path**. The **feature_name** is the type of features to be tested, the **feature_dim** is the dimension of the feature, the **galary_path** is the feature path of extracted features for the images to be retrieved, and the **query_path** is the feature path of extracted features for the query images. 83 | > python test_Oxford5k.py --feature_name x --feature_dim x --galary_path x --query_path x 84 | 85 | ## Paris6k 86 | 1. Extract the features for the galary data and query data. 87 | 2. Download the [Groundtruth](http://www.robots.ox.ac.uk/~vgg/data/parisbuildings/paris_120310.tgz) from the official site, and unzip this file to the same path with `test_Paris6k.py`. Then, rename the file by using `mv paris_120310 Paris6k_gnd`. 88 | 3. Run the brute-force retrieval for features by assigning the **feature_name**, **feature_dim**, **galary_path**, **query_path**. The **feature_name** is the type of features to be tested, the **feature_dim** is the dimension of the feature, the **galary_path** is the feature path of extracted features for the images to be retrieved, and the **query_path** is the feature path of extracted features for the query images. 
89 | > python test_Paris6k.py --feature_name x --feature_dim x --galary_path x --query_path x -------------------------------------------------------------------------------- /Extraction/SIFT/aggre_sift_fv.m: -------------------------------------------------------------------------------- 1 | %% 2 | clc;clear; 3 | 4 | %% 5 | run('../vlfeat/toolbox/vl_setup'); 6 | vl_version verbose 7 | addpath('../utils'); 8 | 9 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 10 | train_num = 4000; 11 | 12 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 13 | 14 | train_dir = ['../../sdd/dtod/data/train/Landmarks/query/sift/']; 15 | train_list = dir([train_dir '*.mat']); 16 | 17 | % Train VLAD|FV & PCA 18 | train_sift_descriptors = {}; 19 | fprintf('Loading train sift...\n'); 20 | for i = 1 : train_num 21 | sift_name = [train_dir train_list(i).name]; 22 | load(sift_name) 23 | 24 | sift = vecs_normalize(sift, 1); 25 | sift = sift .^ (1.0/2); 26 | 27 | train_sift_descriptors{i} = sift; 28 | end 29 | fprintf('Loading done, num: %d...\n', train_num); 30 | 31 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 32 | 33 | % Train GMM 34 | k = 32; % number of GMMs 35 | fprintf('Training GMM, k = %d...\n', k); 36 | all_descriptors = [train_sift_descriptors{:}]; 37 | [means, covariances, priors] = vl_gmm(all_descriptors, k); 38 | 39 | fprintf('Aggregation train...\n'); 40 | vecs = {}; 41 | for i = 1 : numel(train_sift_descriptors) 42 | % Encode using siftfv 43 | siftfv = vecpostproc(vl_fisher(train_sift_descriptors{i}, means, covariances, priors)); 44 | siftfv = siftfv';%' 45 | img_name = [train_dir train_list(i).name]; 46 | fprintf('Encoding siftfv %d: %s\n', i, img_name); 47 | fv_name = ['../../sdd/dtod/data/train/Landmarks/query/siftfv/' train_list(i).name]; 48 | save(fv_name, 'siftfv'); 49 | % load(fv_name); 50 | vecs{i} = siftfv; 51 | end 52 | 53 | clear train_sift_descriptors; 54 | 55 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 56 | 57 | fprintf('Learning PCA-whitening\n'); 58 | [~, eigvec, eigval, Xm] = yael_pca(cell2mat(vecs')'); 59 | 60 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 61 | 62 | for i = 1 : size(datasets, 2) 63 | dataset = datasets{i}; 64 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 65 | if dataset(1) == 'L' 66 | base_dir = ['../../sdd/dtod/data/train/Landmarks/base/sift/']; 67 | base_list = dir([base_dir '*.mat']); 68 | 69 | % Base 70 | fprintf('Aggre test base sift %s...\n', dataset); 71 | for i = 1 : size(base_list, 1) 72 | sift_name = [base_dir base_list(i).name]; 73 | load(sift_name) 74 | 75 | sift = vecs_normalize(sift, 1); 76 | sift = sift .^ (1.0/2); 77 | 78 | % Aggre Base 79 | siftfv = vecpostproc(vl_fisher(sift, means, covariances, priors)); 80 | siftfv = vecpostproc(apply_whiten(siftfv, Xm, eigvec, eigval, 2048)); 81 | siftfv = siftfv'; 82 | img_name = [base_dir base_list(i).name]; 83 | fprintf('Encoding siftfv %d: %s\n', i, img_name); 84 | fv_name = ['../../sdd/dtod/data/train/Landmarks/base/siftfv/' base_list(i).name]; 85 | save(fv_name, 'siftfv'); 86 | end 87 | fprintf('Aggre base done, num: %d...\n', size(base_list, 1)); 88 | 89 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 90 | else 91 | base_dir = ['../../sdd/dtod/data/test/', dataset, '/base/sift/']; 92 | base_list = dir([base_dir '*.mat']); 93 | 94 | query_dir = ['../../sdd/dtod/data/test/', dataset, 
'/query/sift/']; 95 | query_list = dir([query_dir '*.mat']); 96 | 97 | % Base 98 | fprintf('Aggre test base sift %s...\n', dataset); 99 | for i = 1 : size(base_list, 1) 100 | sift_name = [base_dir base_list(i).name]; 101 | load(sift_name) 102 | 103 | sift = vecs_normalize(sift, 1); 104 | sift = sift .^ (1.0/2); 105 | 106 | % Aggre Base 107 | siftfv = vecpostproc(vl_fisher(sift, means, covariances, priors)); 108 | siftfv = vecpostproc(apply_whiten (siftfv, Xm, eigvec, eigval, 2048)); 109 | siftfv = siftfv'; 110 | img_name = [base_dir base_list(i).name]; 111 | fprintf('Encoding siftfv %d: %s\n', i, img_name); 112 | fv_name = ['../../sdd/dtod/data/test/', dataset, '/base/siftfv/' base_list(i).name]; 113 | save(fv_name, 'siftfv'); 114 | end 115 | fprintf('Aggre test base done, num: %d...\n', size(base_list, 1)); 116 | 117 | % Query 118 | fprintf('Aggre test query sift %s...\n', dataset); 119 | for i = 1 : size(query_list, 1) 120 | sift_name = [query_dir query_list(i).name]; 121 | load(sift_name) 122 | 123 | sift = vecs_normalize(sift, 1); 124 | sift = sift .^ (1.0/2); 125 | 126 | % Aggre Query 127 | siftfv = vecpostproc(vl_fisher(sift, means, covariances, priors)); 128 | siftfv = vecpostproc(apply_whiten (siftfv, Xm, eigvec, eigval, 2048)); 129 | siftfv = siftfv'; 130 | img_name = [query_dir query_list(i).name]; 131 | fprintf('Encoding siftfv %d: %s\n', i, img_name); 132 | fv_name = ['../../sdd/dtod/data/test/', dataset, '/query/siftfv/' query_list(i).name]; 133 | save(fv_name, 'siftfv'); 134 | end 135 | fprintf('Aggre test base done, num: %d...\n', size(query_list, 1)); 136 | end 137 | end 138 | -------------------------------------------------------------------------------- /Extraction/SIFT/aggre_sift_vlad.m: -------------------------------------------------------------------------------- 1 | %% 2 | clc;clear; 3 | 4 | %% 5 | run('../vlfeat/toolbox/vl_setup'); 6 | vl_version verbose 7 | addpath('../utils'); 8 | 9 | datasets = {'Landmarks', 'Holidays', 'Paris6k', 'Oxford5k'}; 10 | train_num = 4000; 11 | 12 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 13 | 14 | train_dir = ['../../sdd/dtod/data/train/Landmarks/query/sift/']; 15 | train_list = dir([train_dir '*.mat']); 16 | 17 | % Train VLAD|FV & PCA 18 | train_sift_descriptors = {}; 19 | fprintf('Loading train sift...\n'); 20 | for i = 1 : train_num 21 | sift_name = [train_dir train_list(i).name]; 22 | load(sift_name) 23 | 24 | sift = vecs_normalize(sift, 1); 25 | sift = sift .^ (1.0/2); 26 | 27 | train_sift_descriptors{i} = sift; 28 | end 29 | fprintf('Loading done, num: %d...\n', train_num); 30 | 31 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 32 | 33 | %% Train K-MEANS 34 | k = 64; 35 | fprintf('Training K-MEANS, k = %d...\n', k); 36 | all_descriptors = [train_sift_descriptors{:}]; 37 | centroids = vl_kmeans(all_descriptors, k); 38 | kdtree = vl_kdtreebuild(centroids); 39 | 40 | fprintf('Aggregation train...\n'); 41 | vecs = {}; 42 | for i = 1 : numel(train_sift_descriptors) 43 | nn = vl_kdtreequery(kdtree, centroids, train_sift_descriptors{i}); 44 | assignments = zeros(k, numel(nn)); 45 | assignments(sub2ind(size(assignments), nn, 1:numel(nn))) = 1; 46 | 47 | siftvlad = vecpostproc(vl_vlad(train_sift_descriptors{i}, centroids, assignments)); 48 | siftvlad = siftvlad';%' 49 | 50 | img_name = [train_dir train_list(i).name]; 51 | fprintf('Encoding siftvlad %d: %s\n', i, img_name); 52 | vlad_name = ['../../sdd/dtod/data/train/Landmarks/query/siftvlad/' train_list(i).name]; 
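The VLAD path above hard-assigns every SIFT descriptor to its nearest of the k = 64 centroids (`vl_kdtreequery` on the `vl_kmeans` vocabulary) and accumulates the residuals to that centroid (`vl_vlad`), before the usual L2 normalization and PCA-whitening. The NumPy sketch below reproduces that aggregation in its plainest form; it is illustrative only and not a drop-in replacement for VLFeat.

```python
import numpy as np

def vlad_encode(descs, centroids):
    """descs: D x N local descriptors, centroids: D x K visual words.
    Hard-assign each descriptor to its nearest centroid, sum the residuals,
    and flatten to a K*D vector (normalization is applied afterwards by the scripts)."""
    D, N = descs.shape
    K = centroids.shape[1]
    d2 = ((descs[:, :, None] - centroids[:, None, :]) ** 2).sum(axis=0)   # N x K
    nn = d2.argmin(axis=1)                       # nearest centroid per descriptor
    v = np.zeros((K, D))
    for k in range(K):
        sel = descs[:, nn == k]                  # descriptors assigned to word k
        if sel.size:
            v[k] = (sel - centroids[:, [k]]).sum(axis=1)
    return v.reshape(-1)

# toy usage: 500 SIFT descriptors (128-D), 64 centroids -> 8192-D encoding
enc = vlad_encode(np.random.rand(128, 500), np.random.rand(128, 64))
```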
53 | save(vlad_name, 'siftvlad'); 54 | % load(vlad_name); 55 | vecs{i} = siftvlad; 56 | end 57 | 58 | clear train_sift_descriptors; 59 | 60 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 61 | 62 | fprintf('Learning PCA-whitening\n'); 63 | [~, eigvec, eigval, Xm] = yael_pca(single(cell2mat(vecs')'));% 64 | 65 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 66 | 67 | for i = 1 : size(datasets, 2) 68 | dataset = datasets{i}; 69 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 70 | if dataset(1) == 'L' 71 | base_dir = ['../../sdd/dtod/data/train/Landmarks/base/sift/']; 72 | base_list = dir([base_dir '*.mat']); 73 | 74 | % Base 75 | fprintf('Aggre test base sift %s...\n', dataset); 76 | for i = 1 : size(base_list, 1) 77 | sift_name = [base_dir base_list(i).name]; 78 | load(sift_name) 79 | 80 | sift = vecs_normalize(sift, 1); 81 | sift = sift .^ (1.0/2); 82 | 83 | % Aggre Base 84 | nn = vl_kdtreequery(kdtree, centroids, sift); 85 | assignments = zeros(k, numel(nn)); 86 | assignments(sub2ind(size(assignments), nn, 1:numel(nn))) = 1; 87 | 88 | siftvlad = vecpostproc(vl_vlad(sift, centroids, assignments)); 89 | siftvlad = vecpostproc(apply_whiten(siftvlad, Xm, eigvec, eigval, 2048)); % 90 | siftvlad = siftvlad';%' 91 | 92 | img_name = [base_dir base_list(i).name]; 93 | fprintf('Encoding siftvlad %d: %s\n', i, img_name); 94 | vlad_name = ['../../sdd/dtod/data/train/Landmarks/base/siftvlad/' base_list(i).name]; 95 | save(vlad_name, 'siftvlad'); 96 | 97 | end 98 | fprintf('Aggre test base done, num: %d...\n', size(base_list, 1)); 99 | 100 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 101 | else 102 | base_dir = ['../../sdd/dtod/data/test/', dataset, '/base/sift/']; 103 | base_list = dir([base_dir '*.mat']); 104 | 105 | query_dir = ['../../sdd/dtod/data/test/', dataset, '/query/sift/']; 106 | query_list = dir([query_dir '*.mat']); 107 | 108 | % Base 109 | fprintf('Aggre test base sift %s...\n', dataset); 110 | for i = 1 : size(base_list, 1) 111 | sift_name = [base_dir base_list(i).name]; 112 | load(sift_name) 113 | 114 | sift = vecs_normalize(sift, 1); 115 | sift = sift .^ (1.0/2); 116 | 117 | % Aggre Base 118 | nn = vl_kdtreequery(kdtree, centroids, sift); 119 | assignments = zeros(k, numel(nn)); 120 | assignments(sub2ind(size(assignments), nn, 1:numel(nn))) = 1; 121 | 122 | siftvlad = vecpostproc(vl_vlad(sift, centroids, assignments)); 123 | siftvlad = vecpostproc(apply_whiten(siftvlad, Xm, eigvec, eigval, 2048)); % 124 | siftvlad = siftvlad'; 125 | 126 | img_name = [base_dir base_list(i).name]; 127 | fprintf('Encoding siftvlad %d: %s\n', i, img_name); 128 | vlad_name = ['../../sdd/dtod/data/test/', dataset, '/base/siftvlad/' base_list(i).name]; 129 | save(vlad_name, 'siftvlad'); 130 | end 131 | fprintf('Aggre test base done, num: %d...\n', size(base_list, 1)); 132 | 133 | % Query 134 | fprintf('Aggre test query sift %s...\n', dataset); 135 | for i = 1 : size(query_list, 1) 136 | sift_name = [query_dir query_list(i).name]; 137 | load(sift_name) 138 | 139 | sift = vecs_normalize(sift, 1); 140 | sift = sift .^ (1.0/2); 141 | 142 | % Aggre Query 143 | nn = vl_kdtreequery(kdtree, centroids, sift); 144 | assignments = zeros(k, numel(nn)); 145 | assignments(sub2ind(size(assignments), nn, 1:numel(nn))) = 1; 146 | 147 | siftvlad = vecpostproc(vl_vlad(sift, centroids, assignments)); 148 | siftvlad = vecpostproc(apply_whiten(siftvlad, Xm, eigvec, eigval, 2048)); % 149 | siftvlad 
= siftvlad'; 150 | 151 | img_name = [query_dir query_list(i).name]; 152 | fprintf('Encoding siftvlad %d: %s\n', i, img_name); 153 | vlad_name = ['../../sdd/dtod/data/test/', dataset, '/query/siftvlad/' query_list(i).name]; 154 | save(vlad_name, 'siftvlad'); 155 | end 156 | fprintf('Aggre test query done, num: %d...\n', size(query_list, 1)); 157 | 158 | end 159 | end 160 | -------------------------------------------------------------------------------- /Extraction/SIFT/extract_sift.m: -------------------------------------------------------------------------------- 1 | clc;clear; 2 | run('../vlfeat/toolbox/vl_setup'); 3 | % vl_version verbose 4 | 5 | %% Set image path 6 | % image dir - (absolute path!) 7 | image_dir = ['/home/hujie/MAC_Retrieval/galary_images/']; 8 | % image list 9 | image_list = dir([image_dir '*.jpg']) 10 | % feature save dir 11 | save_dir = ['./SIFT/']; 12 | 13 | %% Extract 14 | for i = 1 : size(image_list, 1) 15 | img_name = [image_dir image_list(i).name]; 16 | img = imread(img_name); 17 | img = single(rgb2gray(img)); 18 | [f, sift] = vl_sift(img); 19 | feat_path = [save_dir image_list(i).name '.mat']; 20 | save(feat_path, 'sift'); 21 | fprintf('Extracting Sift %d: %s\n', i, img_name); 22 | break 23 | end -------------------------------------------------------------------------------- /Extraction/SPoC/spoc.m: -------------------------------------------------------------------------------- 1 | 2 | function [x, X] = spoc(I, net, flag) 3 | 4 | if size(I,3) == 1 5 | I = repmat(I, [1 1 3]); 6 | end 7 | 8 | sz = size(I); 9 | if sz(1) >= 1500 || sz(2) >= 1500 10 | I = imresize(I, [1000, 1000]); 11 | end 12 | if sz(1) < 224 || sz(2) < 224 13 | I = imresize(I, [224, 224]); 14 | end 15 | 16 | I = single(I) - mean(net.meta.normalization.averageImage(:)); 17 | % vgg 18 | if flag == 1 19 | if ~isa(net.layers{1}.weights{1}, 'gpuArray') 20 | rnet = vl_simplenn(net, I); 21 | X = max(rnet(end).x, 0); 22 | else 23 | rnet = vl_simplenn(net, gpuArray(I)); 24 | X = gather(max(rnet(end).x, 0)); 25 | end 26 | end 27 | 28 | %%%%%%%%%%%%%%%%%%%%%%%%%%%% 29 | % resnet 30 | if flag == 0 31 | I = gpuArray(I); 32 | net.eval({'data', I}); 33 | X = gather(max(net.vars(net.getVarIndex('pool5')).value, 0)); 34 | end 35 | %%%%%%%%%%%%%%%%%%%%%%%%%%%% 36 | 37 | x = spoc_act(X); 38 | -------------------------------------------------------------------------------- /Extraction/SPoC/spoc_act.m: -------------------------------------------------------------------------------- 1 | function x = spoc_act(x) 2 | 3 | if ~max(size(x, 1), size(x, 2)) 4 | x = zeros(size(x, 3), 1, class(x)); 5 | return; 6 | end 7 | 8 | x = reshape(mean(mean(x, 1), 2), [size(x,3) 1]); -------------------------------------------------------------------------------- /Extraction/SPoC/test_resnetspoc.m: -------------------------------------------------------------------------------- 1 | addpath('../utils'); 2 | 3 | g = gpuDevice(3) 4 | reset(g) 5 | 6 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 7 | 8 | if exist('aml') ~= 3 9 | mex -compatibleArrayDims aml.c 10 | end 11 | 12 | % matconvnet is a prerequisite 13 | % run vl_setupnn for your installation to avoid downloading and compiling again 14 | if exist('vl_nnconv') ~= 3 15 | cd matconvnet-1.0-beta25/matlab/ 16 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 17 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 18 | end 19 | vl_setupnn; 20 | cd ../../ 21 | end 22 | 23 | net = 
dagnn.DagNN.loadobj(load('imagenet-resnet-101-dag.mat')); 24 | net.mode = 'test'; 25 | net.conserveMemory = false; 26 | move(net, 'gpu') 27 | 28 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 29 | 30 | data_dir = '../dvsd/extractD/data/'; 31 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 32 | desc_name = 'resnetspoc'; 33 | 34 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 35 | % extract features 36 | fprintf('Extracting features...\n'); 37 | 38 | % Whiting 39 | fprintf('Leaning PCA-whitening features...\n'); 40 | white_dir = [data_dir, 'Landmarks/query/']; 41 | white_list = dir([white_dir '*.jpg']); 42 | vecs = {}; 43 | for i = 1 : size(white_list, 1) 44 | img_name = [white_dir white_list(i).name]; 45 | img = imread(img_name); 46 | resnetspoc = vecpostproc(spoc(img, net, 0)); 47 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 48 | % load(load_name); 49 | vecs{i} = resnetspoc; 50 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 51 | save(save_name, 'resnetspoc'); 52 | end 53 | 54 | % Learn PCA 55 | fprintf('Learning PCA-whitening\n'); 56 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 57 | 58 | for i = 1 : size(datasets, 2) 59 | dataset = datasets{i}; 60 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 61 | if dataset(1) == 'L' 62 | % image files are expected under each dataset's folder %'test 63 | base_dir = [data_dir, dataset, '/base/']; 64 | base_list = dir([base_dir '*.jpg']); 65 | for i = 1 : size(base_list, 1) 66 | img_name = [base_dir base_list(i).name]; 67 | img = imread(img_name); 68 | resnetspoc = vecpostproc(spoc(img, net, 0)); 69 | resnetspoc = vecpostproc(apply_whiten(resnetspoc, Xm, eigvec, eigval)); % 70 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 71 | save(save_name, 'resnetspoc'); 72 | end 73 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 74 | else 75 | % image files are expected under each dataset's folder %'test 76 | base_dir = [data_dir, dataset, '/base/']; 77 | query_dir = [data_dir, dataset, '/query/']; 78 | 79 | base_list = dir([base_dir '*.jpg']); 80 | query_list = dir([query_dir '*.jpg']); 81 | 82 | % Base 83 | for i = 1 : size(base_list, 1) 84 | img_name = [base_dir base_list(i).name]; 85 | img = imread(img_name); 86 | resnetspoc = vecpostproc(spoc(img, net, 0)); 87 | resnetspoc = vecpostproc(apply_whiten(resnetspoc, Xm, eigvec, eigval)); % 88 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 89 | save(save_name, 'resnetspoc'); 90 | end 91 | 92 | % Query 93 | for i = 1 : size(query_list, 1) 94 | img_name = [query_dir query_list(i).name]; 95 | img = imread(img_name); 96 | resnetspoc = vecpostproc(spoc(img, net, 0)); 97 | resnetspoc = vecpostproc(apply_whiten(resnetspoc, Xm, eigvec, eigval)); % 98 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 99 | save(save_name, 'resnetspoc'); 100 | end 101 | end 102 | end 103 | -------------------------------------------------------------------------------- /Extraction/SPoC/test_vggspoc.m: -------------------------------------------------------------------------------- 1 | addpath('../utils'); 2 | 3 | g = gpuDevice(4) 4 | reset(g) 5 | 6 | 
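SPoC differs from MAC only in the pooling operator: `spoc_act.m`, shown above, takes the per-channel spatial mean instead of the maximum, and the PCA-whitening pipeline around it is identical. For completeness, the mean-pooling counterpart of the earlier MAC sketch (illustrative only):

```python
import numpy as np

def spoc_pool(X):
    """SPoC pooling: per-channel spatial mean of an H x W x K activation tensor."""
    return X.reshape(-1, X.shape[2]).mean(axis=0)

desc = spoc_pool(np.random.rand(30, 40, 2048))   # shape (2048,)
```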
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 7 | 8 | if exist('aml') ~= 3 9 | mex -compatibleArrayDims aml.c 10 | end 11 | 12 | % matconvnet is a prerequisite 13 | % run vl_setupnn for your installation to avoid downloading and compiling again 14 | if exist('vl_nnconv') ~= 3 15 | cd matconvnet-1.0-beta25/matlab/ 16 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 17 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 18 | end 19 | vl_setupnn; 20 | cd ../../ 21 | end 22 | 23 | % choose pre-trained CNN model 24 | modelfn = 'imagenet-vgg-verydeep-16.mat'; 25 | lid = 31; % use VGG 26 | % parameters of the method 27 | use_gpu = 1; 28 | % matconvnet is a prerequisite 29 | % run vl_setupnn for your installation to avoid downloading and compiling again 30 | if exist('vl_nnconv') ~= 3 31 | cd matconvnet-1.0-beta25/matlab/ 32 | if numel(dir(fullfile('mex', 'vl_nnconv.mex*'))) == 0 33 | vl_compilenn('verbose', 1, 'enableGPU', 1, 'cudaRoot', '/usr/local/cuda-8.0'); 34 | end 35 | vl_setupnn; 36 | cd ../../ 37 | end 38 | 39 | net = load(modelfn); 40 | net.layers = {net.layers{1:lid}}; % remove fully connected layers 41 | 42 | if use_gpu 43 | net = vl_simplenn_move(net, 'gpu'); 44 | end 45 | 46 | % compatibility with matconvnet-1.0-beta25 (otherwise tested with matconvnet-1.0-beta15) 47 | for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'conv'), net.layers{i}.dilate=[1 1]; net.layers{i}.opts={}; end, end 48 | for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'relu'), net.layers{i}.leak=0; end, end 49 | for i=1:numel(net.layers), if strcmp(net.layers{i}.type,'pool'), net.layers{i}.opts={}; end, end 50 | 51 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 52 | 53 | data_dir = '../dvsd/extractD/data/'; 54 | datasets = {'Oxford5k', 'Paris6k', 'Holidays', 'Landmarks'}; 55 | desc_name = 'vggspoc'; 56 | 57 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 58 | % extract features 59 | fprintf('Extracting features...\n'); 60 | 61 | % Whiting 62 | fprintf('Leaning PCA-whitening features...\n'); 63 | white_dir = [data_dir, 'Landmarks/query/']; 64 | white_list = dir([white_dir '*.jpg']); 65 | vecs = {}; 66 | for i = 1 : size(white_list, 1) 67 | img_name = [white_dir white_list(i).name]; 68 | img = imread(img_name); 69 | vggspoc = vecpostproc(spoc(img, net, 1)); 70 | % load_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 71 | % load(load_name); 72 | vecs{i} = vggspoc; 73 | save_name = ['../sdd/dtod/data/train/Landmarks/query/', desc_name, '/', white_list(i).name, '.mat'] 74 | save(save_name, 'vggspoc'); 75 | end 76 | 77 | % Learn PCA 78 | fprintf('Learning PCA-whitening\n'); 79 | [~, eigvec, eigval, Xm] = yael_pca (single(cell2mat(vecs))); 80 | 81 | for i = 1 : size(datasets, 2) 82 | dataset = datasets{i}; 83 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 84 | if dataset(1) == 'L' 85 | % image files are expected under each dataset's folder %'test 86 | base_dir = [data_dir, dataset, '/base/']; 87 | base_list = dir([base_dir '*.jpg']); 88 | for i = 1 : size(base_list, 1) 89 | img_name = [base_dir base_list(i).name]; 90 | img = imread(img_name); 91 | vggspoc = vecpostproc(spoc(img, net, 1)); 92 | vggspoc = vecpostproc(apply_whiten(vggspoc, Xm, eigvec, eigval)); % 93 | save_name = ['../sdd/dtod/data/train/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 94 | save(save_name, 
'vggspoc'); 95 | end 96 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 97 | else 98 | % image files are expected under each dataset's folder %'test 99 | base_dir = [data_dir, dataset, '/base/']; 100 | query_dir = [data_dir, dataset, '/query/']; 101 | 102 | base_list = dir([base_dir '*.jpg']); 103 | query_list = dir([query_dir '*.jpg']); 104 | 105 | % Base 106 | for i = 1 : size(base_list, 1) 107 | img_name = [base_dir base_list(i).name]; 108 | img = imread(img_name); 109 | vggspoc = vecpostproc(spoc(img, net, 1)); 110 | vggspoc = vecpostproc(apply_whiten(vggspoc, Xm, eigvec, eigval)); % 111 | save_name = ['../sdd/dtod/data/test/', dataset, '/base/', desc_name, '/', base_list(i).name, '.mat'] 112 | save(save_name, 'vggspoc'); 113 | end 114 | 115 | % Query 116 | for i = 1 : size(query_list, 1) 117 | img_name = [query_dir query_list(i).name]; 118 | img = imread(img_name); 119 | vggspoc = vecpostproc(spoc(img, net, 1)); 120 | vggspoc = vecpostproc(apply_whiten(vggspoc, Xm, eigvec, eigval)); % 121 | save_name = ['../sdd/dtod/data/test/', dataset, '/query/', desc_name, '/', query_list(i).name, '.mat'] 122 | save(save_name, 'vggspoc'); 123 | end 124 | end 125 | end 126 | -------------------------------------------------------------------------------- /Extraction/test_Holidays.py: -------------------------------------------------------------------------------- 1 | import os 2 | import argparse 3 | import numpy as np 4 | import scipy.io as sio 5 | 6 | # python test_Holidays.py --feature_name x --feature_dim x --galary_path x --query_path x 7 | parser = argparse.ArgumentParser(description = 'Test Holidays mAP.') 8 | parser.add_argument('--feature_name', required = True, help = 'sift_fv') 9 | parser.add_argument('--feature_dim', type = int, default = 2048, help = 'Dimension of feature') 10 | parser.add_argument('--galary_path', required = True, help = './Holidays/galary/') 11 | parser.add_argument('--query_path', required = True, help = './Holidays/query/') 12 | opt = parser.parse_args() 13 | print(opt) 14 | 15 | # Test Setting 16 | feature_name = opt.feature_name 17 | feature_dim = opt.feature_dim 18 | galary_path = opt.galary_path + '/' + feature_name 19 | query_path = opt.query_path + '/' + feature_name 20 | 21 | # Loading Galary 22 | galary_files = os.listdir(galary_path) 23 | galary = np.zeros((len(galary_files), feature_dim)) 24 | names = [] 25 | for i, file in enumerate(galary_files): 26 | feat_tmp = sio.loadmat(galary_path + '/' + file) 27 | feat = feat_tmp[feature_name].reshape(-1) 28 | galary[i, :] = feat 29 | tmp = file.split('.') 30 | name = tmp[0] + '.' + tmp[1] 31 | # print(name) 32 | names.append(name) 33 | 34 | # Loading Queries 35 | query_files = os.listdir(query_path) 36 | ## Result File Name 37 | result_path = './Holidays_results/' 38 | if not os.path.exists(result_path): 39 | os.mkdir(result_path) 40 | 41 | result_file_name = result_path + feature_name + '_result.dat' 42 | print(result_file_name) 43 | resultFile = open(result_file_name, 'w') 44 | ## Searching and Saving the Result 45 | for file in query_files: 46 | feat_tmp = sio.loadmat(query_path + '/' + file) 47 | ### Remove .mat 48 | tmp = file.split('.') 49 | re = tmp[0] + '.' 
+ tmp[1] + ' ' 50 | resultFile.write(re) 51 | 52 | feat = feat_tmp[feature_name] 53 | dist = np.sum((feat - galary) ** 2, 1) 54 | index = np.argsort(dist) 55 | sz = len(names) 56 | for i in range(sz): 57 | re = str(i) + ' ' + names[index[i]] 58 | if i == sz - 1: 59 | re = re + '\n' 60 | else: 61 | re = re + ' ' 62 | resultFile.write(re) 63 | resultFile.close() 64 | 65 | # Evaluating 66 | t = os.popen('python2 holidays_map.py ' + result_file_name + ' ./holidays_images.dat') 67 | # print(t.read()) 68 | mAP = float(t.read()) 69 | print("The mAP of {}: {}.\n".format(feature_name, mAP)) 70 | -------------------------------------------------------------------------------- /Extraction/test_Oxford5k.py: -------------------------------------------------------------------------------- 1 | import os 2 | import argparse 3 | import numpy as np 4 | import scipy.io as sio 5 | 6 | # python test_Holidays.py --feature_name x --feature_dim x --galary_path x --query_path x 7 | parser = argparse.ArgumentParser(description = 'Test Oxford5k mAP.') 8 | parser.add_argument('--feature_name', required = True, help = 'sift_fv') 9 | parser.add_argument('--feature_dim', type = int, default = 2048, help = 'Dimension of feature') 10 | parser.add_argument('--galary_path', required = True, help = './Oxford5k/galary/') 11 | parser.add_argument('--query_path', required = True, help = './Oxford5k/query/') 12 | opt = parser.parse_args() 13 | print(opt) 14 | 15 | # Test Setting 16 | feature_name = opt.feature_name 17 | feature_dim = opt.feature_dim 18 | galary_path = opt.galary_path + '/' + feature_name 19 | query_path = opt.query_path + '/' + feature_name 20 | 21 | # Loading Galary 22 | galary_files = os.listdir(galary_path) 23 | galary = np.zeros((len(galary_files), feature_dim)) 24 | names = [] 25 | for i, file in enumerate(galary_files): 26 | feat_tmp = sio.loadmat(galary_path + '/' + file) 27 | feat = feat_tmp[feature_name].reshape(-1) 28 | galary[i, :] = feat 29 | tmp = file.split('.') 30 | name = tmp[0] 31 | names.append(name) 32 | 33 | # Loading Queries 34 | query_files = os.listdir(query_path) 35 | ## Result File Name 36 | result_path = './Oxford5k_results/' 37 | if not os.path.exists(result_path): 38 | os.mkdir(result_path) 39 | 40 | mAP = 0.0 41 | sz = len(names) 42 | ## Searching and Saving the Result 43 | for file in query_files: 44 | feat_tmp = sio.loadmat(query_path + '/' + file) 45 | query = file.split('.')[0] 46 | result_file_name = result_path + query + '_' + feature_name + '_result.txt' 47 | # print(result_file_name) 48 | resultFile = open(result_file_name, 'w') 49 | feat = feat_tmp[feature_name] 50 | dist = np.sum((feat - galary) ** 2, 1) 51 | index = np.argsort(dist) 52 | for i in range(sz): 53 | re = names[index[i]] + '\n' 54 | resultFile.write(re) 55 | # Evaluating 56 | t = os.popen('./compute_ap ./Oxford5k_gnd/' + query + ' ' + result_file_name) 57 | ap = float(t.read()) 58 | mAP = mAP + ap 59 | resultFile.close() 60 | 61 | print("The mAP: {}.\n".format(mAP / len(query_files))) 62 | -------------------------------------------------------------------------------- /Extraction/test_Paris6k.py: -------------------------------------------------------------------------------- 1 | import os 2 | import argparse 3 | import numpy as np 4 | import scipy.io as sio 5 | 6 | # python test_Holidays.py --feature_name x --feature_dim x --galary_path x --query_path x 7 | parser = argparse.ArgumentParser(description = 'Test Paris6k mAP.') 8 | parser.add_argument('--feature_name', required = True, help = 'sift_fv') 9 | 
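All three evaluation scripts rank the gallery for one query at a time with `dist = np.sum((feat - galary) ** 2, 1)` followed by `np.argsort`. The same rankings can be obtained for every query in a single matrix operation, which is convenient when re-running many descriptors; the sketch below is an equivalent reformulation, and its variable names are illustrative rather than taken from the scripts.

```python
import numpy as np

def rank_all(queries, gallery):
    """queries: Q x D, gallery: N x D. Returns a Q x N matrix of gallery indices,
    each row sorted by increasing squared Euclidean distance to that query."""
    q2 = (queries ** 2).sum(axis=1, keepdims=True)      # Q x 1
    g2 = (gallery ** 2).sum(axis=1)[None, :]            # 1 x N
    d2 = q2 + g2 - 2.0 * queries @ gallery.T            # ||q - g||^2 for every pair
    return np.argsort(d2, axis=1)

ranks = rank_all(np.random.rand(55, 2048), np.random.rand(5063, 2048))
```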
parser.add_argument('--feature_dim', type = int, default = 2048, help = 'Dimension of feature') 10 | parser.add_argument('--galary_path', required = True, help = './Paris6k/galary/') 11 | parser.add_argument('--query_path', required = True, help = './Paris6k/query/') 12 | opt = parser.parse_args() 13 | print(opt) 14 | 15 | # Test Setting 16 | feature_name = opt.feature_name 17 | feature_dim = opt.feature_dim 18 | galary_path = opt.galary_path + '/' + feature_name 19 | query_path = opt.query_path + '/' + feature_name 20 | 21 | # Loading Galary 22 | galary_files = os.listdir(galary_path) 23 | galary = np.zeros((len(galary_files), feature_dim)) 24 | names = [] 25 | for i, file in enumerate(galary_files): 26 | feat_tmp = sio.loadmat(galary_path + '/' + file) 27 | feat = feat_tmp[feature_name].reshape(-1) 28 | galary[i, :] = feat 29 | tmp = file.split('.') 30 | name = tmp[0] 31 | names.append(name) 32 | 33 | # Loading Queries 34 | query_files = os.listdir(query_path) 35 | ## Result File Name 36 | result_path = './Paris6k_results/' 37 | if not os.path.exists(result_path): 38 | os.mkdir(result_path) 39 | 40 | mAP = 0.0 41 | sz = len(names) 42 | ## Searching and Saving the Result 43 | for file in query_files: 44 | feat_tmp = sio.loadmat(query_path + '/' + file) 45 | query = file.split('.')[0] 46 | result_file_name = result_path + query + '_' + feature_name + '_result.txt' 47 | # print(result_file_name) 48 | resultFile = open(result_file_name, 'w') 49 | feat = feat_tmp[feature_name] 50 | dist = np.sum((feat - galary) ** 2, 1) 51 | index = np.argsort(dist) 52 | for i in range(sz): 53 | re = names[index[i]] + '\n' 54 | resultFile.write(re) 55 | # Evaluating 56 | t = os.popen('./compute_ap ./Paris6k_gnd/' + query + ' ' + result_file_name) 57 | ap = float(t.read()) 58 | mAP = mAP + ap 59 | resultFile.close() 60 | 61 | print("The mAP: {}.\n".format(mAP / len(query_files))) 62 | -------------------------------------------------------------------------------- /Extraction/utils/apply_whiten.m: -------------------------------------------------------------------------------- 1 | % apply PCA-whitening, with or without dimensionality reduction 2 | function x_ = apply_whiten (x, xm, eigvec, eigval, dout) 3 | 4 | if ~exist ('dout') 5 | dout = size (x, 1); 6 | end 7 | 8 | x_ = bsxfun (@minus, x, xm); % Subtract the mean 9 | 10 | x_ = diag(eigval(1:dout).^-0.5)*eigvec(:,1:dout)' * x_; 11 | x_ = replacenan (x_); 12 | 13 | % replace all nan values in a matrix (with zero) 14 | function y = replacenan (x, v) 15 | 16 | if ~exist ('v') 17 | v = 0; 18 | end 19 | 20 | y = x; 21 | y(isnan(x)) = v; -------------------------------------------------------------------------------- /Extraction/utils/compute_map.m: -------------------------------------------------------------------------------- 1 | % This function computes the mAP for a given set of returned results. 
2 | % 3 | % Usage: map = compute_map (ranks, gnd); 4 | % 5 | % Notes: 6 | % 1) ranks starts from 1, size(ranks) = db_size X #queries 7 | % 2) The junk results (e.g., the query itself) should be declared in the gnd stuct array 8 | function [map, aps] = compute_map (ranks, gnd, verbose) 9 | 10 | if nargin < 3 11 | verbose = false; 12 | end 13 | 14 | map = 0; 15 | nq = numel (gnd); % number of queries 16 | aps = zeros (nq, 1); 17 | 18 | for i = 1:nq 19 | qgnd = gnd(i).ok; 20 | if isfield (gnd(i), 'junk') 21 | qgndj = gnd(i).junk; 22 | else 23 | qgndj = []; 24 | end 25 | 26 | % positions of positive and junk images 27 | [~, pos] = intersect (ranks (:,i), qgnd); 28 | [~, junk] = intersect (ranks (:,i), qgndj); 29 | 30 | pos = sort(pos); 31 | junk = sort(junk); 32 | 33 | k = 0; 34 | ij = 1; 35 | 36 | if length (junk) 37 | % decrease positions of positives based on the number of junk images appearing before them 38 | ip = 1; 39 | while ip <= numel (pos) 40 | 41 | while ( ij <= length (junk) & pos (ip) > junk (ij) ) 42 | k = k + 1; 43 | ij = ij + 1; 44 | end 45 | 46 | pos (ip) = pos (ip) - k; 47 | ip = ip + 1; 48 | end 49 | end 50 | 51 | ap = score_ap_from_ranks1 (pos, length (qgnd)); 52 | 53 | if verbose 54 | fprintf ('query no %d -> gnd = ', i); 55 | fprintf ('%d ', qgnd); 56 | fprintf ('\n tp ranks = '); 57 | fprintf ('%d ', pos); 58 | fprintf (' -> ap=%.3f\n', ap); 59 | end 60 | map = map + ap; 61 | aps (i) = ap; 62 | 63 | end 64 | map = map / nq; 65 | 66 | end 67 | 68 | 69 | % This function computes the AP for a query 70 | function ap = score_ap_from_ranks1 (ranks, nres) 71 | 72 | % number of images ranked by the system 73 | nimgranks = length (ranks); 74 | ranks = ranks - 1; 75 | 76 | % accumulate trapezoids in PR-plot 77 | ap = 0; 78 | 79 | recall_step = 1 / nres; 80 | 81 | for j = 1:nimgranks 82 | rank = ranks(j); 83 | 84 | if rank == 0 85 | precision_0 = 1.0; 86 | else 87 | precision_0 = (j - 1) / rank; 88 | end 89 | 90 | precision_1 = j / (rank + 1); 91 | ap = ap + (precision_0 + precision_1) * recall_step / 2; 92 | end 93 | 94 | end 95 | -------------------------------------------------------------------------------- /Extraction/utils/crop_qim.m: -------------------------------------------------------------------------------- 1 | function im = crop_qim(imfn, bbx) 2 | 3 | im = imread(imfn); 4 | bbx = uint32(max(bbx + 1, 1)); 5 | im = im(bbx(2):min(bbx(4),size(im,1)), bbx(1):min(bbx(3),size(im,2)), :); 6 | -------------------------------------------------------------------------------- /Extraction/utils/vecpostproc.m: -------------------------------------------------------------------------------- 1 | % suite of descriptor post-processing operations 2 | % 3 | % Authors: A. Bursuc, G. Tolias, H. Jegou. 2015. 
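`compute_map.m` above scores each ranked list with a trapezoidal approximation of the area under the precision-recall curve, after shifting every positive's rank down by the number of junk images returned before it. The NumPy sketch below mirrors that per-query computation (`score_ap_from_ranks1`); it is an illustration, and the MATLAB code remains the reference implementation.

```python
import numpy as np

def discount_junk(pos_ranks, junk_ranks):
    """Shift 0-based positive ranks down by the number of junk results before them."""
    junk = np.asarray(sorted(junk_ranks))
    return [r - int(np.searchsorted(junk, r)) for r in sorted(pos_ranks)]

def average_precision(pos_ranks, n_relevant):
    """Trapezoidal AP from 0-based ranks of the relevant images (junk already removed)."""
    ap, recall_step = 0.0, 1.0 / n_relevant
    for j, rank in enumerate(pos_ranks):                 # j counts retrieved positives
        precision_0 = 1.0 if rank == 0 else j / rank
        precision_1 = (j + 1) / (rank + 1)
        ap += (precision_0 + precision_1) * recall_step / 2.0
    return ap

# toy usage: positives at ranks 0, 2, 6 and one junk image at rank 1
print(average_precision(discount_junk([0, 2, 6], [1]), n_relevant=3))
```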
4 | 5 | function x = vecpostproc(x, a) 6 | if ~exist('a'), a = 1; end 7 | x = replacenan (vecs_normalize (powerlaw (x, a))); 8 | 9 | % apply powerlaw 10 | function x = powerlaw (x, a) 11 | if a == 1, return; end 12 | x = sign (x) .* abs(x) .^ a; 13 | 14 | % replace all nan values in a matrix (with zero) 15 | function y = replacenan (x, v) 16 | if ~exist ('v') 17 | v = 0; 18 | end 19 | y = x; 20 | y(isnan(x)) = v; 21 | 22 | % l2 normalization 23 | function X = vecs_normalize(X) 24 | l = sqrt(sum(X.^2)); 25 | X = bsxfun(@rdivide,X,l); 26 | X = replacenan(X); -------------------------------------------------------------------------------- /Extraction/utils/vecs_normalize.m: -------------------------------------------------------------------------------- 1 | % This function normalize a set of vectors 2 | % Parameters: 3 | % v the set of vectors to be normalized (column stored) 4 | % nr the norm for which the normalization is performed (Default: Euclidean) 5 | % rval replace value in case the vector is 0-norm 6 | % 7 | % Output: 8 | % vout the normalized vector 9 | % vnr the norms of the input vectors 10 | % 11 | % Remark: the function return Nan for vectors of null norm 12 | function [vout, vnr] = yael_vecs_normalize (v, nr, rval) 13 | 14 | if nargin < 2, nr = 2; end 15 | 16 | % norm of each column 17 | vnr = (sum (v.^nr)) .^ (1 / nr); 18 | 19 | % sparse multiplication to apply the norm 20 | vout = bsxfun (@times, double(v), double(1 ./ vnr)); 21 | 22 | if exist('rval') 23 | [~, ko] = find (isnan (vout)); 24 | ko = unique (ko); 25 | vout (:, ko) = rval; 26 | end 27 | -------------------------------------------------------------------------------- /Extraction/utils/yael_pca.m: -------------------------------------------------------------------------------- 1 | % PCA with automatic selection of the method: covariance or gram matrix 2 | % Usage: [X, eigvec, eigval, Xm] = pca (X, dout, center, verbose) 3 | % X input vector set (1 vector per column) 4 | % dout number of principal components to be computed 5 | % center need to center data? 6 | % 7 | % Note: the eigenvalues are given in decreasing order of magnitude 8 | % 9 | % Author: Herve Jegou, 2011. 
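% Example (hypothetical): learn whitening parameters on training descriptors
% Xtrain (one descriptor per column), then project other descriptors with
% apply_whiten; the output dimension of 512 is only illustrative.
%   [~, eigvec, eigval, Xm] = yael_pca (Xtrain, 512, true);
%   x_w = apply_whiten (x, Xm, eigvec, eigval, 512);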
10 | % Last revision: 08/10/2013 11 | 12 | function [X, eigvec, eigval, Xm] = yael_pca (X, dout, center, verbose) 13 | 14 | if nargin < 3, center = true; end 15 | % if ~exist ('verbose'), verbose = false; end 16 | 17 | verbose = true; 18 | 19 | X = double (X); 20 | d = size (X, 1); 21 | n = size (X, 2); 22 | 23 | if nargin < 2 24 | dout = d; 25 | end 26 | 27 | if center 28 | Xm = mean (X, 2); 29 | X = bsxfun (@minus, X, Xm); 30 | else 31 | Xm = zeros (d, 1); 32 | end 33 | 34 | 35 | opts.issym = true; 36 | opts.isreal = true; 37 | opts.tol = eps; 38 | opts.disp = 0; 39 | 40 | % PCA with covariance matrix 41 | if n > d %|| n < d 42 | if verbose, fprintf ('PCA with covariance matrix: %d -> %d\n', d, dout); end 43 | Xcov = X * X'; 44 | Xcov = (Xcov + Xcov') / (2 * n); 45 | 46 | if dout < d 47 | [eigvec, eigval] = eigs (Xcov, dout, 'LM', opts); 48 | else 49 | [eigvec, eigval] = eig (Xcov); 50 | end 51 | else 52 | % PCA with gram matrix 53 | if verbose, fprintf ('PCA with gram matrix: %d -> %d\n', d, dout); end 54 | Xgram = X' * X; 55 | Xgram = (Xgram + Xgram') / 2; 56 | if dout < d 57 | [eigvec, eigval] = eigs (Xgram, dout, 'LM', opts); 58 | else 59 | [eigvec, eigval] = eig (Xgram); 60 | end 61 | eigvec = single (X * eigvec); 62 | eigvec = vecs_normalize (eigvec); 63 | end 64 | 65 | 66 | X = eigvec' * X; 67 | X = single (X); 68 | eigval = diag(eigval); 69 | 70 | % We prefer a consistent order 71 | [~, eigord] = sort (eigval, 'descend'); 72 | eigval = eigval (eigord); 73 | eigvec = eigvec (:, eigord); 74 | X = X(eigord, :); 75 | -------------------------------------------------------------------------------- /Figures/Framework.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hujiecpp/VisualFeatureTranslation/f03347a9e188795edc34634c602f10bc188405a2/Figures/Framework.png -------------------------------------------------------------------------------- /Figures/MST.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hujiecpp/VisualFeatureTranslation/f03347a9e188795edc34634c602f10bc188405a2/Figures/MST.png -------------------------------------------------------------------------------- /Figures/Retrieval_examples.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hujiecpp/VisualFeatureTranslation/f03347a9e188795edc34634c602f10bc188405a2/Figures/Retrieval_examples.png -------------------------------------------------------------------------------- /Figures/mAP_difference.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hujiecpp/VisualFeatureTranslation/f03347a9e188795edc34634c602f10bc188405a2/Figures/mAP_difference.png -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This is the project page of our paper: 2 | 3 | "Towards Visual Feature Translation." Hu, J., Ji, R., Liu, H., Zhang, S., Deng, C., & Tian, Q. In *CVPR 2019.* \[[paper](http://openaccess.thecvf.com/content_CVPR_2019/html/Hu_Towards_Visual_Feature_Translation_CVPR_2019_paper.html)\] 4 | 5 | If you have any problem, please feel free to contact us. (hujie.cpp@gmail.com) 6 | 7 | ![The framework of our paper.](https://github.com/hujiecpp/VisualFeatureTranslation/blob/master/Figures/Framework.png) 8 | # 1. 
Feature Extraction 9 | This section contains the process of collecting popular content-based image retrieval features to prepare the meta-data of our paper. 10 | 11 | The extracted features are evaluated in this section, and the code with details can be found in: [./Extraction/](https://github.com/hujiecpp/VisualFeatureTranslation/tree/master/Extraction) 12 | 13 | ## 1.1 Evaluation 14 | ### 1.1.1 Datasets 15 | Datasets for evaluation: 16 | - [Holidays](http://lear.inrialpes.fr/people/jegou/data.php#holidays) [1] 17 | - [Oxford5k](http://www.robots.ox.ac.uk/~vgg/data/oxbuildings/) [2] 18 | - [Paris6k](http://www.robots.ox.ac.uk/~vgg/data/parisbuildings/) [3] 19 | 20 | Dataset for PCA whitening and creating codebooks: 21 | - [Google-Landmarks](https://www.kaggle.com/c/landmark-retrieval-challenge) [4] 22 | 23 | ### 1.1.2 Measurement 24 | We use the mean Average Precision (mAP) provided by the official sites of the above datasets for evaluation. 25 | 26 | ## 1.2 Features 27 | Please note that our feature extraction does not use the bounding boxes of the objects. 28 | 29 | The local features (e.g., SIFT and DELF) are aggregated with codebooks learned on 4,000 randomly picked images of the Google-Landmarks dataset. 30 | 31 | The features of these picked images are also used to train the PCA whitening that is applied to the features of all other images. 32 | 33 | The features are listed below: 34 | 35 | - **SIFT-FV** and **SIFT-VLAD**: The Scale Invariant Feature Transform (SIFT) [5] features are extracted and then aggregated by the Fisher Vector (FV) [6] and the Vector of Locally Aggregated Descriptors (VLAD) [7]. 36 | 37 | - **DELF-FV** and **DELF-VLAD**: The DEep Local Features (DELF) [8] are extracted and then also aggregated by FV and VLAD. 38 | 39 | - **V-CroW** and **R-CroW**: The abbreviation V represents the VGG [9] backbone network, and R represents the ResNet50 [10] backbone network. The Cross-dimensional Weighting (CroW) [11] is then used to aggregate the deep features generated by the backbone networks. 40 | 41 | - **V-SPoC** and **R-SPoC**: The Sum-Pooled Convolutional features (SPoC) [12] are used to aggregate the deep features generated by the backbone networks. 42 | 43 | - **V-MAC**, **V-rMAC** and **R-MAC**, **R-rMAC**: The Maximum Activations of Convolutions (MAC) [13] and the regional Maximum Activations of Convolutions (rMAC) [14] are used to aggregate the deep features generated by the backbone networks. 44 | 45 | - **V-GeM**, **V-rGeM** and **R-GeM**, **R-rGeM**: The Generalized-Mean pooling (GeM) [15] is used to aggregate the deep features generated by the backbone networks. 46 | 47 | ## 1.3 Results 48 | The mAP (%) of the collected features is as follows: 49 | 50 | | | Holidays | Oxford5k | Paris6k | 51 | | :---: |:--------:|:--------:|:-------:| 52 | |SIFT-FV |61.77 |36.25 |36.91 | 53 | |SIFT-VLAD |63.92 |40.49 |41.49 | 54 | |DELF-FV |83.42 |73.38 |83.06 | 55 | |DELF-VLAD |84.61 |75.31 |82.54 | 56 | |V-CroW |83.17 |68.38 |79.79 | 57 | |V-GeM |84.57 |82.71 |86.85 | 58 | |V-MAC |74.18 |60.97 |72.65 | 59 | |V-rGeM |85.06 |82.30 |87.33 | 60 | |V-rMAC |83.50 |70.84 |83.54 | 61 | |V-SPoC |83.38 |66.43 |78.47 | 62 | |R-CroW |86.38 |61.73 |75.46 | 63 | |R-GeM |89.08 |84.47 |91.87 | 64 | |R-MAC |88.53 |60.82 |77.74 | 65 | |R-rGeM |89.32 |84.60 |91.90 | 66 | |R-rMAC |89.08 |68.46 |83.00 | 67 | |R-SPoC |86.57 |62.36 |76.75 | 68 | 69 | # 2. Feature Translation 70 | We translate different types of features and test them in this section.
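A minimal sketch of what a trained translator does at test time is given below. It assumes the `Encoder`/`Decoder` definitions in `./Translation/models.py`; the checkpoint file names and the 2048-d to 512-d feature pair are hypothetical placeholders.

```python
import torch
from torch.autograd import Variable
from models import Encoder, Decoder  # ./Translation/models.py

# Hypothetical example: map 2048-d source gallery features into a 512-d target space.
enc = Encoder(ngpu=1, dim=2048)   # encodes a source feature into a 510-d latent code
dec = Decoder(ngpu=1, dim=512)    # decodes the latent code; output is L2-normalized
enc.load_state_dict(torch.load('enc_resnetgem_to_vgggem.pth'))  # hypothetical checkpoints
dec.load_state_dict(torch.load('dec_resnetgem_to_vgggem.pth'))

src = Variable(torch.randn(100, 2048))  # stand-in for 100 source gallery features
translated = dec(enc(src))              # translated gallery features in the target space
```

The translated gallery features are then searched with the target query features, following the measurement protocol in Section 2.1.2 below.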
71 | 72 | The code with details can be found in: [./Translation/](https://github.com/hujiecpp/VisualFeatureTranslation/tree/master/Translation) 73 | 74 | ## 2.1 Evaluation 75 | ### 2.1.1 Datasets 76 | Datasets for evaluating the translation results: 77 | - [Holidays](http://lear.inrialpes.fr/people/jegou/data.php#holidays) [1] 78 | - [Oxford5k](http://www.robots.ox.ac.uk/~vgg/data/oxbuildings/) [2] 79 | - [Paris6k](http://www.robots.ox.ac.uk/~vgg/data/parisbuildings/) [3] 80 | 81 | Dataset for training the Hybrid Auto-Encoder (HAE): 82 | - [Google-Landmarks](https://www.kaggle.com/c/landmark-retrieval-challenge) [4] 83 | 84 | ### 2.1.2 Measurement 85 | The mean average precision (mAP) is used to evaluate the retrieval performance. We translate the source features of the gallery images to the target space, and the target features of the query images are used for searching. 86 | - Gallery: Source -> Target 87 | - Query: Target 88 | 89 | ## 2.2 Hybrid Auto-Encoder 90 | The Hybrid Auto-Encoder (HAE) is trained with **Translation** (Source -> Target) and **Reconstruction** (Target -> Target), which yields the **Translation Error** and the **Reconstruction Error** used to optimize the network. 91 | 92 | ## 2.3 Results 93 | ### 2.3.1 Translation Results 94 | The mAP (%) difference between the target and the translated features on three public datasets: Holidays (Green), Oxford5k (Blue) and Paris6k (Brown). 95 | 96 | ![The mAP difference.](https://github.com/hujiecpp/VisualFeatureTranslation/blob/master/Figures/mAP_difference.png) 97 | 98 | ### 2.3.2 Retrieval Examples 99 | The retrieval results for query images of the *Eiffel Tower* (top) and the *Arc de Triomphe* (bottom) with the target features and the translated features. The images are resized for better viewing, and the interesting results are marked with red bounding boxes. 100 | 101 | ![Some retrieval results.](https://github.com/hujiecpp/VisualFeatureTranslation/blob/master/Figures/Retrieval_examples.png) 102 | 103 | # 3. Relation Mining 104 | We mine the relations among different types of features in this section, and the code with details can be found in: [./Relation/](https://github.com/hujiecpp/VisualFeatureTranslation/tree/master/Relation) 105 | ## 3.1 Affinity Measurement 106 | If the **Translation Error** is close to the **Reconstruction Error**, we consider the **Translation** between the source and target features to be similar to the **Reconstruction** of the target features, which indicates that the source and target features have high affinity. 107 | 108 | Therefore, we regard the difference between the **Translation Error** and the **Reconstruction Error** as an affinity measurement. 109 | 110 | After normalization, we finally obtain an Undirected Affinity Measurement. 111 | 112 | ## 3.2 Visualization Result 113 | The Undirected Affinity can be visualized by applying a Minimum Spanning Tree (MST) algorithm. 114 | 115 | The edge lengths are the average values of the results on the Holidays, Oxford5k and Paris6k datasets. The images are the retrieval results for a query image of the *Pantheon* with the corresponding features in the main trunk of the MST. Close feature pairs such as R-SPoC and R-CroW have similar ranking lists. 116 | 117 | ![The MST.](https://github.com/hujiecpp/VisualFeatureTranslation/blob/master/Figures/MST.png) 118 | # 4. References 119 | [1] "Hamming embedding and weak geometric consistency for large scale image search." Jégou, H., Douze, M., & Schmid, C. In *ECCV 2008.* 120 | [2] "Object retrieval with large vocabularies and fast spatial matching."
Philbin, J., Chum, O., Isard, M., Sivic, J. & Zisserman, A. In *CVPR 2007.* 121 | [3] "Lost in Quantization: Improving Particular Object Retrieval in Large Scale Image Databases." Philbin, J., Chum, O., Isard, M., Sivic, J. & Zisserman, A. In *CVPR 2008.* 122 | [4] "Large-scale image retrieval with attentive deep local features." Noh, H., Araujo, A., Sim, J., Weyand, T., & Han, B. In *ICCV 2017.* 123 | [5] "Distinctive image features from scale-invariant keypoints." Lowe, D. G. *IJCV 2004.* 124 | [6] "Large-scale image retrieval with compressed fisher vectors." Perronnin, F., Liu, Y., Sánchez, J., & Poirier, H. In *CVPR 2010.* 125 | [7] "Aggregating local descriptors into a compact image representation." Jégou, H., Douze, M., Schmid, C., & Pérez, P. In *CVPR 2010.* 126 | [8] "Large-scale image retrieval with attentive deep local features." Noh, H., Araujo, A., Sim, J., Weyand, T., & Han, B. In *ICCV 2017.* 127 | [9] "Very deep convolutional networks for large-scale image recognition." Simonyan, K., & Zisserman, A. *arXiv:1409.1556.* 128 | [10] "Deep residual learning for image recognition." He, K., Zhang, X., Ren, S., & Sun, J. In *CVPR 2016.* 129 | [11] "Cross-dimensional weighting for aggregated deep convolutional features." Kalantidis, Y., Mellina, C., & Osindero, S. In *ECCV 2016.* 130 | [12] "Aggregating local deep features for image retrieval." Babenko, A., & Lempitsky, V. In *ICCV 2015.* 131 | [13] "Visual instance retrieval with deep convolutional networks." Razavian, A. S., Sullivan, J., Carlsson, S., & Maki, A. *MTA 2016.* 132 | [14] "Particular object retrieval with integral max-pooling of CNN activations." Tolias, G., Sicre, R., & Jégou, H. In *ICLR 2016.* 133 | [15] "Fine-tuning CNN image retrieval with no human annotation." Radenović, F., Tolias, G., & Chum, O. *PAMI 2018.* 134 | 135 | # 5. Citation 136 | If our paper helps your research, please cite it in your publications: 137 | ``` 138 | @InProceedings{Hu_2019_CVPR, 139 | author = {Hu, Jie and Ji, Rongrong and Liu, Hong and Zhang, Shengchuan and Deng, Cheng and Tian, Qi}, 140 | title = {Towards Visual Feature Translation}, 141 | booktitle = {The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, 142 | year = {2019} 143 | } 144 | ``` 145 | -------------------------------------------------------------------------------- /Relation/README.md: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | - matlab 3 | 4 | # Relation Mining 5 | Run the following code to calculate the Undirected Affinity Measurement, and find the MST: 6 | > matlab 7 | > \>\>relation_mining -------------------------------------------------------------------------------- /Relation/relation_mining.m: -------------------------------------------------------------------------------- 1 | clear;clc; 2 | mean_D = zeros(16, 16); 3 | mean_U = zeros(16, 16); 4 | %% Holidays 5 | % D 6 | Ho_D = [ ... 
7 | 0.00004016 0.16112835 0.38455757 0.38059205 0.39365301 0.37735879 0.30602518 0.38785723 0.47682840 0.51400489 0.59477532 0.50396389 0.49956948 0.47147053 0.52883146 0.54016439 ; 8 | 0.12637218 0.00005725 0.38379166 0.37741232 0.39425638 0.37289304 0.30648515 0.38557580 0.47389895 0.50636297 0.59639740 0.49883512 0.50030041 0.46680227 0.53476005 0.53587595 ; 9 | 0.35122018 0.37073364 0.00014827 0.30220079 0.16109490 0.27881619 0.06885355 0.00364377 0.42006797 0.50068903 0.58110350 0.47421604 0.46628472 0.39073607 0.51393452 0.51869644 ; 10 | 0.36222613 0.37836395 0.31686577 0.00001498 0.29652032 0.05034594 0.22556031 0.32191339 0.44824123 0.43666556 0.56768525 0.43794516 0.47284260 0.44395056 0.51735138 0.52479088 ; 11 | 0.38019802 0.39802881 0.17162700 0.29739094 0.00013378 0.30424726 0.05346745 0.18241300 0.44203874 0.50629586 0.56051391 0.50090653 0.46690595 0.43942103 0.52278956 0.52927370 ; 12 | 0.35570413 0.37226798 0.29104108 0.05127223 0.30193734 0.00007418 0.20438957 0.29597351 0.44451347 0.44404769 0.57554078 0.41135931 0.45699197 0.43336868 0.51605940 0.52383492 ; 13 | 0.36529251 0.38346966 0.10249010 0.28807071 0.06980760 0.26757869 0.00010398 0.11252451 0.42755067 0.49625146 0.56482363 0.46641031 0.44389880 0.41931331 0.51913406 0.52820309 ; 14 | 0.34912589 0.36842977 0.00370607 0.30461454 0.16794026 0.28175309 0.07591791 0.00027894 0.41955000 0.50101382 0.58434743 0.47405839 0.46864200 0.38946167 0.50527017 0.51235262 ; 15 | 0.39260733 0.40503088 0.38310936 0.37328225 0.37202272 0.37452570 0.30040324 0.38531923 0.00171373 0.40229809 0.31104136 0.39811283 0.22519557 0.06317376 0.52101156 0.52207385 ; 16 | 0.40710115 0.41545377 0.41075724 0.36853951 0.39612070 0.37397635 0.33225384 0.42243311 0.40046573 0.00016243 0.49234822 0.07845985 0.41916135 0.41875011 0.51741544 0.52228648 ; 17 | 0.45448044 0.46329710 0.44411954 0.41908932 0.42145351 0.42794985 0.36131066 0.45178163 0.32189596 0.48684534 0.00161987 0.50468159 0.13732730 0.42332676 0.55545527 0.54946192 ; 18 | 0.40376831 0.40263768 0.40328050 0.36744079 0.39650401 0.35982054 0.31843951 0.40695277 0.39605609 0.07848392 0.50610590 0.00076357 0.39769861 0.40583679 0.51637258 0.52028037 ; 19 | 0.42503760 0.44049409 0.42083129 0.40643468 0.39854792 0.40283915 0.33088151 0.42176700 0.26195452 0.45479894 0.15045021 0.43610650 0.00152473 0.34654054 0.53829360 0.53962507 ; 20 | 0.38056790 0.39654382 0.36993116 0.36771613 0.36636698 0.36608320 0.29131052 0.37134361 0.05571789 0.40986940 0.39073846 0.39959568 0.28557277 0.00089215 0.50934391 0.51066971 ; 21 | 0.48869303 0.49694230 0.48629510 0.47235593 0.47476605 0.47111002 0.40204090 0.48614824 0.65121245 0.65425819 0.69978297 0.64935404 0.63761598 0.65085196 0.00001088 0.27259157 ; 22 | 0.49016396 0.50365506 0.48678225 0.47328591 0.47847369 0.47295392 0.40523314 0.48939863 0.65294951 0.65539128 0.70127106 0.65176982 0.63889706 0.65124547 0.26299321 0.00000855 ; 23 | ]; 24 | % U 25 | U_row = Ho_D; 26 | for i = 1 : 16 27 | U_row(i, :) = (U_row(i, :)-min(U_row(i,:))) / (max(U_row(i,:)) - min(U_row(i,:))); 28 | end 29 | U_col = Ho_D'; 30 | for i = 1 : 16 31 | U_col(i, :) = (U_col(i, :)-min(U_col(i,:))) / (max(U_col(i,:)) - min(U_col(i,:))); 32 | end 33 | Ho_U = (U_row + U_row' + U_col + U_col') / 4; 34 | 35 | mean_D = mean_D + Ho_D; 36 | mean_U = mean_U + Ho_U; 37 | %% Oxford5k 38 | % D 39 | Ox_D = [ ... 
40 | 0.00001982 0.17718754 0.38738763 0.38894540 0.39714545 0.38386682 0.31109700 0.38719401 0.47462261 0.52256185 0.59011316 0.51159650 0.48896796 0.45662397 0.52574542 0.53628247 ; 41 | 0.13932558 0.00007914 0.38592955 0.38482603 0.39854720 0.38105869 0.31243837 0.38666221 0.47462830 0.51712239 0.59352309 0.50555623 0.49364194 0.45637834 0.53080298 0.53187790 ; 42 | 0.34614673 0.36956623 0.00011761 0.31464511 0.15350488 0.28061152 0.04652112 0.00363312 0.42106795 0.51473916 0.56882900 0.47933671 0.44078648 0.39345309 0.52431707 0.52859344 ; 43 | 0.35837962 0.37588054 0.32463083 0.00006329 0.30269811 0.06130375 0.23983592 0.32917461 0.46226984 0.44442725 0.56762290 0.44648594 0.47226369 0.45188439 0.52366415 0.53088027 ; 44 | 0.37045385 0.39210257 0.16166133 0.30362323 0.00009233 0.30645472 0.06577200 0.17102753 0.44077879 0.52010143 0.54899603 0.50740182 0.45127252 0.43018106 0.52471281 0.53080806 ; 45 | 0.34815685 0.36725024 0.28890282 0.06218181 0.30432349 0.00008953 0.21054059 0.29144517 0.44985494 0.44961274 0.57153469 0.41407543 0.44927785 0.43174854 0.51911933 0.52694278 ; 46 | 0.35238083 0.37506959 0.06902315 0.29844433 0.08490778 0.26915690 0.00013951 0.07460322 0.42309195 0.50654584 0.55184865 0.46954563 0.42230707 0.40464222 0.52178164 0.53150868 ; 47 | 0.34406705 0.36530845 0.00367190 0.31569791 0.16013984 0.28057513 0.05014063 0.00016652 0.42094725 0.51499581 0.56997663 0.47664836 0.44146848 0.39067507 0.51701192 0.52358572 ; 48 | 0.37045526 0.39244939 0.37741604 0.38029844 0.36984685 0.37881500 0.29945639 0.37924132 0.00038669 0.42140743 0.30168304 0.40573803 0.20098722 0.05012240 0.52163153 0.52497232 ; 49 | 0.38925770 0.40436269 0.41010889 0.37215945 0.39972347 0.37568057 0.33868542 0.42036432 0.41394761 0.00071525 0.49897891 0.08949783 0.41980404 0.42025247 0.51908794 0.52625367 ; 50 | 0.42896210 0.44701001 0.43139961 0.41836306 0.41438764 0.42630321 0.35893700 0.43839398 0.30709243 0.49612415 0.00130560 0.50757170 0.15123415 0.38483384 0.54205412 0.53998872 ; 51 | 0.38703358 0.39311354 0.40107766 0.37251654 0.39812779 0.36159644 0.32176238 0.40230960 0.40286276 0.08990960 0.50949442 0.00038973 0.39239228 0.40062112 0.51724725 0.52295199 ; 52 | 0.40087712 0.42506756 0.40801433 0.40880033 0.39123234 0.40159410 0.32541403 0.40770018 0.23312777 0.46391177 0.16760188 0.43380839 0.00132673 0.29371035 0.52748088 0.53060417 ; 53 | 0.35998003 0.38297359 0.36492315 0.37592417 0.36552989 0.36995906 0.29047391 0.36720845 0.04821447 0.42737538 0.37317577 0.40506569 0.24767263 0.00011457 0.51342103 0.51741864 ; 54 | 0.49340008 0.50517965 0.50380486 0.49045822 0.49553370 0.48904181 0.43289891 0.50440687 0.67924166 0.68095791 0.71823615 0.66855371 0.65857404 0.66698873 0.00000737 0.33771707 ; 55 | 0.49530999 0.51032974 0.50457996 0.49213329 0.49690920 0.49094003 0.43605629 0.50717634 0.68150961 0.68054581 0.71973687 0.67079043 0.66077012 0.66779834 0.33429328 0.00010618 ; 56 | ]; 57 | % U 58 | U_row = Ox_D; 59 | for i = 1 : 16 60 | U_row(i, :) = (U_row(i, :)-min(U_row(i,:))) / (max(U_row(i,:)) - min(U_row(i,:))); 61 | end 62 | U_col = Ox_D'; 63 | for i = 1 : 16 64 | U_col(i, :) = (U_col(i, :)-min(U_col(i,:))) / (max(U_col(i,:)) - min(U_col(i,:))); 65 | end 66 | Ox_U = (U_row + U_row' + U_col + U_col') / 4; 67 | 68 | mean_D = mean_D + Ox_D; 69 | mean_U = mean_U + Ox_U; 70 | 71 | %% Paris6k 72 | % D 73 | Pa_D = [ ... 
74 | 0.00007025 0.16581138 0.37689063 0.37892833 0.38854820 0.37219775 0.29973409 0.37693128 0.46187431 0.50140160 0.58165580 0.48804605 0.47248453 0.44836944 0.52865241 0.53617630 ; 75 | 0.13023687 0.00003459 0.37669498 0.37602797 0.38902295 0.36856312 0.30053693 0.37624076 0.46067521 0.49399939 0.58596712 0.48195177 0.47539470 0.44761646 0.53207335 0.53155650 ; 76 | 0.33842788 0.35878650 0.00008022 0.30922940 0.15018076 0.27727017 0.04587109 0.00368919 0.42047226 0.50044918 0.56822371 0.46592060 0.43752399 0.40050966 0.53000681 0.53131049 ; 77 | 0.35386952 0.37268051 0.32107338 0.00006998 0.29916680 0.05751756 0.23253472 0.32441664 0.45559165 0.42965052 0.56023216 0.42836961 0.45740157 0.45260802 0.52903636 0.53285375 ; 78 | 0.36507331 0.38730877 0.15915692 0.30020839 0.00002773 0.30292079 0.06314842 0.16775353 0.44022050 0.50565559 0.54469419 0.49325880 0.44336334 0.43366835 0.52872744 0.53316567 ; 79 | 0.34073662 0.36009490 0.28545552 0.05857656 0.30033386 0.00009054 0.20471491 0.28831667 0.44556251 0.43351889 0.56606066 0.39957809 0.43804765 0.43495396 0.52441201 0.52842947 ; 80 | 0.34597283 0.36661514 0.06820619 0.29148754 0.08140369 0.26426828 0.00010780 0.07291038 0.42220071 0.48973227 0.55014676 0.45429823 0.41643843 0.41098291 0.52647665 0.53466328 ; 81 | 0.33554381 0.35543093 0.00365428 0.31053382 0.15625229 0.27673253 0.04921762 0.00015869 0.42043263 0.50016320 0.57068282 0.46327969 0.43674481 0.39822808 0.52273373 0.52627891 ; 82 | 0.36884363 0.38865219 0.38003102 0.37671316 0.36879024 0.37511507 0.29773620 0.38067999 0.00177733 0.40624326 0.30777252 0.39239231 0.19533312 0.04378105 0.52455271 0.52588678 ; 83 | 0.38772410 0.40020692 0.40634206 0.36660326 0.39461690 0.36882806 0.32972193 0.41565195 0.40353453 0.00249153 0.48676583 0.08249554 0.40077138 0.41776446 0.52145877 0.52638161 ; 84 | 0.43080725 0.45088101 0.42989355 0.41365069 0.41362754 0.42191243 0.35588452 0.43569875 0.31722376 0.48260492 0.00154934 0.49645728 0.15437511 0.39210418 0.54659210 0.54303351 ; 85 | 0.38445496 0.38740897 0.39759251 0.36508501 0.39294991 0.35500753 0.31443885 0.39807254 0.39235523 0.08352482 0.50038302 0.00081658 0.37755054 0.39675888 0.52035318 0.52423985 ; 86 | 0.39974636 0.42388755 0.40558219 0.40124762 0.38761067 0.39544860 0.32101607 0.40426883 0.23112336 0.44322568 0.16860799 0.41585565 0.00203587 0.28883460 0.53125530 0.53341834 ; 87 | 0.35910573 0.38081208 0.37051463 0.37334615 0.36470285 0.36872247 0.29001287 0.37091351 0.04429592 0.41595596 0.37740979 0.39303088 0.24604203 0.00019566 0.51649615 0.51862430 ; 88 | 0.49341823 0.50496193 0.49618658 0.48517552 0.49390706 0.48247328 0.42684728 0.49728629 0.67720032 0.66944498 0.71432287 0.65952247 0.65649855 0.66313237 0.00000394 0.32507289 ; 89 | 0.49360071 0.50981132 0.49714428 0.48468992 0.49424729 0.48268187 0.42855516 0.49972588 0.67857546 0.66884238 0.71540058 0.66052634 0.65890205 0.66416746 0.32193131 0.00005413 ; 90 | ]; 91 | % U 92 | U_row = Pa_D; 93 | for i = 1 : 16 94 | U_row(i, :) = (U_row(i, :)-min(U_row(i,:))) / (max(U_row(i,:)) - min(U_row(i,:))); 95 | end 96 | U_col = Pa_D'; 97 | for i = 1 : 16 98 | U_col(i, :) = (U_col(i, :)-min(U_col(i,:))) / (max(U_col(i,:)) - min(U_col(i,:))); 99 | end 100 | Pa_U = (U_row + U_row' + U_col + U_col') / 4; 101 | 102 | mean_D = mean_D + Pa_D; 103 | mean_U = mean_U + Pa_U; 104 | %% Mean 105 | mean_U = mean_U / 3; 106 | mean_D = mean_D / 3; 107 | 108 | % save('mean_D.mat', 'mean_D') 109 | % save('mean_U.mat', 'mean_U') 110 | 111 | %% Graph 112 | features = 
{'DELF-FV','DELF-VLAD','R-CroW','R-GeM','R-MAC','R-rGeM','R-rMAC','R-SPoC','V-CroW','V-GeM','V-MAC','V-rGeM','V-rMAC','V-SPoC','SIFT-FV','SIFT-VLAD'}; 113 | %% Mean 114 | 115 | x_label = {'DELF-FV','DELF-VLAD','R-CroW','R-GeM','R-MAC','R-rGeM','R-rMAC','R-SPoC','V-CroW','V-GeM','V-MAC','V-rGeM','V-rMAC','V-SPoC','SIFT-FV','SIFT-VLAD'}; 116 | y_label = {'DELF-FV','DELF-VLAD','R-CroW','R-GeM','R-MAC','R-rGeM','R-rMAC','R-SPoC','V-CroW','V-GeM','V-MAC','V-rGeM','V-rMAC','V-SPoC','SIFT-FV','SIFT-VLAD'}; 117 | figure; 118 | h = heatmap(x_label, y_label, mean_U, ... 119 | 'FontSize',12,'FontName','Times New Roman', 'CellLabelFormat','%0.2f'); 120 | % h.Title = 'The Undirected Affinity Matrix U on Average'; 121 | 122 | x_label = {'DELF-FV','DELF-VLAD','R-CroW','R-GeM','R-MAC','R-rGeM','R-rMAC','R-SPoC','V-CroW','V-GeM','V-MAC','V-rGeM','V-rMAC','V-SPoC','SIFT-FV','SIFT-VLAD'}; 123 | y_label = {'DELF-FV','DELF-VLAD','R-CroW','R-GeM','R-MAC','R-rGeM','R-rMAC','R-SPoC','V-CroW','V-GeM','V-MAC','V-rGeM','V-rMAC','V-SPoC','SIFT-FV','SIFT-VLAD'}; 124 | figure; 125 | h = heatmap(x_label, y_label, mean_D * 10, ... 126 | 'FontSize',12,'FontName','Times New Roman', 'CellLabelFormat','%0.2f'); 127 | % h.Title = 'The Directed Affinity Matrix D on Average'; 128 | 129 | clc; 130 | for i = 1 : 16 131 | for j = i + 1 : 16 132 | fprintf('{"source": "%s", "target": "%s", "dist": %.2f},\n', features{i}, features{j}, mean_U(i,j)); 133 | end 134 | end 135 | % MST 136 | fprintf('MST~~\n'); 137 | s = []; 138 | t = []; 139 | weights = []; 140 | for i = 1 : 16 141 | for j = i + 1 : 16 142 | s = [s, i]; 143 | t = [t, j]; 144 | weights = [weights, mean_U(i, j)]; 145 | end 146 | end 147 | G = graph(s,t,weights); 148 | [T,pred] = minspantree(G); 149 | for i = 1 : 15 150 | a = T.Edges.EndNodes(i, 1); 151 | b = T.Edges.EndNodes(i, 2); 152 | fprintf('{"source": "%s", "target": "%s", "dist": %.4f},\n', features{a}, features{b}, mean_U(a,b)); 153 | end 154 | -------------------------------------------------------------------------------- /Translation/README.md: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | - python3 3 | - pytorch 0.3 4 | 5 | # Training 6 | We separate the translations into groups; run the corresponding script for training.
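Each line of a script calls `train.py` for one (source, target) feature pair; for example (from `run_0_0.sh`):
> CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda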
7 | 8 | For example: 9 | > ./run_0_0.sh 10 | 11 | # Test 12 | For calculating the translation mAP results, run: 13 | > python test_mAP.py 14 | 15 | For calculating the Undirected Affinity Measurement, run: 16 | > python test_Dis.py -------------------------------------------------------------------------------- /Translation/models.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from torch.nn import init 4 | import functools 5 | 6 | def weightsInit(layer): 7 | classname = layer.__class__.__name__ 8 | if classname.find('Linear') != -1: 9 | init.xavier_normal(layer.weight.data, gain=1) 10 | 11 | class Encoder(nn.Module): 12 | def __init__(self, ngpu, dim): 13 | super(Encoder, self).__init__() 14 | self.ngpu = ngpu 15 | 16 | if dim == 2048: 17 | parameters = [2048, 2048, 2048, 510] 18 | if dim == 512: 19 | parameters = [512, 512, 510] 20 | 21 | seq = [] 22 | for i in range(len(parameters) - 1): 23 | # 24 | seq = seq + [nn.Linear(parameters[i], parameters[i + 1])] + [nn.ELU()] 25 | self.model = nn.Sequential(*seq) 26 | 27 | def forward(self, input): 28 | if isinstance(input.data, torch.cuda.FloatTensor) and self.ngpu > 1: 29 | output = nn.parallel.data_parallel(self.model, input, range(self.ngpu)) 30 | else: 31 | output = self.model(input) 32 | return output 33 | 34 | class Decoder(nn.Module): 35 | def __init__(self, ngpu, dim): 36 | super(Decoder, self).__init__() 37 | self.ngpu = ngpu 38 | 39 | if dim == 2048: 40 | parameters = [510, 2048, 2048, 2048] 41 | if dim == 512: 42 | parameters = [510, 512, 512] 43 | 44 | seq = [] 45 | for i in range(len(parameters) - 1): 46 | seq = seq + [nn.Linear(parameters[i], parameters[i + 1])] 47 | if i == len(parameters) - 2: 48 | seq = seq #+ [nn.Tanh()] 49 | else: 50 | # + [nn.BatchNorm1d(parameters[i + 1])]+ [nn.Dropout(0.5)] 51 | seq = seq + [nn.ELU()] 52 | self.model = nn.Sequential(*seq) 53 | def forward(self, input): 54 | if isinstance(input.data, torch.cuda.FloatTensor) and self.ngpu > 1: 55 | output = nn.parallel.data_parallel(self.model, input, range(self.ngpu)) 56 | else: 57 | output = self.model(input) 58 | 59 | output = nn.functional.normalize(output) 60 | 61 | return output 62 | -------------------------------------------------------------------------------- /Translation/run_0_0.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 2 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 3 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 4 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 5 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 6 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 7 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 8 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 9 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 10 | 
CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 11 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 12 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 13 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 14 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 15 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 16 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 17 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 18 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delffv --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 19 | 20 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 21 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 22 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 23 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 24 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 25 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 26 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 27 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 28 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 29 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 30 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 31 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 32 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 33 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 34 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 35 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 36 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 37 | CUDA_VISIBLE_DEVICES=0 python train.py --ori delfvlad --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 38 | 39 | -------------------------------------------------------------------------------- /Translation/run_0_1.sh: -------------------------------------------------------------------------------- 1 | 2 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori 
hesaffsiftfv --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 3 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 4 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 5 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 6 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 7 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 8 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 9 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 10 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 11 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 12 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 13 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 14 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 15 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 16 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 17 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 18 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 19 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftfv --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 20 | 21 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 22 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 23 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 24 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 25 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 26 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 27 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 28 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 29 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 30 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 
--dst resnetspoc --dst_dimension 2048 --cuda 31 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 32 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 33 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 34 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 35 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 36 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 37 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 38 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori hesaffsiftvlad --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 39 | -------------------------------------------------------------------------------- /Translation/run_0_2.sh: -------------------------------------------------------------------------------- 1 | 2 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 3 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 4 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 5 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 6 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 7 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 8 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 9 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 10 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 11 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 12 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 13 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 14 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 15 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 16 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 17 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 18 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 19 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftfv --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 20 | 21 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst delffv 
--dst_dimension 2048 --cuda 22 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 23 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 24 | # CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 25 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 26 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 27 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 28 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 29 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 30 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 31 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 32 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 33 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 34 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 35 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 36 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 37 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 38 | CUDA_VISIBLE_DEVICES=0 python train.py --ori siftvlad --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda -------------------------------------------------------------------------------- /Translation/run_1_0.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 2 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 3 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 4 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 5 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 6 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 7 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 8 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 9 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 10 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst resnetspoc 
--dst_dimension 2048 --cuda 11 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 12 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 13 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 14 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 15 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 16 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 17 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 18 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetcrow --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 19 | 20 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 21 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 22 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 23 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 24 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 25 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 26 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 27 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 28 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 29 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 30 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 31 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 32 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 33 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 34 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 35 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 36 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 37 | CUDA_VISIBLE_DEVICES=2 python train.py --ori resnetgem --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 38 | -------------------------------------------------------------------------------- /Translation/run_1_1.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst delffv 
--dst_dimension 2048 --cuda 2 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 3 | # CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 4 | # CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 5 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 6 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 7 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 8 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 9 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 10 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 11 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 12 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 13 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 14 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 15 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 16 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 17 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 18 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetmac --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 19 | 20 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 21 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 22 | # CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 23 | # CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 24 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 25 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 26 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 27 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 28 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 29 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 30 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 31 | 
CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 32 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 33 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 34 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 35 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 36 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 37 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrgem --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 38 | -------------------------------------------------------------------------------- /Translation/run_1_2.sh: -------------------------------------------------------------------------------- 1 | 2 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 3 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 4 | # CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 5 | # CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 6 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 7 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 8 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 9 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 10 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 11 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 12 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 13 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 14 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 15 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 16 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 17 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 18 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 19 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetrmac --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda 20 | 21 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst delffv --dst_dimension 2048 --cuda 22 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst delfvlad --dst_dimension 2048 --cuda 
23 | # CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst hesaffsiftfv --dst_dimension 2048 --cuda 24 | # CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 25 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst resnetcrow --dst_dimension 2048 --cuda 26 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst resnetgem --dst_dimension 2048 --cuda 27 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst resnetmac --dst_dimension 2048 --cuda 28 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst resnetrgem --dst_dimension 2048 --cuda 29 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst resnetrmac --dst_dimension 2048 --cuda 30 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst resnetspoc --dst_dimension 2048 --cuda 31 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst siftfv --dst_dimension 2048 --cuda 32 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst siftvlad --dst_dimension 2048 --cuda 33 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst vggcrow --dst_dimension 512 --cuda 34 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst vgggem --dst_dimension 512 --cuda 35 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst vggmac --dst_dimension 512 --cuda 36 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst vggrgem --dst_dimension 512 --cuda 37 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst vggrmac --dst_dimension 512 --cuda 38 | CUDA_VISIBLE_DEVICES=1 python train.py --ori resnetspoc --ori_dimension 2048 --dst vggspoc --dst_dimension 512 --cuda -------------------------------------------------------------------------------- /Translation/run_2_0.sh: -------------------------------------------------------------------------------- 1 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst delffv --dst_dimension 2048 --cuda 2 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst delfvlad --dst_dimension 2048 --cuda 3 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst hesaffsiftfv --dst_dimension 2048 --cuda 4 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 5 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst resnetcrow --dst_dimension 2048 --cuda 6 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst resnetgem --dst_dimension 2048 --cuda 7 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst resnetmac --dst_dimension 2048 --cuda 8 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst resnetrgem --dst_dimension 2048 --cuda 9 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst resnetrmac --dst_dimension 2048 --cuda 10 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst resnetspoc --dst_dimension 2048 --cuda 11 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst siftfv --dst_dimension 2048 --cuda 12 | 
CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst siftvlad --dst_dimension 2048 --cuda 13 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst vggcrow --dst_dimension 512 --cuda 14 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst vgggem --dst_dimension 512 --cuda 15 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst vggmac --dst_dimension 512 --cuda 16 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst vggrgem --dst_dimension 512 --cuda 17 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst vggrmac --dst_dimension 512 --cuda 18 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggcrow --ori_dimension 512 --dst vggspoc --dst_dimension 512 --cuda 19 | 20 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst delffv --dst_dimension 2048 --cuda 21 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst delfvlad --dst_dimension 2048 --cuda 22 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst hesaffsiftfv --dst_dimension 2048 --cuda 23 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 24 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst resnetcrow --dst_dimension 2048 --cuda 25 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst resnetgem --dst_dimension 2048 --cuda 26 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst resnetmac --dst_dimension 2048 --cuda 27 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst resnetrgem --dst_dimension 2048 --cuda 28 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst resnetrmac --dst_dimension 2048 --cuda 29 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst resnetspoc --dst_dimension 2048 --cuda 30 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst siftfv --dst_dimension 2048 --cuda 31 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst siftvlad --dst_dimension 2048 --cuda 32 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst vggcrow --dst_dimension 512 --cuda 33 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst vgggem --dst_dimension 512 --cuda 34 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst vggmac --dst_dimension 512 --cuda 35 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst vggrgem --dst_dimension 512 --cuda 36 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst vggrmac --dst_dimension 512 --cuda 37 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vgggem --ori_dimension 512 --dst vggspoc --dst_dimension 512 --cuda 38 | -------------------------------------------------------------------------------- /Translation/run_2_1.sh: -------------------------------------------------------------------------------- 1 | 2 | 3 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst delffv --dst_dimension 2048 --cuda 4 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst delfvlad --dst_dimension 2048 --cuda 5 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst hesaffsiftfv --dst_dimension 2048 
--cuda 6 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 7 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst resnetcrow --dst_dimension 2048 --cuda 8 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst resnetgem --dst_dimension 2048 --cuda 9 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst resnetmac --dst_dimension 2048 --cuda 10 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst resnetrgem --dst_dimension 2048 --cuda 11 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst resnetrmac --dst_dimension 2048 --cuda 12 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst resnetspoc --dst_dimension 2048 --cuda 13 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst siftfv --dst_dimension 2048 --cuda 14 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst siftvlad --dst_dimension 2048 --cuda 15 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst vggcrow --dst_dimension 512 --cuda 16 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst vgggem --dst_dimension 512 --cuda 17 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst vggmac --dst_dimension 512 --cuda 18 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst vggrgem --dst_dimension 512 --cuda 19 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst vggrmac --dst_dimension 512 --cuda 20 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggmac --ori_dimension 512 --dst vggspoc --dst_dimension 512 --cuda 21 | 22 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst delffv --dst_dimension 2048 --cuda 23 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst delfvlad --dst_dimension 2048 --cuda 24 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst hesaffsiftfv --dst_dimension 2048 --cuda 25 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 26 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst resnetcrow --dst_dimension 2048 --cuda 27 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst resnetgem --dst_dimension 2048 --cuda 28 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst resnetmac --dst_dimension 2048 --cuda 29 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst resnetrgem --dst_dimension 2048 --cuda 30 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst resnetrmac --dst_dimension 2048 --cuda 31 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst resnetspoc --dst_dimension 2048 --cuda 32 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst siftfv --dst_dimension 2048 --cuda 33 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst siftvlad --dst_dimension 2048 --cuda 34 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst vggcrow --dst_dimension 512 --cuda 35 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst vgggem --dst_dimension 512 --cuda 36 | CUDA_VISIBLE_DEVICES=2 python 
train.py --ori vggrgem --ori_dimension 512 --dst vggmac --dst_dimension 512 --cuda 37 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst vggrgem --dst_dimension 512 --cuda 38 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst vggrmac --dst_dimension 512 --cuda 39 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrgem --ori_dimension 512 --dst vggspoc --dst_dimension 512 --cuda 40 | -------------------------------------------------------------------------------- /Translation/run_2_2.sh: -------------------------------------------------------------------------------- 1 | 2 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst delffv --dst_dimension 2048 --cuda 3 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst delfvlad --dst_dimension 2048 --cuda 4 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst hesaffsiftfv --dst_dimension 2048 --cuda 5 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 6 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst resnetcrow --dst_dimension 2048 --cuda 7 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst resnetgem --dst_dimension 2048 --cuda 8 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst resnetmac --dst_dimension 2048 --cuda 9 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst resnetrgem --dst_dimension 2048 --cuda 10 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst resnetrmac --dst_dimension 2048 --cuda 11 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst resnetspoc --dst_dimension 2048 --cuda 12 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst siftfv --dst_dimension 2048 --cuda 13 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst siftvlad --dst_dimension 2048 --cuda 14 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst vggcrow --dst_dimension 512 --cuda 15 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst vgggem --dst_dimension 512 --cuda 16 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst vggmac --dst_dimension 512 --cuda 17 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst vggrgem --dst_dimension 512 --cuda 18 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst vggrmac --dst_dimension 512 --cuda 19 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggrmac --ori_dimension 512 --dst vggspoc --dst_dimension 512 --cuda 20 | 21 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst delffv --dst_dimension 2048 --cuda 22 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst delfvlad --dst_dimension 2048 --cuda 23 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst hesaffsiftfv --dst_dimension 2048 --cuda 24 | # CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst hesaffsiftvlad --dst_dimension 2048 --cuda 25 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst resnetcrow --dst_dimension 2048 --cuda 26 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst resnetgem --dst_dimension 2048 
--cuda 27 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst resnetmac --dst_dimension 2048 --cuda 28 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst resnetrgem --dst_dimension 2048 --cuda 29 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst resnetrmac --dst_dimension 2048 --cuda 30 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst resnetspoc --dst_dimension 2048 --cuda 31 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst siftfv --dst_dimension 2048 --cuda 32 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst siftvlad --dst_dimension 2048 --cuda 33 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst vggcrow --dst_dimension 512 --cuda 34 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst vgggem --dst_dimension 512 --cuda 35 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst vggmac --dst_dimension 512 --cuda 36 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst vggrgem --dst_dimension 512 --cuda 37 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst vggrmac --dst_dimension 512 --cuda 38 | CUDA_VISIBLE_DEVICES=2 python train.py --ori vggspoc --ori_dimension 512 --dst vggspoc --dst_dimension 512 --cuda 39 | -------------------------------------------------------------------------------- /Translation/test_Dis.py: -------------------------------------------------------------------------------- 1 | import os 2 | import argparse 3 | import torch 4 | import scipy.io as sio 5 | import torch.nn as nn 6 | import torch.optim as optim 7 | import torch.backends.cudnn as cudnn 8 | from torch.autograd import Variable 9 | from torch.utils.data import DataLoader 10 | from utils import * 11 | from models import * 12 | import functools 13 | import numpy as np 14 | 15 | def eculideanDis(a, b): 16 | return np.sqrt(np.sum((a - b) ** 2, 1)) 17 | 18 | def test_Holidays(ori_name, dst_name, d2d, ori_dim, dst_dim): 19 | print('----> Loading data......') 20 | Holidays_base_set = dataFromFolder('./data/test/Holidays/base', ori = ori_name, dst = dst_name) 21 | Holidays_base_loader = DataLoader(dataset = Holidays_base_set, num_workers = 4, batch_size = 1, shuffle = False) 22 | 23 | Holidays_query_set = dataFromFolder('./data/test/Holidays/query', ori = ori_name, dst = dst_name) 24 | Holidays_query_loader = DataLoader(dataset = Holidays_query_set, num_workers = 4, batch_size = 1, shuffle = False) 25 | 26 | ngpu = 1 27 | 28 | ori = Variable(torch.FloatTensor(1, ori_dim)).cuda() 29 | dst = Variable(torch.FloatTensor(1, dst_dim)).cuda() 30 | 31 | Es = Encoder(ngpu, ori_dim).cuda() 32 | Es_model_dir = "./checkpoint/Holidays/{}/Es.pth".format(d2d) 33 | print(Es_model_dir) 34 | Es_state_dict = torch.load(Es_model_dir) 35 | Es.load_state_dict(Es_state_dict) 36 | 37 | Et = Encoder(ngpu, dst_dim).cuda() 38 | Et_model_dir = "./checkpoint/Holidays/{}/Et.pth".format(d2d) 39 | print(Et_model_dir) 40 | Et_state_dict = torch.load(Et_model_dir) 41 | Et.load_state_dict(Et_state_dict) 42 | 43 | D = Decoder(ngpu, dst_dim).cuda() 44 | D_model_dir = "./checkpoint/Holidays/{}/D.pth".format(d2d) 45 | print(D_model_dir) 46 | D_state_dict = torch.load(D_model_dir) 47 | D.load_state_dict(D_state_dict) 48 | 49 | Es.eval() 50 | Et.eval() 51 | D.eval() 52 | 53 | cnt = 0.0 54 | total_translation_error = 0.0 55 | 
total_reconstruction_error = 0.0 56 | 57 | # base 58 | for iteration, batch in enumerate(Holidays_base_loader, 1): 59 | ori_cpu, dst_cpu, name = batch[0], batch[1], batch[2] 60 | ori.data.resize_(ori_cpu.size()).copy_(ori_cpu) 61 | dst.data.resize_(dst_cpu.size()).copy_(dst_cpu) 62 | 63 | # translation 64 | ori_latent = Es(ori) 65 | dst_rec = D(ori_latent) 66 | 67 | dst_rec = dst_rec.cpu().data.numpy() 68 | 69 | if not os.path.exists("./result/Holidays/base"): 70 | os.mkdir("./result/Holidays/base") 71 | if not os.path.exists("./result/Holidays/base/{}".format(d2d)): 72 | os.mkdir("./result/Holidays/base/{}".format(d2d)) 73 | sio.savemat("./result/Holidays/base/{}/{}".format(d2d, name[0]), {dst_name : dst_rec}) 74 | 75 | cnt = cnt + 1 76 | # translation error 77 | translation_error = eculideanDis(dst_rec, dst_cpu.numpy()) 78 | 79 | # reconstruction 80 | ori_latent = Et(dst) 81 | dst_rec = D(ori_latent) 82 | 83 | dst_rec = dst_rec.cpu().data.numpy() 84 | 85 | # reconstruction error 86 | reconstruction_error = eculideanDis(dst_rec, dst_cpu.numpy()) 87 | 88 | total_translation_error = total_translation_error + translation_error 89 | total_reconstruction_error = total_reconstruction_error + reconstruction_error 90 | 91 | 92 | # Holidays_now_mAP = computeGroundTruth_2(base_dataset_dir = './result/Holidays', 93 | # query_dataset_dir = './data/test/Holidays', 94 | # base_d2d = d2d, query_d2d = dst_name, 95 | # descriptor_name = dst_name, descriptor_dim = dst_dim) 96 | dis_difference = 1.0 / cnt * (total_translation_error - total_reconstruction_error) 97 | return dis_difference#, Holidays_now_mAP 98 | 99 | def test_Oxford5k(ori_name, dst_name, d2d, ori_dim, dst_dim): 100 | print('----> Loading data......') 101 | Oxford5k_base_set = dataFromFolder('./data/test/Oxford5k/base', ori = ori_name, dst = dst_name) 102 | Oxford5k_base_loader = DataLoader(dataset = Oxford5k_base_set, num_workers = 4, batch_size = 1, shuffle = False) 103 | 104 | Oxford5k_query_set = dataFromFolder('./data/test/Oxford5k/query', ori = ori_name, dst = dst_name) 105 | Oxford5k_query_loader = DataLoader(dataset = Oxford5k_query_set, num_workers = 4, batch_size = 1, shuffle = False) 106 | 107 | ngpu = 1 108 | 109 | ori = Variable(torch.FloatTensor(1, ori_dim)).cuda() 110 | dst = Variable(torch.FloatTensor(1, dst_dim)).cuda() 111 | 112 | Es = Encoder(ngpu, ori_dim).cuda() 113 | Es_model_dir = "./checkpoint/Oxford5k/{}/Es.pth".format(d2d) 114 | Es_state_dict = torch.load(Es_model_dir) 115 | Es.load_state_dict(Es_state_dict) 116 | 117 | Et = Encoder(ngpu, dst_dim).cuda() 118 | Et_model_dir = "./checkpoint/Oxford5k/{}/Et.pth".format(d2d) 119 | Et_state_dict = torch.load(Et_model_dir) 120 | Et.load_state_dict(Et_state_dict) 121 | 122 | D = Decoder(ngpu, dst_dim).cuda() 123 | D_model_dir = "./checkpoint/Oxford5k/{}/D.pth".format(d2d) 124 | D_state_dict = torch.load(D_model_dir) 125 | D.load_state_dict(D_state_dict) 126 | 127 | Es.eval() 128 | Et.eval() 129 | D.eval() 130 | 131 | cnt = 0.0 132 | total_translation_error = 0.0 133 | total_reconstruction_error = 0.0 134 | 135 | # base 136 | for iteration, batch in enumerate(Oxford5k_base_loader, 1): 137 | ori_cpu, dst_cpu, name = batch[0], batch[1], batch[2] 138 | ori.data.resize_(ori_cpu.size()).copy_(ori_cpu) 139 | dst.data.resize_(dst_cpu.size()).copy_(dst_cpu) 140 | 141 | # translation 142 | ori_latent = Es(ori) 143 | dst_rec = D(ori_latent) 144 | 145 | dst_rec = dst_rec.cpu().data.numpy() 146 | 147 | if not os.path.exists("./result/Oxford5k/base"): 148 | 
os.mkdir("./result/Oxford5k/base") 149 | if not os.path.exists("./result/Oxford5k/base/{}".format(d2d)): 150 | os.mkdir("./result/Oxford5k/base/{}".format(d2d)) 151 | sio.savemat("./result/Oxford5k/base/{}/{}".format(d2d, name[0]), {dst_name : dst_rec}) 152 | 153 | cnt = cnt + 1 154 | # translation error 155 | translation_error = eculideanDis(dst_rec, dst_cpu.numpy()) 156 | 157 | # reconstruction 158 | ori_latent = Et(dst) 159 | dst_rec = D(ori_latent) 160 | 161 | dst_rec = dst_rec.cpu().data.numpy() 162 | 163 | # reconstruction error 164 | reconstruction_error = eculideanDis(dst_rec, dst_cpu.numpy()) 165 | 166 | total_translation_error = total_translation_error + translation_error 167 | total_reconstruction_error = total_reconstruction_error + reconstruction_error 168 | 169 | 170 | # Oxford5k_now_mAP = computeGroundTruth_1(base_dataset_dir = './result/Oxford5k', 171 | # query_dataset_dir = './data/test/Oxford5k', 172 | # base_d2d = d2d, query_d2d = dst_name, 173 | # descriptor_name = dst_name, descriptor_dim = dst_dim) 174 | dis_difference = 1.0 / cnt * (total_translation_error - total_reconstruction_error) 175 | return dis_difference#, Oxford5k_now_mAP 176 | 177 | def test_Paris6k(ori_name, dst_name, d2d, ori_dim, dst_dim): 178 | print('----> Loading data......') 179 | Paris6k_base_set = dataFromFolder('./data/test/Paris6k/base', ori = ori_name, dst = dst_name) 180 | Paris6k_base_loader = DataLoader(dataset = Paris6k_base_set, num_workers = 4, batch_size = 1, shuffle = False) 181 | 182 | Paris6k_query_set = dataFromFolder('./data/test/Paris6k/query', ori = ori_name, dst = dst_name) 183 | Paris6k_query_loader = DataLoader(dataset = Paris6k_query_set, num_workers = 4, batch_size = 1, shuffle = False) 184 | 185 | ngpu = 1 186 | 187 | ori = Variable(torch.FloatTensor(1, ori_dim)).cuda() 188 | dst = Variable(torch.FloatTensor(1, dst_dim)).cuda() 189 | 190 | Es = Encoder(ngpu, ori_dim).cuda() 191 | Es_model_dir = "./checkpoint/Paris6k/{}/Es.pth".format(d2d) 192 | Es_state_dict = torch.load(Es_model_dir) 193 | Es.load_state_dict(Es_state_dict) 194 | 195 | Et = Encoder(ngpu, dst_dim).cuda() 196 | Et_model_dir = "./checkpoint/Paris6k/{}/Et.pth".format(d2d) 197 | Et_state_dict = torch.load(Et_model_dir) 198 | Et.load_state_dict(Et_state_dict) 199 | 200 | D = Decoder(ngpu, dst_dim).cuda() 201 | D_model_dir = "./checkpoint/Paris6k/{}/D.pth".format(d2d) 202 | D_state_dict = torch.load(D_model_dir) 203 | D.load_state_dict(D_state_dict) 204 | 205 | Es.eval() 206 | Et.eval() 207 | D.eval() 208 | 209 | cnt = 0.0 210 | total_translation_error = 0.0 211 | total_reconstruction_error = 0.0 212 | 213 | # base 214 | for iteration, batch in enumerate(Paris6k_base_loader, 1): 215 | ori_cpu, dst_cpu, name = batch[0], batch[1], batch[2] 216 | ori.data.resize_(ori_cpu.size()).copy_(ori_cpu) 217 | dst.data.resize_(dst_cpu.size()).copy_(dst_cpu) 218 | 219 | # translation 220 | ori_latent = Es(ori) 221 | dst_rec = D(ori_latent) 222 | 223 | dst_rec = dst_rec.cpu().data.numpy() 224 | 225 | if not os.path.exists("./result/Paris6k/base"): 226 | os.mkdir("./result/Paris6k/base") 227 | if not os.path.exists("./result/Paris6k/base/{}".format(d2d)): 228 | os.mkdir("./result/Paris6k/base/{}".format(d2d)) 229 | sio.savemat("./result/Paris6k/base/{}/{}".format(d2d, name[0]), {dst_name : dst_rec}) 230 | 231 | cnt = cnt + 1 232 | # translation error 233 | translation_error = eculideanDis(dst_rec, dst_cpu.numpy()) 234 | 235 | # reconstruction 236 | ori_latent = Et(dst) 237 | dst_rec = D(ori_latent) 238 | 239 | dst_rec = 
dst_rec.cpu().data.numpy() 240 | 241 | # reconstruction error 242 | reconstruction_error = eculideanDis(dst_rec, dst_cpu.numpy()) 243 | 244 | total_translation_error = total_translation_error + translation_error 245 | total_reconstruction_error = total_reconstruction_error + reconstruction_error 246 | 247 | 248 | # Paris6k_now_mAP = computeGroundTruth_1(base_dataset_dir = './result/Paris6k', 249 | # query_dataset_dir = './data/test/Paris6k', 250 | # base_d2d = d2d, query_d2d = dst_name, 251 | # descriptor_name = dst_name, descriptor_dim = dst_dim) 252 | dis_difference = 1.0 / cnt * (total_translation_error - total_reconstruction_error) 253 | return dis_difference#, Paris6k_now_mAP 254 | 255 | if __name__ == "__main__": 256 | #"hesaffsiftfv", "hesaffsiftvlad", 257 | descs = ["delffv", "delfvlad", 258 | "resnetcrow", "resnetgem", "resnetmac", "resnetrgem", "resnetrmac", "resnetspoc", 259 | "vggcrow", "vgggem", "vggmac", "vggrgem", "vggrmac", "vggspoc", 260 | "siftfv", "siftvlad"] 261 | dims = {"delffv":2048, "delfvlad":2048, "hesaffsiftfv":2048, "hesaffsiftvlad":2048, "siftfv":2048, "siftvlad":2048, 262 | "resnetcrow":2048, "resnetgem":2048, "resnetmac":2048, "resnetrgem":2048, "resnetrmac":2048, "resnetspoc":2048, 263 | "vggcrow":512, "vgggem":512, "vggmac":512, "vggrgem":512, "vggrmac":512, "vggspoc":512} 264 | 265 | # mAP_filename = './result/mAP_result_Paris6k.txt' 266 | # mAP_file = open(mAP_filename, 'a') 267 | 268 | dis_filename = './result/dis_result.txt' 269 | dis_file = open(dis_filename, 'a') 270 | 271 | re = "Holidays Result:\n" 272 | # mAP_file.write(re) 273 | dis_file.write(re) 274 | for ori in descs: 275 | re = ori + ': ' 276 | # mAP_file.write(re) 277 | for dst in descs: 278 | d2d = ori + '_' + dst 279 | ori_dim = dims[ori] 280 | dst_dim = dims[dst] 281 | dis = test_Holidays(ori, dst, d2d, ori_dim, dst_dim) 282 | 283 | # re = "{:.2f} ".format(mAP * 100) 284 | # mAP_file.write(re) 285 | # print(re) 286 | 287 | re = "{:.8f} ".format(dis[0]) 288 | dis_file.write(re) 289 | print(re) 290 | 291 | # mAP_file.write('\n') 292 | dis_file.write('\n') 293 | 294 | re = "Oxford5k Result:\n" 295 | # mAP_file.write(re) 296 | dis_file.write(re) 297 | for ori in descs: 298 | re = ori + ': ' 299 | # mAP_file.write(re) 300 | for dst in descs: 301 | d2d = ori + '_' + dst 302 | ori_dim = dims[ori] 303 | dst_dim = dims[dst] 304 | dis = test_Oxford5k(ori, dst, d2d, ori_dim, dst_dim) 305 | 306 | # re = "{:.2f} ".format(mAP * 100) 307 | # mAP_file.write(re) 308 | # print(re) 309 | 310 | re = "{:.8f} ".format(dis[0]) 311 | dis_file.write(re) 312 | print(re) 313 | 314 | # mAP_file.write('\n') 315 | dis_file.write('\n') 316 | 317 | re = "Paris6k Result:\n" 318 | # mAP_file.write(re) 319 | dis_file.write(re) 320 | for ori in descs: 321 | re = ori + ': ' 322 | # mAP_file.write(re) 323 | for dst in descs: 324 | d2d = ori + '_' + dst 325 | ori_dim = dims[ori] 326 | dst_dim = dims[dst] 327 | dis = test_Paris6k(ori, dst, d2d, ori_dim, dst_dim) 328 | 329 | # re = "{:.2f} ".format(mAP * 100) 330 | # mAP_file.write(re) 331 | # print(re) 332 | 333 | re = "{:.8f} ".format(dis[0]) 334 | dis_file.write(re) 335 | print(re) 336 | 337 | # mAP_file.write('\n') 338 | dis_file.write('\n') 339 | 340 | 341 | # mAP_file.close() 342 | dis_file.close() 343 | -------------------------------------------------------------------------------- /Translation/train.py: -------------------------------------------------------------------------------- 1 | import os 2 | import argparse 3 | import torch 4 | import scipy.io as sio 5 | import 
torch.nn as nn 6 | import torch.optim as optim 7 | import torch.backends.cudnn as cudnn 8 | from torch.autograd import Variable 9 | from torch.utils.data import DataLoader 10 | from models import * 11 | from utils import * 12 | import functools 13 | import numpy as np 14 | 15 | ### Training parameter settings 16 | parser = argparse.ArgumentParser(description = 'D2D translation implementation') 17 | parser.add_argument('--ori', required = True, help = 'name of the source descriptor, e.g. fv') 18 | parser.add_argument('--ori_dimension', type = int, default = 2048, help = 'dimension of the ori descriptor') 19 | parser.add_argument('--dst', required = True, help = 'name of the target descriptor, e.g. fv') 20 | parser.add_argument('--dst_dimension', type = int, default = 2048, help = 'dimension of the dst descriptor') 21 | parser.add_argument('--nEpochs', type = int, default = 10, help = 'number of epochs to train the model') 22 | parser.add_argument('--lr', type = float, default = 0.000001, help = 'Learning Rate. Default = 0.000001') 23 | parser.add_argument('--beta1', type = float, default = 0.5, help = 'beta1 for adam. default = 0.5') 24 | parser.add_argument('--cuda', action = 'store_true', help = 'use cuda?') 25 | parser.add_argument('--threads', type = int, default = 8, help = 'number of threads for data loader to use') 26 | parser.add_argument('--seed', type = int, default = 2333, help = 'random seed to use. Default=2333') 27 | opt = parser.parse_args() 28 | opt.d2d = opt.ori + '_' + opt.dst 29 | print(opt) 30 | 31 | ### batch size 32 | batch_size = 5 # tried 50 / 10 / 5; best: 5 33 | 34 | ### cuda setting 35 | if opt.cuda and not torch.cuda.is_available(): 36 | raise Exception("No GPU found, please run without --cuda") 37 | ### uses the inbuilt cudnn auto-tuner to find the fastest convolution algorithms 38 | cudnn.benchmark = True 39 | 40 | ### random seed 41 | torch.manual_seed(opt.seed) 42 | if opt.cuda: 43 | torch.cuda.manual_seed(opt.seed) 44 | 45 | ### Load data 46 | print('----> Loading data......') 47 | train_set = dataFromFolder('./data/train/Landmarks/base', ori = opt.ori, dst = opt.dst) 48 | training_data_loader = DataLoader(dataset = train_set, num_workers = opt.threads, batch_size = batch_size, shuffle = True) 49 | 50 | ### Init models 51 | print('----> Initialize models......') 52 | ngpu = 1 53 | ori_dim = opt.ori_dimension 54 | dst_dim = opt.dst_dimension 55 | 56 | Es = Encoder(ngpu, ori_dim) 57 | Es.apply(weightsInit) 58 | 59 | Et = Encoder(ngpu, dst_dim) 60 | Et.apply(weightsInit) 61 | 62 | D = Decoder(ngpu, dst_dim) 63 | D.apply(weightsInit) 64 | 65 | ### Init setting 66 | loss_MSE = nn.MSELoss() 67 | 68 | ori = Variable(torch.FloatTensor(batch_size, ori_dim)) 69 | dst = Variable(torch.FloatTensor(batch_size, dst_dim)) 70 | 71 | if opt.cuda: 72 | Es = Es.cuda() 73 | Et = Et.cuda() 74 | 75 | D = D.cuda() 76 | loss_MSE = loss_MSE.cuda() 77 | ori = ori.cuda() 78 | dst = dst.cuda() 79 | 80 | ### optimizer 81 | optimizer_Es = optim.Adam(Es.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999), weight_decay = 1e-5)#1 82 | optimizer_Et = optim.Adam(Et.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999), weight_decay = 1e-5) 83 | optimizer_D = optim.Adam(D.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999), weight_decay = 1e-5) 84 | 85 | def train(epoch): 86 | Es.train() 87 | Et.train() 88 | 89 | D.train() 90 | sz = len(training_data_loader) 91 | for iteration, batch in enumerate(training_data_loader, 1): 92 | ori_cpu, dst_cpu = batch[0], batch[1] 93 | ori.data.resize_(ori_cpu.size()).copy_(ori_cpu) 94 | dst.data.resize_(dst_cpu.size()).copy_(dst_cpu) 95 | optimizer_Es.zero_grad(); optimizer_Et.zero_grad(); optimizer_D.zero_grad() # clear gradients accumulated in the previous iteration 96 | 
ori_latent = Es(ori) 97 | dst_rec = D(ori_latent) 98 | 99 | dst_latent = Et(dst) 100 | dst_self_rec = D(dst_latent) 101 | 102 | err_g_ori = loss_MSE(dst_rec, dst) 103 | err_g_dst = loss_MSE(dst_self_rec, dst) 104 | err_g = err_g_ori + err_g_dst 105 | err_g.backward() 106 | out_ori = err_g_ori.data.mean() 107 | out_dst = err_g_dst.data.mean() 108 | optimizer_Es.step() 109 | optimizer_Et.step() 110 | optimizer_D.step() 111 | if iteration % 1000 == 0: 112 | print("===> Epoch[{}]({}/{}), loss_ori: [{:.10f}], loss_dst: [{:.10f}]".format(epoch, iteration, sz, out_ori, out_dst)) 113 | 114 | def test_Oxford5k(epoch, Oxford5k_gnd_mAP): 115 | Es.eval() 116 | D.eval() 117 | # base 118 | for iteration, batch in enumerate(Oxford5k_base_loader, 1): 119 | ori_cpu, dst_cpu, name = batch[0], batch[1], batch[2] 120 | ori.data.resize_(ori_cpu.size()).copy_(ori_cpu) 121 | dst.data.resize_(dst_cpu.size()).copy_(dst_cpu) 122 | 123 | ori_latent = Es(ori) 124 | dst_rec = D(ori_latent) 125 | 126 | dst_rec = dst_rec.cpu().data.numpy() 127 | ori_latent = ori_latent.cpu().data.numpy() 128 | 129 | if not os.path.exists("./result"): 130 | os.mkdir("./result") 131 | if not os.path.exists("./result/Oxford5k"): 132 | os.mkdir("./result/Oxford5k") 133 | if not os.path.exists("./result/Oxford5k/base"): 134 | os.mkdir("./result/Oxford5k/base") 135 | if not os.path.exists("./result/Oxford5k/base/{}".format(opt.d2d)): 136 | os.mkdir("./result/Oxford5k/base/{}".format(opt.d2d)) 137 | sio.savemat("./result/Oxford5k/base/{}/{}".format(opt.d2d, name[0]), {opt.dst : dst_rec}) 138 | 139 | Oxford5k_now_mAP = computeGroundTruth_1( 140 | base_dataset_dir = './result/Oxford5k', 141 | query_dataset_dir = './data/test/Oxford5k', 142 | base_d2d = opt.d2d, query_d2d = opt.dst, 143 | descriptor_name = opt.dst, descriptor_dim = dst_dim) 144 | resultFileName = './result/Oxford5k/' + opt.d2d + '.txt' 145 | resultFile = open(resultFileName, 'a') 146 | re = "{} (epoch {}), mAP: {:.4f} / {:.4f}.\n".format(opt.d2d, epoch, Oxford5k_gnd_mAP, Oxford5k_now_mAP) 147 | resultFile.write(re) 148 | resultFile.close() 149 | 150 | return Oxford5k_now_mAP 151 | 152 | def test_Paris6k(epoch, Paris6k_gnd_mAP): 153 | Es.eval() 154 | D.eval() 155 | # base 156 | for iteration, batch in enumerate(Paris6k_base_loader, 1): 157 | ori_cpu, dst_cpu, name = batch[0], batch[1], batch[2] 158 | ori.data.resize_(ori_cpu.size()).copy_(ori_cpu) 159 | dst.data.resize_(dst_cpu.size()).copy_(dst_cpu) 160 | 161 | ori_latent = Es(ori) 162 | dst_rec = D(ori_latent) 163 | 164 | dst_rec = dst_rec.cpu().data.numpy() 165 | ori_latent = ori_latent.cpu().data.numpy() 166 | 167 | if not os.path.exists("./result"): 168 | os.mkdir("./result") 169 | if not os.path.exists("./result/Paris6k"): 170 | os.mkdir("./result/Paris6k") 171 | if not os.path.exists("./result/Paris6k/base"): 172 | os.mkdir("./result/Paris6k/base") 173 | if not os.path.exists("./result/Paris6k/base/{}".format(opt.d2d)): 174 | os.mkdir("./result/Paris6k/base/{}".format(opt.d2d)) 175 | sio.savemat("./result/Paris6k/base/{}/{}".format(opt.d2d, name[0]), {opt.dst : dst_rec}) 176 | 177 | Paris6k_now_mAP = computeGroundTruth_1( 178 | base_dataset_dir = './result/Paris6k', 179 | query_dataset_dir = './data/test/Paris6k', 180 | base_d2d = opt.d2d, query_d2d = opt.dst, 181 | descriptor_name = opt.dst, descriptor_dim = dst_dim) 182 | resultFileName = './result/Paris6k/' + opt.d2d + '.txt' 183 | resultFile = open(resultFileName, 'a') 184 | re = "{} (epoch {}), mAP: {:.4f} / {:.4f}.\n".format(opt.d2d, epoch, Paris6k_gnd_mAP, 
Paris6k_now_mAP) 185 | resultFile.write(re) 186 | resultFile.close() 187 | 188 | return Paris6k_now_mAP 189 | 190 | def test_Holidays(epoch, Holidays_gnd_mAP): 191 | Es.eval() 192 | D.eval() 193 | # base 194 | for iteration, batch in enumerate(Holidays_base_loader, 1): 195 | ori_cpu, dst_cpu, name = batch[0], batch[1], batch[2] 196 | ori.data.resize_(ori_cpu.size()).copy_(ori_cpu) 197 | dst.data.resize_(dst_cpu.size()).copy_(dst_cpu) 198 | 199 | ori_latent = Es(ori) 200 | dst_rec = D(ori_latent) 201 | 202 | dst_rec = dst_rec.cpu().data.numpy() 203 | ori_latent = ori_latent.cpu().data.numpy() 204 | 205 | if not os.path.exists("./result"): 206 | os.mkdir("./result") 207 | if not os.path.exists("./result/Holidays"): 208 | os.mkdir("./result/Holidays") 209 | if not os.path.exists("./result/Holidays/base"): 210 | os.mkdir("./result/Holidays/base") 211 | if not os.path.exists("./result/Holidays/base/{}".format(opt.d2d)): 212 | os.mkdir("./result/Holidays/base/{}".format(opt.d2d)) 213 | sio.savemat("./result/Holidays/base/{}/{}".format(opt.d2d, name[0]), {opt.dst : dst_rec}) 214 | 215 | Holidays_now_mAP = computeGroundTruth_2( 216 | base_dataset_dir = './result/Holidays', 217 | query_dataset_dir = './data/test/Holidays', 218 | base_d2d = opt.d2d, query_d2d = opt.dst, 219 | descriptor_name = opt.dst, descriptor_dim = dst_dim) 220 | resultFileName = './result/Holidays/' + opt.d2d + '.txt' 221 | resultFile = open(resultFileName, 'a') 222 | re = "{} (epoch {}), mAP: {:.4f} / {:.4f}.\n".format(opt.d2d, epoch, Holidays_gnd_mAP, Holidays_now_mAP) 223 | resultFile.write(re) 224 | resultFile.close() 225 | 226 | return Holidays_now_mAP 227 | 228 | def checkpoint(dataset): 229 | Es_path = "checkpoint/{}/{}/Es.pth".format(dataset, opt.d2d) 230 | Et_path = "checkpoint/{}/{}/Et.pth".format(dataset, opt.d2d) 231 | D_path = "checkpoint/{}/{}/D.pth".format(dataset, opt.d2d) 232 | 233 | if not os.path.exists("checkpoint"): 234 | os.mkdir("checkpoint") 235 | 236 | if not os.path.exists("checkpoint/{}".format(dataset)): 237 | os.mkdir("checkpoint/{}".format(dataset)) 238 | 239 | if not os.path.exists("checkpoint/{}/{}".format(dataset, opt.d2d)): 240 | os.mkdir("checkpoint/{}/{}".format(dataset, opt.d2d)) 241 | 242 | torch.save(Es.state_dict(), Es_path) 243 | torch.save(Et.state_dict(), Et_path) 244 | torch.save(D.state_dict(), D_path) 245 | 246 | # print("Checkpoint {} saved.".format(epoch)) 247 | 248 | if __name__ == "__main__": 249 | # "hesaffsiftfv":0.5691, "hesaffsiftvlad":0.5827, 250 | Oxford5k_mAPs = {"delffv":0.7338, "delfvlad":0.7531, "siftfv":0.3625, "siftvlad":0.4049, 251 | "resnetcrow":0.6173, "resnetgem":0.8447, "resnetmac":0.6082, "resnetrgem":0.8460, "resnetrmac":0.6846, "resnetspoc":0.6236, 252 | "vggcrow":0.6838, "vgggem":0.8271, "vggmac":0.6097, "vggrgem":0.8230, "vggrmac":0.7084, "vggspoc":0.6643} 253 | # "hesaffsiftfv":0.5509, "hesaffsiftvlad":0.5295, 254 | Paris6k_mAPs = {"delffv":0.8306, "delfvlad":0.8254, "siftfv":0.3691, "siftvlad":0.4149, 255 | "resnetcrow":0.7546, "resnetgem":0.9187, "resnetmac":0.7774, "resnetrgem":0.9190, "resnetrmac":0.8300, "resnetspoc":0.7675, 256 | "vggcrow":0.7979, "vgggem":0.8685, "vggmac":0.7265, "vggrgem":0.8733, "vggrmac":0.8354, "vggspoc":0.7847} 257 | # "hesaffsiftfv":0.6466, "hesaffsiftvlad":0.6405, 258 | Holidays_mAPs = {"delffv":0.8342, "delfvlad":0.8461, "siftfv":0.6177, "siftvlad":0.6392, 259 | "resnetcrow":0.8638, "resnetgem":0.8908, "resnetmac":0.8853, "resnetrgem":0.8932, "resnetrmac":0.8908, "resnetspoc":0.8657, 260 | "vggcrow":0.8317, "vgggem":0.8457, 
"vggmac":0.7418, "vggrgem":0.8506, "vggrmac":0.8350, "vggspoc":0.8338} 261 | 262 | Oxford5k_gnd_mAP = Oxford5k_mAPs[opt.ori]#computeGroundTruth_1('./data/test/Oxford5k', './data/test/Oxford5k', opt.dst, opt.dst, opt.dst, dst_dim)# 263 | Paris6k_gnd_mAP = Paris6k_mAPs[opt.ori]#computeGroundTruth_1('./data/test/Paris6k', './data/test/Paris6k', opt.dst, opt.dst, opt.dst, dst_dim)# 264 | Holidays_gnd_mAP = Holidays_mAPs[opt.ori]#computeGroundTruth_2('./data/test/Holidays', './data/test/Holidays', opt.dst, opt.dst, opt.dst, dst_dim)# 265 | 266 | max_Oxford5k_mAP = 0.0 267 | max_Paris6k_mAP = 0.0 268 | max_Holidays_mAP = 0.0 269 | 270 | for epoch in range(1, opt.nEpochs + 1): 271 | train(epoch) 272 | 273 | if epoch == opt.nEpochs: 274 | checkpoint('Oxford5k') 275 | checkpoint('Paris6k') 276 | checkpoint('Holidays') 277 | -------------------------------------------------------------------------------- /Translation/utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | import scipy.io as sio 4 | import os 5 | from os import listdir 6 | from os.path import join 7 | import torch.utils.data as data 8 | from scipy.io import loadmat 9 | 10 | ### Load data set 11 | # read data from data set files 12 | class dataFromFolder(data.Dataset): 13 | # init 14 | def __init__(self, data_dir, ori, dst): 15 | super(dataFromFolder, self).__init__() 16 | self.desc_ori_path = join(data_dir, ori) 17 | self.desc_dst_path = join(data_dir, dst) 18 | self.file_names = [x for x in listdir(self.desc_ori_path)] 19 | self.ori = ori 20 | self.dst = dst 21 | 22 | # load descriptor pair 23 | def __getitem__(self, index): 24 | file_name = self.file_names[index] 25 | desc_ori_tmp = sio.loadmat(join(self.desc_ori_path, file_name)) 26 | desc_dst_tmp = sio.loadmat(join(self.desc_dst_path, file_name)) 27 | 28 | desc_ori = torch.from_numpy(desc_ori_tmp[self.ori]) 29 | desc_dst = torch.from_numpy(desc_dst_tmp[self.dst]) 30 | 31 | return desc_ori.squeeze(), desc_dst.squeeze(), file_name # torch 32 | 33 | # return data set num 34 | def __len__(self): 35 | return len(self.file_names) 36 | 37 | def computeGroundTruth_1(base_dataset_dir = './data/test/Oxford5k', query_dataset_dir = './data/test/Oxford5k', base_d2d = 'vgg16', query_d2d = 'vgg16', descriptor_name = 'vgg16', descriptor_dim = 4096): 38 | base_dir = base_dataset_dir + '/base/' + base_d2d 39 | query_dir = query_dataset_dir + '/query/' + query_d2d 40 | 41 | base_files = os.listdir(base_dir) 42 | bases = np.zeros((len(base_files), descriptor_dim)) 43 | names = [] 44 | top_k = 0 45 | for file in base_files: 46 | desc_tmp = sio.loadmat(base_dir + '/' + file) 47 | desc = desc_tmp[descriptor_name].reshape(-1) 48 | bases[top_k, :] = desc 49 | tmp = file.split('.') 50 | name = tmp[0] 51 | # print(name) 52 | names.append(name) 53 | top_k = top_k + 1 54 | # print(top_k) 55 | ## 56 | query_files = os.listdir(query_dir) 57 | id = 0 58 | mAP = 0.0 59 | for file in query_files: 60 | desc_tmp = sio.loadmat(query_dir + '/' + file) 61 | query = file.split('.')[0] 62 | resultFileName = query_dataset_dir + '/resdat/' + query + '_' + query_d2d + '.txt' 63 | print(resultFileName) 64 | resultFile = open(resultFileName, 'w') 65 | desc = desc_tmp[descriptor_name].reshape(-1) 66 | # L2 67 | # dist = np.sum((desc - bases) ** 2, 1) 68 | # cos 69 | dist = -np.dot(bases, desc) 70 | 71 | index = np.argsort(dist) 72 | for i in range(top_k): 73 | re = names[index[i]] + '\n' 74 | resultFile.write(re) 75 | # print(re) 76 | # 
print(dist[index[i]]) 77 | id = id + 1 78 | resultFile.close() # flush the ranked list to disk before compute_ap reads it 79 | t = os.popen(query_dataset_dir + '/compute_ap ' + query_dataset_dir + '/gnd/' + query + ' ' + resultFileName) 80 | ap = float(t.read()) 81 | mAP = mAP + ap 82 | # print("Running {}: the ap is {}.".format(id, ap)) 83 | 84 | print("The mAP {} : {}.".format(base_d2d, mAP / id)) 85 | return mAP / id 86 | 87 | def computeGroundTruth_2(base_dataset_dir = './data/test/Oxford5k', query_dataset_dir = './data/test/Oxford5k', base_d2d = 'vgg16', query_d2d = 'vgg16', descriptor_name = 'vgg16', descriptor_dim = 4096): 88 | base_dir = base_dataset_dir + '/base/' + base_d2d 89 | query_dir = query_dataset_dir + '/query/' + query_d2d 90 | 91 | base_files = os.listdir(base_dir) 92 | bases = np.zeros((len(base_files), descriptor_dim)) 93 | names = [] 94 | top_k = 0 95 | 96 | for file in base_files: 97 | desc_tmp = sio.loadmat(base_dir + '/' + file) 98 | desc = desc_tmp[descriptor_name].reshape(-1) 99 | bases[top_k, :] = desc 100 | tmp = file.split('.') 101 | name = tmp[0] + '.' + tmp[1] 102 | # print(name) 103 | names.append(name) 104 | top_k = top_k + 1 105 | 106 | query_files = os.listdir(query_dir) 107 | id = 0 108 | 109 | resultFileName = query_dataset_dir + '/resdat/' + query_d2d + '.dat' 110 | resultFile = open(resultFileName, 'w') 111 | print(resultFileName) 112 | 113 | for file in query_files: 114 | desc_tmp = sio.loadmat(query_dir + '/' + file) 115 | tmp = file.split('.') 116 | 117 | re = tmp[0] + '.' + tmp[1] + ' ' 118 | resultFile.write(re) 119 | 120 | desc = desc_tmp[descriptor_name].reshape(-1) 121 | # L2 122 | # dist = np.sum((desc - bases) ** 2, 1) 123 | # cos 124 | dist = -np.dot(bases, desc) 125 | 126 | index = np.argsort(dist) 127 | for i in range(top_k): 128 | re = str(i) + ' ' + names[index[i]] 129 | if i == top_k - 1: 130 | re = re + '\n' 131 | else: 132 | re = re + ' ' 133 | resultFile.write(re) 134 | id = id + 1 135 | # print("Running {}.".format(id)) 136 | # break 137 | resultFile.close() 138 | # print('python ' + query_dataset_dir + '/holidays_map.py ' + resultFileName + ' ' + query_dataset_dir + '/holidays_images.dat') 139 | t = os.popen('python2 ' + query_dataset_dir + '/holidays_map.py ' + resultFileName + ' ' + query_dataset_dir + '/holidays_images.dat') 140 | # print(t.read()) 141 | mAP = float(t.read()) 142 | print("The mAP {} : {}.".format(base_d2d, mAP)) 143 | return mAP 144 | --------------------------------------------------------------------------------
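A note on usage, not part of the repository files above: the scripts apply a trained translator only in bulk (train.py during training, test_Dis.py for evaluation), so the following minimal Python sketch shows the same translation step for a single descriptor. The Encoder/Decoder interfaces, checkpoint layout and .mat key convention are taken from train.py, test_Dis.py and utils.py above; the descriptor pair (vggcrow -> resnetgem), the Oxford5k checkpoint, the input file some_image.mat and the availability of a CUDA device are placeholder assumptions.

import torch
import scipy.io as sio
from models import Encoder, Decoder

ngpu = 1
ori_name, ori_dim = 'vggcrow', 512      # assumed source descriptor
dst_name, dst_dim = 'resnetgem', 2048   # assumed target descriptor
d2d = ori_name + '_' + dst_name

# load the source-side encoder and the shared decoder saved by train.py's checkpoint()
Es = Encoder(ngpu, ori_dim).cuda()
Es.load_state_dict(torch.load('./checkpoint/Oxford5k/{}/Es.pth'.format(d2d)))
D = Decoder(ngpu, dst_dim).cuda()
D.load_state_dict(torch.load('./checkpoint/Oxford5k/{}/D.pth'.format(d2d)))
Es.eval()
D.eval()

# a stored descriptor is a 1 x dim matrix saved under its descriptor name (see utils.dataFromFolder)
desc = sio.loadmat('./data/test/Oxford5k/base/{}/some_image.mat'.format(ori_name))[ori_name]
x = torch.from_numpy(desc).float().view(1, -1).cuda()
with torch.no_grad():
    translated = D(Es(x)).cpu().numpy()  # 1 x dst_dim descriptor in the target space
print(translated.shape)

test_Dis.py above runs this same Es -> D path over whole datasets and compares the resulting translation error with the Et -> D self-reconstruction error.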