diff --git a/README.md b/README.md
index 5a262ec..202c2c8 100644
--- a/README.md
+++ b/README.md
@@ -1,16 +1,16 @@
-# CNNImageRetrieval: Training and evaluating CNNs for Image Retrieval
+# CNN Image Retrieval in MatConvNet: Training and evaluating CNNs for Image Retrieval
-**CNNImageRetrieval** is a MATLAB toolbox that implements the training and testing of the approach described in our papers:
+This is a MATLAB toolbox that implements the training and testing of the approach described in our papers:
-> *Fine-tuning CNN Image Retrieval with No Human Annotation*,
-> Radenović F., Tolias G., Chum O.,
-> arXiv 2017 [[arXiv](https://arxiv.org/abs/1711.02512)]
+**Fine-tuning CNN Image Retrieval with No Human Annotation**,
+Radenović F., Tolias G., Chum O.,
+TPAMI 2018 [[arXiv](https://arxiv.org/abs/1711.02512)]
-> *CNN Image Retrieval Learns from BoW: Unsupervised Fine-Tuning with Hard Examples*,
-> Radenović F., Tolias G., Chum O.,
-> ECCV 2016 [[arXiv](http://arxiv.org/abs/1604.02426)]
+**CNN Image Retrieval Learns from BoW: Unsupervised Fine-Tuning with Hard Examples**,
+Radenović F., Tolias G., Chum O.,
+ECCV 2016 [[arXiv](http://arxiv.org/abs/1604.02426)]
-
+
## What is it?
@@ -28,7 +28,7 @@ In order to run this toolbox you will need:
1. MatConvNet MATLAB toolbox version [1.0-beta25](http://www.vlfeat.org/matconvnet/download/matconvnet-1.0-beta25.tar.gz)
1. All the rest (data + networks) is automatically downloaded with our scripts
-## Execution
+## Execution (training and testing)
Run the following script in MATLAB:
@@ -38,16 +38,28 @@ Run the following script in MATLAB:
>> train_cnnimageretrieval;
>> test_cnnimageretrieval;
```
+See `[CNNIMAGERETRIEVAL_ROOT]/examples/train_cnnimageretrieval.m` and `[CNNIMAGERETRIEVAL_ROOT]/examples/test_cnnimageretrieval.m` for additional details.
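+
+For example, to evaluate only on the revisited benchmarks, adjust the dataset list and GPU selection near the top of the test script (a minimal sketch of the relevant options; the variable names are taken from the script itself, and an empty `use_gpu` falls back to the CPU):
+
+```
+% in [CNNIMAGERETRIEVAL_ROOT]/examples/test_cnnimageretrieval.m
+test_datasets = {'roxford5k', 'rparis6k'}; % evaluate on the revisited benchmarks only
+use_gpu = [1];                             % array of GPU IDs to use; set to [] to run on the CPU
+```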
-## Citation
+We provide pre-trained networks trained with the same parameters as in our ECCV 2016 and TPAMI 2018 papers. The table below compares their performance with that of the networks trained with our [CNN Image Retrieval in PyTorch](https://github.com/filipradenovic/cnnimageretrieval-pytorch), on the original and the revisited Oxford and Paris benchmarks (mAP in %; M and H denote the Medium and Hard protocols of the revisited benchmarks):
-Related publications:
+| Model | Oxford | Paris | ROxf (M) | RPar (M) | ROxf (H) | RPar (H) |
+|:------|:------:|:------:|:------:|:------:|:------:|:------:|
+| VGG16-GeM (MatConvNet) | 87.9 | 87.7 | 61.9 | 69.3 | 33.7 | 44.3 |
+| VGG16-GeM (PyTorch) | 87.2 | 87.8 | 60.5 | 69.3 | 32.4 | 44.3 |
+| ResNet101-GeM (MatConvNet) | 87.8 | 92.7 | 64.7 | 77.2 | 38.5 | 56.3 |
+| ResNet101-GeM (PyTorch) | 88.2 | 92.5 | 65.3 | 76.6 | 40.0 | 55.2 |
+
+**Note**: Data and networks used for training and testing are automatically downloaded when using the example scripts.
+
+## Related publications
+
+### Training (fine-tuning) convolutional neural networks
```
-@inproceedings{Radenovic-arXiv17a,
- title={Fine-tuning {CNN} Image Retrieval with No Human Annotation},
- author={Radenovi{\'c}, Filip and Tolias, Giorgos and Chum, Ond{\v{r}}ej},
- booktitle = {arXiv:1711.02512},
- year={2017}
+@article{RTC18,
+ title = {Fine-tuning {CNN} Image Retrieval with No Human Annotation},
+  author = {Radenovi{\'c}, F. and Tolias, G. and Chum, O.},
+ journal = {TPAMI},
+ year = {2018}
}
```
```
@@ -57,4 +69,14 @@ Related publications:
booktitle = {ECCV},
year = {2016}
}
+```
+
+### Revisited benchmarks for Oxford and Paris ('roxford5k' and 'rparis6k')
+```
+@inproceedings{RITAC18,
+ author = {Radenovi{\'c}, F. and Iscen, A. and Tolias, G. and Avrithis, Y. and Chum, O.},
+ title = {Revisiting Oxford and Paris: Large-Scale Image Retrieval Benchmarking},
+ booktitle = {CVPR},
+ year = {2018}
+}
```
\ No newline at end of file
diff --git a/examples/test_cnnimageretrieval.m b/examples/test_cnnimageretrieval.m
index 36c6423..9aec7bd 100644
--- a/examples/test_cnnimageretrieval.m
+++ b/examples/test_cnnimageretrieval.m
@@ -1,5 +1,5 @@
% TEST_CNNIMAGERETRIEVAL Code to evaluate (not train) the methods presented in the papers:
-% F. Radenovic, G. Tolias, O. Chum, Fine-tuning CNN Image Retrieval with No Human Annotation, arXiv 2017
+% F. Radenovic, G. Tolias, O. Chum, Fine-tuning CNN Image Retrieval with No Human Annotation, TPAMI 2018
% F. Radenovic, G. Tolias, O. Chum, CNN Image Retrieval Learns from BoW: Unsupervised Fine-Tuning with Hard Examples, ECCV 2016
%
% Authors: F. Radenovic, G. Tolias, O. Chum. 2017.
@@ -18,17 +18,17 @@
download_test(data_root);
% Set test options
-test_datasets = {'oxford5k', 'paris6k'}; % list of datasets to evaluate on
+test_datasets = {'oxford5k', 'paris6k', 'roxford5k', 'rparis6k'}; % list of datasets to evaluate on
test_imdim = 1024; % choose test image dimensionality
use_ms = 1; % use multi-scale representation, otherwise use single-scale
use_rvec = 0; % use regional representation (R-MAC, R-GeM), otherwise use global (MAC, GeM)
 use_gpu = [1]; % use GPUs (array of GPUIDs), if empty use CPU
% Choose ECCV16 fine-tuned CNN network
% network_file = fullfile(data_root, 'networks', 'retrieval-SfM-30k', 'retrievalSfM30k-siamac-alex.mat');
% network_file = fullfile(data_root, 'networks', 'retrieval-SfM-30k', 'retrievalSfM30k-siamac-vgg.mat');
-% Choose arXiv17 fine-tuned CNN network
+% Choose TPAMI18 fine-tuned CNN network
% network_file = fullfile(data_root, 'networks', 'retrieval-SfM-30k', 'retrievalSfM30k-gem-alex.mat');
% network_file = fullfile(data_root, 'networks', 'retrieval-SfM-120k', 'retrievalSfM120k-gem-vgg.mat');
network_file = fullfile(data_root, 'networks', 'retrieval-SfM-120k', 'retrievalSfM120k-gem-resnet101.mat');
@@ -177,14 +177,32 @@
qvecsLw = whitenapply(qvecs, Lw.m, Lw.P); % apply whitening on query descriptors
fprintf('>> %s: Retrieval...\n', test_datasets{d});
- % raw descriptors
- sim = vecs'*qvecs;
- [sim, ranks] = sort(sim, 'descend');
- map = compute_map (ranks, cfg.gnd);
- fprintf('>> %s: mAP = %.4f, without whiten\n', test_datasets{d}, map);
- % with learned whitening
- sim = vecsLw'*qvecsLw;
- [sim, ranks] = sort(sim, 'descend');
- map = compute_map (ranks, cfg.gnd);
- fprintf('>> %s: mAP = %.4f, with whiten\n', test_datasets{d}, map);
+ if strcmp(test_datasets{d}, 'oxford5k') || strcmp(test_datasets{d}, 'paris6k')
+ % % raw descriptors
+ % sim = vecs'*qvecs;
+ % [sim, ranks] = sort(sim, 'descend');
+ % map = compute_map (ranks, cfg.gnd);
+ % fprintf('>> %s: mAP = %.4f, without whiten\n', test_datasets{d}, map);
+ % with learned whitening
+ sim = vecsLw'*qvecsLw;
+ [sim, ranks] = sort(sim, 'descend');
+ map = compute_map (ranks, cfg.gnd);
+ fprintf('>> %s: mAP = %.4f\n', test_datasets{d}, map);
+ elseif strcmp(test_datasets{d}, 'roxford5k') || strcmp(test_datasets{d}, 'rparis6k')
+ sim = vecsLw'*qvecsLw;
+ [sim, ranks] = sort(sim, 'descend');
+ % evaluate ranks
+ ks = [1, 5, 10];
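+            % Revisited evaluation protocols (see the CVPR 2018 "Revisiting Oxford and Paris" paper):
+            %   Easy (E)  : only easy images count as positives; hard images are ignored (added to junk)
+            %   Medium (M): easy and hard images count as positives
+            %   Hard (H)  : only hard images count as positives; easy images are ignored (added to junk)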
+ % search for easy (E setup)
+ for i = 1:numel(cfg.gnd), gnd(i).ok = [cfg.gnd(i).easy]; gnd(i).junk = [cfg.gnd(i).junk, cfg.gnd(i).hard]; end
+ [mapE, apsE, mprE, prsE] = compute_map (ranks, gnd, ks);
+ % search for easy & hard (M setup)
+ for i = 1:numel(cfg.gnd), gnd(i).ok = [cfg.gnd(i).easy, cfg.gnd(i).hard]; gnd(i).junk = cfg.gnd(i).junk; end
+ [mapM, apsM, mprM, prsM] = compute_map (ranks, gnd, ks);
+ % search for hard (H setup)
+ for i = 1:numel(cfg.gnd), gnd(i).ok = [cfg.gnd(i).hard]; gnd(i).junk = [cfg.gnd(i).junk, cfg.gnd(i).easy]; end
+ [mapH, apsH, mprH, prsH] = compute_map (ranks, gnd, ks);
+ fprintf('>> %s: mAP E: %.2f, M: %.2f, H: %.2f\n', test_datasets{d}, 100*mapE, 100*mapM, 100*mapH);
+ fprintf('>> %s: mP@k[%d %d %d] E: [%.2f %.2f %.2f], M: [%.2f %.2f %.2f], H: [%.2f %.2f %.2f]\n', test_datasets{d}, ks(1), ks(2), ks(3), 100*mprE, 100*mprM, 100*mprH);
+ end
end
diff --git a/examples/train_cnnimageretrieval.m b/examples/train_cnnimageretrieval.m
index b3ab2b1..9cb3613 100644
--- a/examples/train_cnnimageretrieval.m
+++ b/examples/train_cnnimageretrieval.m
@@ -1,5 +1,5 @@
% TRAIN_CNNIMAGERETRIEVAL Code to train the methods presented in the papers:
-% F. Radenovic, G. Tolias, O. Chum, Fine-tuning CNN Image Retrieval with No Human Annotation, arXiv 2017
+% F. Radenovic, G. Tolias, O. Chum, Fine-tuning CNN Image Retrieval with No Human Annotation, TPAMI 2018
% F. Radenovic, G. Tolias, O. Chum, CNN Image Retrieval Learns from BoW: Unsupervised Fine-Tuning with Hard Examples, ECCV 2016
%
% Note: The method has been re-coded since our ECCV 2016 paper and minor differences in performance might appear.
@@ -20,7 +20,7 @@
%-------------------------------------------------------------------------------
-% Reproduce training from arXiv17 paper: Fine-tuning CNN Image Retrieval ...
+% Reproduce training from TPAMI18 paper: Fine-tuning CNN Image Retrieval ...
%-------------------------------------------------------------------------------
%% RESNET101 -------------------------------------------------------------------
@@ -35,7 +35,7 @@
% Set train parameters
% We provide 2 pools of training images comprising 30k and 120k images
-% The latter is used in our arXiv17 paper
+% The latter is used in our TPAMI18 paper
opts.train.dbPath = fullfile(data_root, 'train', 'dbs', 'retrieval-SfM-120k.mat');
opts.train.batchSize = 5;
opts.train.numEpochs = 30;
@@ -77,7 +77,7 @@
% Set train parameters
% We provide 2 pools of training images comprising 30k and 120k images
-% The latter is used in our arXiv17 paper
+% The latter is used in our TPAMI18 paper
opts.train.dbPath = fullfile(data_root, 'train', 'dbs', 'retrieval-SfM-120k.mat');
opts.train.batchSize = 5;
opts.train.numEpochs = 30;
diff --git a/utils/compute_map.m b/utils/compute_map.m
index d9137db..b78631f 100644
--- a/utils/compute_map.m
+++ b/utils/compute_map.m
@@ -1,67 +1,113 @@
-function [map, aps] = compute_map (ranks, gnd, verbose)
-% COMPUTE_MAP computes the mAP for a given set of returned results.
+function [map, aps, pr, prs] = compute_map (ranks, gnd, kappas)
+% COMPUTE_MAP computes the mAP for a given set of returned results.
%
-% mAP = compute_map (RANKS, GND);
+% Usage:
+% map = compute_map (ranks, gnd)
+%    computes mean average precision (map) only
%
-% RANKS starts from 1, size(ranks) = db_size X #queries.
-% Junk results (e.g., the query itself) should be declared in the gnd stuct array
+% [map, aps, pr, prs] = compute_map (ranks, gnd, kappas)
+% computes mean average precision (map), average precision (aps) for each query
+% computes mean precision at kappas (pr), precision at kappas (prs) for each query
%
-% Authors: G. Tolias, Y. Avrithis, H. Jegou. 2013.
+% Notes:
+% 1) ranks starts from 1, size(ranks) = db_size X #queries
+%    2) The junk results (e.g., the query itself) should be declared in the gnd struct array
+% 3) If there are no positive images for some query, that query is excluded from the evaluation
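+%    4) gnd(i).ok lists the ids of the positive (relevant) database images for query i,
+%       gnd(i).junk (optional field) the ids of database images to be ignored for query i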
-if nargin < 3
- verbose = false;
-end
+  if ~exist('kappas', 'var'), kappas = 0; end
-map = 0;
-nq = numel (gnd); % number of queries
-aps = zeros (nq, 1);
+ nq = numel (gnd); % number of queries
+ % init map and pr
+ map = 0;
+ aps = zeros (nq, 1);
+ pr = zeros(1, numel(kappas));
+ prs = zeros (nq, numel(kappas));
+ nempty = 0;
+
+ for i = 1:nq
+ qgnd = gnd(i).ok;
+
+ if isempty(qgnd) % no positive at all, skip from the average
+ aps (i) = nan;
+ prs (i, :) = nan;
+ nempty = nempty + 1;
+ continue;
+ end
+
+ if isfield (gnd(i), 'junk')
+ qgndj = gnd(i).junk;
+ else
+ qgndj = [];
+ end
+
+ % positions of positive and junk images
+ [~, pos] = intersect (ranks (:,i), qgnd);
+ [~, junk] = intersect (ranks (:,i), qgndj);
+
+ pos = sort(pos);
+ junk = sort(junk);
+
+ k = 0;
+ ij = 1;
+
+ if length (junk)
+ % decrease positions of positives based on the number of junk images appearing before them
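+      % e.g., pos = [2 5] and junk = [1 3 4] give adjusted pos = [1 2], since junk results do not occupy ranks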
+ ip = 1;
+ while ip <= numel (pos)
+
+      while ( ij <= length (junk) && pos (ip) > junk (ij) )
+ k = k + 1;
+ ij = ij + 1;
+ end
+
+ pos (ip) = pos (ip) - k;
+ ip = ip + 1;
+ end
+ end
+
+ % compute ap
+ ap = score_ap_from_ranks1 (pos, length (qgnd));
+ map = map + ap;
+ aps (i) = ap;
+
+ % compute precision@k
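+    % if a query has fewer than kappas(j) positives, kq falls back to the position of its last positive,
+    % so precision is then measured there instead of at rank kappas(j)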
+ for j = 1:numel(kappas)
+ kq = min(max(pos), kappas(j));
+ prs(i, j) = numel(find(pos <= kq)) ./ kq;
+ end
+ pr = pr + prs(i, :);
-for i = 1:nq
- qgnd = gnd(i).ok;
- if isfield (gnd(i), 'junk')
- qgndj = gnd(i).junk;
- else
- qgndj = [];
- end
-
- % positions of positive and junk images
- [~, pos] = intersect (ranks (:,i), qgnd);
- [~, junk] = intersect (ranks (:,i), qgndj);
-
- pos = sort(pos);
- junk = sort(junk);
-
- k = 0;
- ij = 1;
-
- if length (junk)
- % decrease positions of positives based on the number of junk images appearing before them
- ip = 1;
- while ip <= numel (pos)
-
- while ( ij <= length (junk) & pos (ip) > junk (ij) )
- k = k + 1;
- ij = ij + 1;
- end
-
- pos (ip) = pos (ip) - k;
- ip = ip + 1;
- end
- end
-
- ap = compute_ap (pos, length (qgnd));
-
- if verbose
- fprintf ('query no %d -> gnd = ', i);
- fprintf ('%d ', qgnd);
- fprintf ('\n tp ranks = ');
- fprintf ('%d ', pos);
- fprintf (' -> ap=%.3f\n', ap);
end
- map = map + ap;
- aps (i) = ap;
+
+ map = map / (nq-nempty);
+ pr = pr / (nq-nempty);
end
-map = map / nq;
+
+% This function computes the AP for a query
+function ap = score_ap_from_ranks1 (ranks, nres)
+
+% number of images ranked by the system
+nimgranks = length (ranks);
+ranks = ranks - 1;
+
+% accumulate trapezoids in PR-plot
+ap = 0;
+
+recall_step = 1 / nres;
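+% the j-th positive result (0-based rank r) contributes a trapezoid of width 1/nres whose
+% parallel sides are the precision just before it, (j-1)/r, and just after it, j/(r+1)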
+
+for j = 1:nimgranks
+ rank = ranks(j);
+
+ if rank == 0
+ precision_0 = 1.0;
+ else
+ precision_0 = (j - 1) / rank;
+ end
+
+ precision_1 = j / (rank + 1);
+ ap = ap + (precision_0 + precision_1) * recall_step / 2;
end
+
+end
\ No newline at end of file
diff --git a/utils/configdataset.m b/utils/configdataset.m
index 72c705c..0548258 100644
--- a/utils/configdataset.m
+++ b/utils/configdataset.m
@@ -20,6 +20,18 @@
params.qext = '.jpg';
params.dir_data= [dir_main 'paris6k/'];
cfg = config_paris (params);
+
+ case 'roxford5k'
+ params.ext = '.jpg';
+ params.qext = '.jpg';
+ params.dir_data = [dir_main 'roxford5k/'];
+ cfg = config_roxford (params);
+
+ case 'rparis6k'
+ params.ext = '.jpg';
+ params.qext = '.jpg';
+ params.dir_data = [dir_main 'rparis6k/'];
+ cfg = config_rparis (params);
otherwise, error ('Unkown dataset %s\n', dataset);
end
@@ -59,6 +71,30 @@
cfg.n = length (cfg.imlist); % number of database images
cfg.nq = length (cfg.qidx); % number of query images
+%----------------------------------------------------
+function cfg = config_roxford (cfg)
+ % Load groundtruth
+%----------------------------------------------------
+ cfg.gnd_fname = [cfg.dir_data 'gnd_roxford5k.mat'];
+ load (cfg.gnd_fname); % Retrieve list of image names, ground truth and query numbers
+ cfg.imlist = imlist;
+ cfg.qimlist = qimlist;
+ cfg.gnd = gnd;
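+  % the revisited gnd struct provides per-query 'easy', 'hard', and 'junk' image lists
+  % (used to build the E/M/H evaluation setups in test_cnnimageretrieval.m)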
+ cfg.n = length (cfg.imlist); % number of database images
+ cfg.nq = length (cfg.qimlist); % number of query images
+
+%----------------------------------------------------
+function cfg = config_rparis (cfg)
+ % Load groundtruth
+%----------------------------------------------------
+ cfg.gnd_fname = [cfg.dir_data 'gnd_rparis6k.mat'];
+ load (cfg.gnd_fname); % Retrieve list of image names, ground truth and query numbers
+ cfg.imlist = imlist;
+ cfg.qimlist = qimlist;
+ cfg.gnd = gnd;
+ cfg.n = length (cfg.imlist); % number of database images
+ cfg.nq = length (cfg.qimlist); % number of query images
+
%----------------------------------------------------
function fname = config_imname (cfg, i)
%----------------------------------------------------
diff --git a/utils/download_test.m b/utils/download_test.m
index 9a1d840..467a6b0 100644
--- a/utils/download_test.m
+++ b/utils/download_test.m
@@ -3,8 +3,10 @@ function download_test(data_dir)
%
% download_test(DATA_ROOT) checks if the data and networks necessary for running the testing script exist.
% If not it downloads it in the folder structure:
-% DATA_ROOT/test/oxford5k/ : folder with oxford5k images
-% DATA_ROOT/test/paris6k/ : folder with paris6k images
+% DATA_ROOT/test/oxford5k/ : folder with Oxford5k images and ground truth file
+% DATA_ROOT/test/paris6k/ : folder with Paris6k images and ground truth file
+% DATA_ROOT/test/roxford5k/ : folder with Oxford5k images and revisited ground truth file
+% DATA_ROOT/test/rparis6k/ : folder with Paris6k images and revisited ground truth file
% DATA_ROOT/networks/retrieval-SfM-30k/ : CNN models fine-tuned for image retrieval using retrieval-SfM-30k data
% DATA_ROOT/networks/retrieval-SfM-120k/ : CNN models fine-tuned for image retrieval using retrieval-SfM-120k data
@@ -13,14 +15,14 @@ function download_test(data_dir)
mkdir(data_dir);
end
- % Create test folder if it does not exist
- train_dir = fullfile(data_dir, 'test');
- if ~exist(train_dir, 'dir')
- mkdir(train_dir);
+ % Create datasets folder if it does not exist
+ datasets_dir = fullfile(data_dir, 'test');
+ if ~exist(datasets_dir, 'dir')
+ mkdir(datasets_dir);
end
% Download datasets folders test/DATASETNAME/
- datasets = {'oxford5k', 'paris6k'};
+ datasets = {'oxford5k', 'paris6k', 'roxford5k', 'rparis6k'};
for di = 1:numel(datasets)
dataset = datasets{di};
switch dataset
@@ -30,35 +32,53 @@ function download_test(data_dir)
case 'paris6k'
src_dir = fullfile('http://www.robots.ox.ac.uk/~vgg/data/parisbuildings');
dl_files = {'paris_1.tgz', 'paris_2.tgz'};
+ case 'roxford5k'
+ src_dir = fullfile('http://www.robots.ox.ac.uk/~vgg/data/oxbuildings');
+ dl_files = {'oxbuild_images.tgz'};
+ case 'rparis6k'
+ src_dir = fullfile('http://www.robots.ox.ac.uk/~vgg/data/parisbuildings');
+ dl_files = {'paris_1.tgz', 'paris_2.tgz'};
otherwise
error ('Unkown dataset %s\n', dataset);
end
- dst_dir = fullfile(data_dir, 'test', dataset, 'jpg');
+
+ dst_dir = fullfile(datasets_dir, dataset, 'jpg');
if ~exist(dst_dir, 'dir')
- fprintf('>> Dataset %s directory does not exist. Creating: %s\n', dataset, dst_dir);
- mkdir(dst_dir);
- for dli = 1:numel(dl_files)
- dl_file = dl_files{dli};
- src_file = fullfile(src_dir, dl_file);
- dst_file = fullfile(dst_dir, dl_file);
- fprintf('>> Downloading dataset %s archive %s...\n', dataset, dl_file);
- system(sprintf('wget %s -O %s', src_file, dst_file));
- fprintf('>> Extracting dataset %s archive %s...\n', dataset, dl_file);
- % create tmp folder
- dst_dir_tmp = fullfile(dst_dir, 'tmp');
- system(sprintf('mkdir %s', dst_dir_tmp));
- % extract in tmp folder
- system(sprintf('tar -zxf %s -C %s', dst_file, dst_dir_tmp));
- % remove all (possible) subfolders by moving only files in dst_dir
- system(sprintf('find %s -type f -exec mv -i {} %s \\;', dst_dir_tmp, dst_dir));
- % remove tmp folder
- system(sprintf('rm -rf %s', dst_dir_tmp));
- fprintf('>> Extracted, deleting dataset %s archive %s...\n', dataset, dl_file);
- system(sprintf('rm %s', dst_file));
+ % for oxford and paris download images
+ if strcmp(dataset, 'oxford5k') || strcmp(dataset, 'paris6k')
+ fprintf('>> Dataset %s directory does not exist. Creating: %s\n', dataset, dst_dir);
+ mkdir(dst_dir);
+ for dli = 1:numel(dl_files)
+ dl_file = dl_files{dli};
+ src_file = fullfile(src_dir, dl_file);
+ dst_file = fullfile(dst_dir, dl_file);
+ fprintf('>> Downloading dataset %s archive %s...\n', dataset, dl_file);
+ system(sprintf('wget %s -O %s', src_file, dst_file));
+ fprintf('>> Extracting dataset %s archive %s...\n', dataset, dl_file);
+ % create tmp folder
+ dst_dir_tmp = fullfile(dst_dir, 'tmp');
+ system(sprintf('mkdir %s', dst_dir_tmp));
+ % extract in tmp folder
+ system(sprintf('tar -zxf %s -C %s', dst_file, dst_dir_tmp));
+ % remove all (possible) subfolders by moving only files in dst_dir
+ system(sprintf('find %s -type f -exec mv -i {} %s \\;', dst_dir_tmp, dst_dir));
+ % remove tmp folder
+ system(sprintf('rm -rf %s', dst_dir_tmp));
+ fprintf('>> Extracted, deleting dataset %s archive %s...\n', dataset, dl_file);
+ system(sprintf('rm %s', dst_file));
+ end
+ % for roxford and rparis just make sym links
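+        % (relies on 'oxford5k' and 'paris6k' appearing earlier in the datasets list, so their jpg folders already exist)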
+ elseif strcmp(dataset, 'roxford5k') || strcmp(dataset, 'rparis6k')
+ fprintf('>> Dataset %s directory does not exist. Creating: %s\n', dataset, dst_dir);
+ dataset_old = dataset(2:end);
+ dst_dir_old = fullfile(datasets_dir, dataset_old, 'jpg');
+ mkdir(fullfile(datasets_dir, dataset));
+ system(sprintf('ln -s %s %s', dst_dir_old, dst_dir));
+            fprintf('>> Created symbolic link from %s/jpg to %s/jpg\n', dataset_old, dataset);
end
end
gnd_src_dir = fullfile('http://cmp.felk.cvut.cz/cnnimageretrieval/data', 'test', dataset);
- gnd_dst_dir = fullfile(data_dir, 'test', dataset);
+ gnd_dst_dir = fullfile(datasets_dir, dataset);
gnd_dl_file = sprintf('gnd_%s.mat', dataset);
gnd_src_file = fullfile(gnd_src_dir, gnd_dl_file);
gnd_dst_file = fullfile(gnd_dst_dir, gnd_dl_file);