-
Notifications
You must be signed in to change notification settings - Fork 42
/
run_aachen-v11.sh
executable file
·103 lines (91 loc) · 5.52 KB
/
run_aachen-v11.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
# Run this script in docker,
# but first pull the most recent version.
# docker pull kapture/kapture-localization
# docker run --runtime=nvidia -it --rm --volume <my_data>:<my_data> kapture/kapture-localization
# once the docker container is launched, go to your working directory of your choice (all data will be stored there)
# and run this script from there (of course you can also change WORKING_DIR=${PWD} to something else and run the script from somewhere else)

# 0a) Define paths and params
# All expansions are quoted so the pipeline also works when the working
# directory path contains spaces (ShellCheck SC2086).
PYTHONBIN=python3.6                      # python interpreter inside the docker image
WORKING_DIR="${PWD}"                     # all intermediate and final data lives under here
DATASETS_PATH="${WORKING_DIR}/datasets"  # where kapture datasets are installed
DATASET=Aachen-Day-Night-v1.1
mkdir -p "${DATASETS_PATH}"
TOPK=20 # number of retrieved images for mapping and localization
KPTS=20000 # number of local features to extract
# 0b) Get extraction code for local and global features
# ! skip if already done !

# Deep Image retrieval - AP-GeM (global features)
pip3 install scikit-learn==0.22 torchvision==0.5.0 gdown tqdm
cd "${WORKING_DIR}" || exit 1
git clone https://github.com/naver/deep-image-retrieval.git
# Guard each cd: if the clone failed, the model would otherwise be
# downloaded and unpacked into the wrong directory.
cd deep-image-retrieval || exit 1
mkdir -p dirtorch/data/
cd dirtorch/data/ || exit 1
gdown --id 1r76NLHtJsH-Ybfda4aLkUIoW3EEsi25I # downloads a pre-trained model of AP-GeM
unzip Resnet101-AP-GeM-LM18.pt.zip
rm -f Resnet101-AP-GeM-LM18.pt.zip # plain file: -f is enough, no -r needed

# R2D2 (local features)
cd "${WORKING_DIR}" || exit 1
git clone https://github.com/naver/r2d2.git
# 1) Download dataset
# Note that you will be asked to accept or decline the license terms before download.
# -p: the directory already exists (created in step 0a); plain mkdir would fail here.
mkdir -p "${DATASETS_PATH}"
kapture_download_dataset.py --install_path "${DATASETS_PATH}" update
kapture_download_dataset.py --install_path "${DATASETS_PATH}" install "${DATASET}_mapping" "${DATASET}_query_day" "${DATASET}_query_night"
# Remove the keypoints and 3D points that come with the dataset (this is Aachen specific).
# ${DATASETS_PATH:?} aborts instead of expanding to "/" if the variable is ever empty.
rm -rf "${DATASETS_PATH:?}/${DATASET}/mapping/reconstruction"
# Merge day and night queries into one query kapture.
kapture_merge.py -v debug \
  -i "${DATASETS_PATH}/${DATASET}/query_day" "${DATASETS_PATH}/${DATASET}/query_night" \
  -o "${DATASETS_PATH}/${DATASET}/query" \
  --image_transfer link_relative
# 2) Create temporal mapping and query sets (they will be modified)
# Only the small *.txt metadata files are copied; the (large) image folders
# are symlinked so no image data is duplicated.
mkdir -p "${WORKING_DIR}/${DATASET}/mapping/sensors"
cp "${DATASETS_PATH}/${DATASET}/mapping/sensors/"*.txt "${WORKING_DIR}/${DATASET}/mapping/sensors/"
ln -s "${DATASETS_PATH}/${DATASET}/mapping/sensors/records_data" "${WORKING_DIR}/${DATASET}/mapping/sensors/records_data"
mkdir -p "${WORKING_DIR}/${DATASET}/query/sensors"
cp "${DATASETS_PATH}/${DATASET}/query/sensors/"*.txt "${WORKING_DIR}/${DATASET}/query/sensors/"
ln -s "${DATASETS_PATH}/${DATASET}/query/sensors/records_data" "${WORKING_DIR}/${DATASET}/query/sensors/records_data"

# 3) Merge mapping and query kaptures (this will make it easier to extract the local and global features and it will be used for the localization step)
kapture_merge.py -v debug \
  -i "${WORKING_DIR}/${DATASET}/mapping" "${WORKING_DIR}/${DATASET}/query" \
  -o "${WORKING_DIR}/${DATASET}/map_plus_query" \
  --image_transfer link_relative
# 4) Extract global features (we will use AP-GeM here)
# Guard the cd so the extractor never runs from the wrong directory.
cd "${WORKING_DIR}/deep-image-retrieval" || exit 1
"${PYTHONBIN}" -m dirtorch.extract_kapture --kapture-root "${WORKING_DIR}/${DATASET}/map_plus_query/" --checkpoint dirtorch/data/Resnet101-AP-GeM-LM18.pt --gpu 0
# Move features to the layout expected by the kapture pipeline scripts.
mkdir -p "${WORKING_DIR}/${DATASET}/global_features/Resnet101-AP-GeM-LM18/global_features"
mv "${WORKING_DIR}/${DATASET}/map_plus_query/reconstruction/global_features/Resnet101-AP-GeM-LM18/"* "${WORKING_DIR}/${DATASET}/global_features/Resnet101-AP-GeM-LM18/global_features/"
rm -rf "${WORKING_DIR}/${DATASET}/map_plus_query/reconstruction/global_features/Resnet101-AP-GeM-LM18"

# 5) Extract local features (we will use R2D2 here)
cd "${WORKING_DIR}/r2d2" || exit 1
"${PYTHONBIN}" extract_kapture.py --model models/r2d2_WASF_N8_big.pt --kapture-root "${WORKING_DIR}/${DATASET}/map_plus_query/" --min-scale 0.3 --min-size 128 --max-size 9999 --top-k "${KPTS}"
# Move features to the layout expected by the kapture pipeline scripts.
mkdir -p "${WORKING_DIR}/${DATASET}/local_features/r2d2_WASF_N8_big/descriptors"
mv "${WORKING_DIR}/${DATASET}/map_plus_query/reconstruction/descriptors/r2d2_WASF_N8_big/"* "${WORKING_DIR}/${DATASET}/local_features/r2d2_WASF_N8_big/descriptors/"
mkdir -p "${WORKING_DIR}/${DATASET}/local_features/r2d2_WASF_N8_big/keypoints"
mv "${WORKING_DIR}/${DATASET}/map_plus_query/reconstruction/keypoints/r2d2_WASF_N8_big/"* "${WORKING_DIR}/${DATASET}/local_features/r2d2_WASF_N8_big/keypoints/"
# 6) mapping pipeline
# These names must match the feature types produced in steps 4 and 5,
# because they are used to build the feature directory paths below.
LOCAL=r2d2_WASF_N8_big       # local feature type (step 5)
GLOBAL=Resnet101-AP-GeM-LM18 # global feature type (step 4)
kapture_pipeline_mapping.py -v debug -f \
  -i "${WORKING_DIR}/${DATASET}/mapping" \
  -kpt "${WORKING_DIR}/${DATASET}/local_features/${LOCAL}/keypoints" \
  -desc "${WORKING_DIR}/${DATASET}/local_features/${LOCAL}/descriptors" \
  -gfeat "${WORKING_DIR}/${DATASET}/global_features/${GLOBAL}/global_features" \
  -matches "${WORKING_DIR}/${DATASET}/local_features/${LOCAL}/NN_no_gv/matches" \
  -matches-gv "${WORKING_DIR}/${DATASET}/local_features/${LOCAL}/NN_colmap_gv/matches" \
  --colmap-map "${WORKING_DIR}/${DATASET}/colmap-sfm/${LOCAL}/${GLOBAL}" \
  --topk "${TOPK}"

# 7) localization pipeline
kapture_pipeline_localize.py -v debug -f \
  -i "${WORKING_DIR}/${DATASET}/mapping" \
  --query "${WORKING_DIR}/${DATASET}/query" \
  -kpt "${WORKING_DIR}/${DATASET}/local_features/${LOCAL}/keypoints" \
  -desc "${WORKING_DIR}/${DATASET}/local_features/${LOCAL}/descriptors" \
  -gfeat "${WORKING_DIR}/${DATASET}/global_features/${GLOBAL}/global_features" \
  -matches "${WORKING_DIR}/${DATASET}/local_features/${LOCAL}/NN_no_gv/matches" \
  -matches-gv "${WORKING_DIR}/${DATASET}/local_features/${LOCAL}/NN_colmap_gv/matches" \
  --colmap-map "${WORKING_DIR}/${DATASET}/colmap-sfm/${LOCAL}/${GLOBAL}" \
  -o "${WORKING_DIR}/${DATASET}/colmap-localize/${LOCAL}/${GLOBAL}" \
  --topk "${TOPK}" \
  --config 2