config.yaml (forked from SJTU-ViSYS/FeatureBooster)
# We recommend using xxx+Boost-F and xxx+Boost-B to denote the float and binary boosted features of xxx.
# Name_of_your_boosted_feat:   # a name like ORB+Boost-B
#   keypoint_dim:              # the dimension of the geometric properties
#   keypoint_encoder:          # a list like [32, 64, 128, 256]
#   descriptor_dim:            # the dimension of the input descriptor
#   descriptor_encoder:        # a list like [512, 256]
#   Attentional_layers:        # the number of attentional layers
#   last_activation:           # the type of the last activation
#   l2_normalization:          # whether to use L2 normalization
#   output_dim:                # the dimension of the boosted feature
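# For illustration only (not part of the original config): a hypothetical binary booster
# for a 512-bit descriptor from an AKAZE-style extractor could be filled in like this,
# mirroring the Boost-B entries below. The name and all values here are assumptions.
# AKAZE+Boost-B:                          # hypothetical entry name
#   keypoint_dim: 4                       # e.g. x, y, scale, orientation
#   keypoint_encoder: [32, 64, 128, 256]
#   descriptor_dim: 512                   # dimension of the raw input descriptor
#   descriptor_encoder: [512, 256]
#   Attentional_layers: 4
#   last_activation: 'tanh'               # tanh before binarization, as in the Boost-B entries below
#   l2_normalization: false
#   output_dim: 256                       # dimension of the boosted binary descriptor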
ORB+Boost-B:
keypoint_dim: 4
keypoint_encoder: [32, 64, 128, 256]
descriptor_dim: 256
descriptor_encoder: [512, 256]
Attentional_layers: 4
last_activation: 'tanh'
l2_normalization: false
output_dim: 256
SIFT+Boost-F:
keypoint_dim: 4
keypoint_encoder: [32, 64, 128, 128]
descriptor_encoder: [256, 128]
descriptor_dim: 128
Attentional_layers: 4
last_activation:
l2_normalization: true
output_dim: 128
SIFT+Boost-B:
keypoint_dim: 4
keypoint_encoder: [32, 64, 128, 128]
descriptor_encoder: [256, 128]
descriptor_dim: 128
Attentional_layers: 4
last_activation: 'tanh'
l2_normalization: false
output_dim: 256
SuperPoint+Boost-F:
keypoint_dim: 3
keypoint_encoder: [32, 64, 128, 256]
descriptor_encoder: [256, 256]
descriptor_dim: 256
Attentional_layers: 9
last_activation:
l2_normalization: true
output_dim: 256
SuperPoint+Boost-B:
keypoint_dim: 3
keypoint_encoder: [32, 64, 128, 256]
descriptor_encoder: [256, 256]
descriptor_dim: 256
Attentional_layers: 9
last_activation: 'tanh'
l2_normalization: false
output_dim: 256
ALIKE+Boost-F:
keypoint_dim: 3
keypoint_encoder: [32, 64, 128, 128]
descriptor_encoder: [256, 128]
descriptor_dim: 128
Attentional_layers: 9
last_activation:
l2_normalization: true
output_dim: 128
ALIKE+Boost-B:
keypoint_dim: 3
keypoint_encoder: [32, 64, 128, 128]
descriptor_encoder: [256, 128]
descriptor_dim: 128
Attentional_layers: 9
last_activation: 'tanh'
l2_normalization: false
output_dim: 256