Commit 9ae1310 (verified) by stevenbucaille · 1 parent: 4a321e2

Files changed (1):
  1. config.json (+4 -4)
config.json CHANGED

@@ -1,9 +1,5 @@
 {
   "activation_function": "relu",
-  "aggregation_sizes": [
-    4,
-    4
-  ],
   "architectures": [
     "EfficientLoFTRForKeypointMatching"
   ],
@@ -19,11 +15,15 @@
   "fine_matching_slice_dim": 8,
   "hidden_size": 256,
   "initializer_range": 0.02,
+  "kv_aggregation_kernel_size": 4,
+  "kv_aggregation_stride": 4,
   "mlp_activation_function": "leaky_relu",
   "model_type": "efficientloftr",
   "num_attention_heads": 8,
   "num_attention_layers": 4,
   "num_key_value_heads": 8,
+  "q_aggregation_kernel_size": 4,
+  "q_aggregation_stride": 4,
   "rope_scaling": {
     "dim": 64,
     "rope_type": "2d"