Correct `patch_size`

#3 by hans00 - opened
Files changed (1)
  1. config.json +1 -4
config.json CHANGED
@@ -63,10 +63,7 @@
   "num_attention_heads": 16,
   "num_channels": 3,
   "num_hidden_layers": 24,
-  "patch_size": [
-    14,
-    14
-  ],
+  "patch_size": 14,
   "projection_dropout": 0.0,
   "torch_dtype": "bfloat16",
   "use_absolute_position_embeddings": true,