AlexBefest committed · verified
Commit 725af72 · 1 Parent(s): 0c85b52

Pushing model weights

Files changed (1): config.json (+62, -0)
config.json ADDED
@@ -0,0 +1,62 @@
+ {
+   "architectures": [
+     "Gemma3ForConditionalGeneration"
+   ],
+   "boi_token_index": 255999,
+   "bos_token_id": 2,
+   "eoi_token_index": 256000,
+   "eos_token_id": 106,
+   "image_token_index": 262144,
+   "initializer_range": 0.02,
+   "mm_tokens_per_image": 256,
+   "model_type": "gemma3",
+   "pad_token_id": 0,
+   "text_config": {
+     "attention_bias": false,
+     "attention_dropout": 0.0,
+     "attn_logit_softcapping": null,
+     "cache_implementation": "hybrid",
+     "final_logit_softcapping": null,
+     "head_dim": 128,
+     "hidden_activation": "gelu_pytorch_tanh",
+     "hidden_size": 5376,
+     "initializer_range": 0.02,
+     "intermediate_size": 21504,
+     "max_position_embeddings": 131072,
+     "model_type": "gemma3_text",
+     "num_attention_heads": 32,
+     "num_hidden_layers": 62,
+     "num_key_value_heads": 16,
+     "query_pre_attn_scalar": 168,
+     "rms_norm_eps": 1e-06,
+     "rope_local_base_freq": 10000.0,
+     "rope_scaling": {
+       "factor": 8.0,
+       "rope_type": "linear"
+     },
+     "rope_theta": 1000000.0,
+     "sliding_window": 1024,
+     "sliding_window_pattern": 6,
+     "torch_dtype": "bfloat16",
+     "use_cache": true,
+     "vocab_size": 262208
+   },
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.50.0.dev0",
+   "unsloth_fixed": true,
+   "vision_config": {
+     "attention_dropout": 0.0,
+     "hidden_act": "gelu_pytorch_tanh",
+     "hidden_size": 1152,
+     "image_size": 896,
+     "intermediate_size": 4304,
+     "layer_norm_eps": 1e-06,
+     "model_type": "siglip_vision_model",
+     "num_attention_heads": 16,
+     "num_channels": 3,
+     "num_hidden_layers": 27,
+     "patch_size": 14,
+     "torch_dtype": "bfloat16",
+     "vision_use_head": false
+   }
+ }
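For reference, the file added here can be parsed and inspected with the transformers library. This is a minimal sketch, assuming transformers >= 4.50 (per the "transformers_version" field above) and that config.json sits in the working directory; every value in the comments comes from the diff itself.

```python
# Minimal sketch: load the config.json added in this commit and inspect it.
# Assumes transformers >= 4.50 and config.json in the current directory.
from transformers import Gemma3Config

config = Gemma3Config.from_json_file("config.json")

# Text backbone: 62 decoder layers, hidden size 5376, grouped-query
# attention with 32 query heads over 16 key/value heads (head_dim 128).
text = config.text_config
print(text.num_hidden_layers, text.hidden_size,
      text.num_attention_heads, text.num_key_value_heads)

# Long-context setup: 131072 max positions with linear RoPE scaling,
# factor 8.0, on a global rope_theta of 1000000.0.
print(text.max_position_embeddings, text.rope_scaling)

# Vision tower: SigLIP encoder, 896x896 input with 14x14 patches, i.e.
# (896 / 14)^2 = 4096 patches, pooled down to mm_tokens_per_image = 256.
vision = config.vision_config
print(vision.image_size, vision.patch_size, config.mm_tokens_per_image)
```

Note the interleaved sliding-window attention implied by the text config: sliding_window_pattern 6 with sliding_window 1024 means most layers attend over a local 1024-token window, which is why the cache_implementation is "hybrid".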