Commit 04b93a1 (verified) by RaushanTurganbay (HF Staff)
1 Parent(s): b61e24c

Upload Gemma3ForCausalLM

Files changed (3):
  1. config.json +2 -2
  2. generation_config.json +1 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -14,7 +14,7 @@
  "final_logit_softcapping": null,
  "head_dim": 8,
  "hidden_activation": "gelu_pytorch_tanh",
- "hidden_size": 8,
+ "hidden_size": 16,
  "initializer_range": 0.02,
  "intermediate_size": 32,
  "max_position_embeddings": 32768,
@@ -31,7 +31,7 @@
  "sliding_window": 512,
  "sliding_window_pattern": 6,
  "torch_dtype": "float32",
- "transformers_version": "4.50.0.dev0",
+ "transformers_version": "4.52.0.dev0",
  "use_cache": true,
  "vocab_size": 262144
 }
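
For reference, a minimal sketch (not part of the commit) of how the updated config would be consumed with transformers >= 4.50; the local path "." is a placeholder for a checkout of this repo, not its actual id:

    from transformers import AutoConfig, AutoModelForCausalLM

    # Read the updated config.json from a local checkout of this repo
    # ("." is a placeholder path, not the real repo id).
    config = AutoConfig.from_pretrained(".")
    print(config.hidden_size)  # 16 after this commit (was 8)

    # Build the tiny, randomly initialized Gemma3ForCausalLM described by the config.
    model = AutoModelForCausalLM.from_config(config)
    print(f"{sum(p.numel() for p in model.parameters()):,} parameters")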
generation_config.json CHANGED
@@ -7,5 +7,5 @@
    106
  ],
  "pad_token_id": 0,
- "transformers_version": "4.50.0.dev0"
+ "transformers_version": "4.52.0.dev0"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e7daf934435f4d342b1aeebb3852479ead5ed76313ccdd1f1ec7de577cd68539
- size 8401176
+ oid sha256:2729def179913fde4d621fe026ef5a5d9353975d126c181101ab3bac27fe1840
+ size 16799368
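
The near-doubling of the checkpoint tracks the hidden_size change above; a back-of-the-envelope check, assuming the float32 embedding matrix (vocab_size × hidden_size) dominates this tiny model's weights:

    # Rough size check; assumes the float32 embedding matrix dominates the
    # checkpoint of this tiny model (the remaining weights add only tens of kB).
    vocab_size, bytes_per_float32 = 262144, 4
    print(vocab_size * 8 * bytes_per_float32)   # 8_388_608  ~ old size  8_401_176
    print(vocab_size * 16 * bytes_per_float32)  # 16_777_216 ~ new size 16_799_368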