rdiehlmartinez committed on
Commit
20c568e
·
verified ·
1 Parent(s): 6bfde12

Updating decoder HF config

Browse files
Files changed (1) hide show
  1. config.json +5 -5
config.json CHANGED
@@ -1,22 +1,22 @@
1
  {
2
  "activation_hidden_dim": 6144,
3
  "architectures": [
4
- "PicoHF"
5
  ],
6
  "attention_n_heads": 12,
7
  "attention_n_kv_heads": 4,
8
  "auto_map": {
9
- "AutoConfig": "pico.PicoHFConfig",
10
- "AutoModelForCausalLM": "pico.PicoHF"
11
  },
12
  "batch_size": 1024,
13
  "d_model": 1536,
14
  "max_seq_len": 2048,
15
- "model_type": "pico",
16
  "n_layers": 12,
17
  "norm_eps": 1e-06,
18
  "position_emb_theta": 10000.0,
19
  "torch_dtype": "float32",
20
- "transformers_version": "4.48.1",
21
  "vocab_size": 50304
22
  }
 
1
  {
2
  "activation_hidden_dim": 6144,
3
  "architectures": [
4
+ "PicoDecoderHF"
5
  ],
6
  "attention_n_heads": 12,
7
  "attention_n_kv_heads": 4,
8
  "auto_map": {
9
+ "AutoConfig": "pico_decoder.PicoDecoderHFConfig",
10
+ "AutoModelForCausalLM": "pico_decoder.PicoDecoderHF"
11
  },
12
  "batch_size": 1024,
13
  "d_model": 1536,
14
  "max_seq_len": 2048,
15
+ "model_type": "pico_decoder",
16
  "n_layers": 12,
17
  "norm_eps": 1e-06,
18
  "position_emb_theta": 10000.0,
19
  "torch_dtype": "float32",
20
+ "transformers_version": "4.48.3",
21
  "vocab_size": 50304
22
  }