config.json
{
"architectures": [
"LatentRecurrentDepthModel"
],
"auto_map": {
"AutoModel": "modeling_latent_recurrent_depth.LatentRecurrentDepthModel",
"AutoModelForCausalLM": "modeling_latent_recurrent_depth.LatentRecurrentDepthModel",
"AutoConfig": "modeling_latent_recurrent_depth.LatentRecurrentDepthConfig"
},
"model_type": "latent_recurrent_depth",
"vocab_size": 50257,
"d_model": 768,
"num_heads": 12,
"dropout": 0.1
}
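Because auto_map points at custom modeling code (modeling_latent_recurrent_depth.py) rather than a built-in transformers architecture, the Auto classes can only resolve LatentRecurrentDepthConfig and LatentRecurrentDepthModel when trust_remote_code=True is passed. A minimal loading sketch follows; the repository id is a hypothetical placeholder, and the GPT-2 tokenizer is only an assumption suggested by vocab_size 50257:

# Minimal loading sketch. Assumptions: "codewithdark/latent-recurrent-depth-lm"
# is a placeholder repo id, and the GPT-2 tokenizer is inferred from
# vocab_size 50257; neither is confirmed by this config.
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "codewithdark/latent-recurrent-depth-lm"  # hypothetical repo id

# AutoConfig resolves to LatentRecurrentDepthConfig via the auto_map entry;
# trust_remote_code=True allows transformers to execute the repo's custom code.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

# AutoModelForCausalLM resolves to LatentRecurrentDepthModel via auto_map.
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# Assumed tokenizer choice: vocab_size 50257 matches GPT-2's vocabulary.
tokenizer = AutoTokenizer.from_pretrained("gpt2")

The exact forward signature of LatentRecurrentDepthModel is defined in the repo's custom modeling file, so inference calls should follow that code rather than the standard CausalLM interface.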