{
  "architectures": [
    "LatentRecurrentDepthModel"
  ],
  "auto_map": {
    "AutoModelForCausalLM": "Model/modeling_latent_recurrent_depth.LatentRecurrentDepthModel",
    "AutoConfig": "Model/modeling_latent_recurrent_depth.LatentRecurrentDepthConfig"
  },
  "model_type": "latent_recurrent_depth",
  "vocab_size": 50257,
  "d_model": 768,
  "num_heads": 12,
  "dropout": 0.1
}
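
Because model_type is a custom architecture and auto_map points at modeling code shipped inside the repository (Model/modeling_latent_recurrent_depth.py), the checkpoint has to be loaded with trust_remote_code=True. Below is a minimal loading sketch; the repository id is a placeholder, and the GPT-2 tokenizer is only an assumption based on vocab_size being 50257.

# Minimal loading sketch for a config with a custom auto_map.
# Assumptions: repo_id is a placeholder, and the forward/generate behavior
# depends on how the custom LatentRecurrentDepthModel is implemented.
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "codewithdark/latent-recurrent-depth"  # hypothetical repo id

# trust_remote_code=True lets transformers import the classes named in auto_map
# (LatentRecurrentDepthConfig / LatentRecurrentDepthModel) from the repo itself.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# vocab_size = 50257 matches the GPT-2 vocabulary, so a GPT-2 tokenizer is a
# plausible (but unconfirmed) pairing for this model.
tokenizer = AutoTokenizer.from_pretrained("gpt2")

inputs = tokenizer("Hello, world", return_tensors="pt")
outputs = model(**inputs)  # forward pass; shapes follow d_model=768, num_heads=12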