ArabicLM / config.json
{
"architectures": [
"ArabicGPTModel"
],
"dropout": 0.1,
"embed_dim": 512,
"ff_dim": 2048,
"max_seq_len": 256,
"model_type": "arabic-gpt",
"num_heads": 8,
"num_layers": 12,
"torch_dtype": "float32",
"transformers_version": "4.51.3",
"vocab_size": 32000
}
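
The config describes a small decoder-only transformer: 12 layers, 8 attention heads, a 512-dim embedding with a 2048-dim feed-forward block, a 32k vocabulary, and a 256-token context window. Since "arabic-gpt" is a custom model_type, the actual ArabicGPTModel implementation ships with the repo rather than with transformers; the sketch below is a hypothetical reconstruction from these hyperparameters alone (built on PyTorch's stock TransformerEncoder), not the author's code.

import torch
import torch.nn as nn

class ArabicGPTModel(nn.Module):
    """Decoder-only transformer sketch matching config.json above.

    NOTE: illustrative only; the real ArabicGPTModel lives in the repo's
    own code and may differ in structure.
    """
    def __init__(self, vocab_size=32000, embed_dim=512, num_heads=8,
                 num_layers=12, ff_dim=2048, max_seq_len=256, dropout=0.1):
        super().__init__()
        self.tok_emb = nn.Embedding(vocab_size, embed_dim)
        self.pos_emb = nn.Embedding(max_seq_len, embed_dim)
        block = nn.TransformerEncoderLayer(
            d_model=embed_dim, nhead=num_heads, dim_feedforward=ff_dim,
            dropout=dropout, batch_first=True)
        self.blocks = nn.TransformerEncoder(block, num_layers=num_layers)
        self.lm_head = nn.Linear(embed_dim, vocab_size, bias=False)

    def forward(self, input_ids):
        seq_len = input_ids.size(1)
        pos = torch.arange(seq_len, device=input_ids.device)
        x = self.tok_emb(input_ids) + self.pos_emb(pos)
        # Causal mask: each position attends only to itself and earlier tokens.
        mask = nn.Transformer.generate_square_subsequent_mask(seq_len).to(input_ids.device)
        x = self.blocks(x, mask=mask)
        return self.lm_head(x)  # logits over the 32k-token vocabulary

model = ArabicGPTModel()
logits = model(torch.randint(0, 32000, (1, 256)))  # -> (1, 256, 32000)

To use the repo's actual implementation rather than this sketch, the usual route for a custom model_type is AutoConfig / AutoModel with trust_remote_code=True (repo id assumed here to be codewithdark/ArabicLM, inferred from the page path).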