```json
{
  "activation_fn_name": "swish",
  "architectures": [
    "TpttModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_tptt.TpttConfig",
    "AutoModelForCausalLM": "modeling_tptt.TpttModel"
  },
  "base_model_name": "apple/OpenELM-1_1B",
  "ffn_dim_divisor": 256,
  "ffn_multipliers": [
    0.5, 0.63, 0.76, 0.89, 1.02, 1.15, 1.28,
    1.41, 1.54, 1.67, 1.8, 1.93, 2.06, 2.19,
    2.31, 2.44, 2.57, 2.7, 2.83, 2.96, 3.09,
    3.22, 3.35, 3.48, 3.61, 3.74, 3.87, 4.0
  ],
  "ffn_with_glu": true,
  "head_dim": 64,
  "initializer_range": 0.02,
  "lora_config": {
    "alpha_pattern": {},
    "auto_mapping": null,
    "base_model_name_or_path": null,
    "bias": "none",
    "eva_config": null,
    "exclude_modules": null,
    "fan_in_fan_out": false,
    "inference_mode": false,
    "init_lora_weights": true,
    "layer_replication": null,
    "layers_pattern": null,
    "layers_to_transform": null,
    "loftq_config": {},
    "lora_alpha": 16,
    "lora_bias": false,
    "lora_dropout": 0.05,
    "megatron_config": null,
    "megatron_core": "megatron.core",
    "modules_to_save": null,
    "peft_type": "LORA",
    "r": 8,
    "rank_pattern": {},
    "revision": null,
    "target_modules": [
      "out_proj",
      "qkv_proj"
    ],
    "task_type": "CAUSAL_LM",
    "use_dora": false,
    "use_rslora": false
  },
  "mag_weight": 0.5,
  "max_chunk_size": 64,
  "max_context_length": 2048,
  "max_self_attn_length": 8192,
  "model_dim": 2048,
  "model_type": "tptt",
  "normalization_layer_name": "rms_norm",
  "normalize_qk_projections": true,
  "num_gqa_groups": 4,
  "num_kv_heads": [
    4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6,
    6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8
  ],
  "num_query_heads": [
    16, 16, 16, 20, 20, 20, 20, 20, 20, 20, 24, 24, 24, 24,
    24, 24, 24, 24, 28, 28, 28, 28, 28, 28, 32, 32, 32, 32
  ],
  "num_transformer_layers": 28,
  "operator_mode": "delta_rule",
  "qkv_multipliers": [
    0.5,
    1.0
  ],
  "rope_freq_constant": 10000,
  "rope_max_length": 4096,
  "share_input_output_layers": true,
  "target_modules_names": [
    "attn",
    "self_attn",
    "attention"
  ],
  "torch_dtype": "bfloat16",
  "transformers_version": "4.51.3",
  "use_cache": true,
  "vocab_size": 32000
}
```
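Because `auto_map` points at custom classes (`configuration_tptt.TpttConfig`, `modeling_tptt.TpttModel`) shipped alongside this config, the checkpoint must be loaded with `trust_remote_code=True`. Below is a minimal loading sketch; the repository id is a placeholder (this config does not name the checkpoint's repo), and the tokenizer choice is an assumption based on OpenELM checkpoints typically pairing with the Llama-2 tokenizer, consistent with `"vocab_size": 32000`.

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

# Placeholder repo id: substitute the actual TPTT checkpoint repository.
repo_id = "your-org/tptt-openelm-1_1b"

# trust_remote_code=True is required because the config's auto_map
# resolves to custom classes bundled with the checkpoint.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.operator_mode)  # "delta_rule"
print(config.mag_weight)     # 0.5

model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16"
    trust_remote_code=True,
)

# Assumption: like the base apple/OpenELM-1_1B model, this checkpoint
# does not bundle a tokenizer and uses the Llama-2 tokenizer instead.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")

inputs = tokenizer("Hello, world!", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

Note that `lora_config` (PEFT, `r=8`, `lora_alpha=16` on `qkv_proj`/`out_proj`) and `target_modules_names` describe how the linearized-attention adapters are injected into the base model's attention blocks; no separate PEFT loading step is needed when the custom `TpttModel` code applies them itself.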