HRM-checkpoint-ARC-2 / all_config.yaml
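# Full training configuration saved with the HRM (HierarchicalReasoningModel_ACTV1)
# ARC-2 checkpoint. The "arch" block holds model hyperparameters, including the
# nested "loss" head settings; the remaining top-level keys are optimizer and
# training-loop settings for the run "HierarchicalReasoningModel_ACTV1 bright-mustang".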
arch:
  H_cycles: 2
  H_layers: 4
  L_cycles: 2
  L_layers: 4
  expansion: 4
  halt_exploration_prob: 0.1
  halt_max_steps: 16
  hidden_size: 512
  loss:
    loss_type: stablemax_cross_entropy
    name: losses@ACTLossHead
  name: hrm.hrm_act_v1@HierarchicalReasoningModel_ACTV1
  num_heads: 8
  pos_encodings: rope
  puzzle_emb_ndim: 512
beta1: 0.9
beta2: 0.95
checkpoint_every_eval: true
checkpoint_path: checkpoints/Arc-2-aug-1000 ACT-torch/HierarchicalReasoningModel_ACTV1 bright-mustang
data_path: data/arc-2-aug-1000
epochs: 100000
eval_interval: 10000
eval_save_outputs: []
global_batch_size: 768
lr: 0.0001
lr_min_ratio: 1.0
lr_warmup_steps: 2000
project_name: Arc-2-aug-1000 ACT-torch
puzzle_emb_lr: 0.01
puzzle_emb_weight_decay: 0.1
run_name: HierarchicalReasoningModel_ACTV1 bright-mustang
seed: 0
weight_decay: 0.1
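
For reference, a minimal sketch of reading this file with PyYAML and pulling out a few of the fields listed above. The file name and keys come from the config itself; the loader is illustrative only and is not the training pipeline's own config-handling code.

# Minimal sketch: parse all_config.yaml with PyYAML and inspect a few fields.
# Illustrative only; the training code's own config handling may differ.
import yaml

with open("all_config.yaml") as f:
    cfg = yaml.safe_load(f)

arch = cfg["arch"]
print(arch["name"])            # hrm.hrm_act_v1@HierarchicalReasoningModel_ACTV1
print(arch["hidden_size"])     # 512
print(arch["halt_max_steps"])  # 16 (cap on ACT halting steps)
print(cfg["global_batch_size"], cfg["lr"], cfg["epochs"])  # 768 0.0001 100000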