#!/usr/bin/env zsh
# Fine-tune a pretrained data2vec checkpoint on the GLUE benchmark tasks,
# sweeping a small grid of learning rates per task.
#
# Usage: <script> <experiment-dir>
#   Expects the checkpoint at <experiment-dir>/checkpoints/checkpoint_last.pt.
#
# NOTE: stray " | |" table residue removed from every line (extraction artifact
# that made the file unparseable by any shell).

dir="$1"
# NB: "cp" here is a variable, not the cp(1) command — shell keeps the two
# namespaces separate, but keep that in mind when reading the loop below.
cp="$dir/checkpoints/checkpoint_last.pt"
echo "dir: $dir"
# Map: GLUE task name (as used by the hydra config names below)
#      -> fairseq-binarized data directory for that task.
declare -A tasks
tasks[cola]="/fsx-wav2vec/abaevski/data/nlp/GLUE/CoLA-bin"
tasks[qnli]="/fsx-wav2vec/abaevski/data/nlp/GLUE/QNLI-bin"
tasks[mrpc]="/fsx-wav2vec/abaevski/data/nlp/GLUE/MRPC-bin"
tasks[rte]="/fsx-wav2vec/abaevski/data/nlp/GLUE/RTE-bin"
tasks[sst_2]="/fsx-wav2vec/abaevski/data/nlp/GLUE/SST-2-bin"
tasks[mnli]="/fsx-wav2vec/abaevski/data/nlp/GLUE/MNLI-bin"
tasks[qqp]="/fsx-wav2vec/abaevski/data/nlp/GLUE/QQP-bin"
tasks[sts_b]="/fsx-wav2vec/abaevski/data/nlp/GLUE/STS-B-bin"
lrs=(5e-6 8e-6 1e-5 2e-5) | |
# Launch one hydra multirun per (task, lr) pair.
#
# NOTE: zsh-only syntax — "${(@kv)tasks}" expands the associative array as
# alternating key/value words, and the two loop variables consume them
# pairwise (task=key, data_path=value). The (@) flag plus quoting keeps each
# element intact even if a path ever contains whitespace.
for task data_path in "${(@kv)tasks}"; do
  for lr in "${lrs[@]}"; do
    echo "$lr $task"
    # One sweep output dir per (task, lr) so runs never clobber each other.
    PYTHONPATH=. PREFIX="${PREFIX}" SUFFIX="" \
    python fairseq_cli/hydra_train.py -m --config-dir examples/data2vec/config/multi/text_finetuning \
    --config-name "$task" +run_config=local task.data="$data_path" common.log_interval=200 dataset.num_workers=1 \
    model.model_path="$cp" hydra.sweep.dir="$dir/finetune_lr/$task/$lr" "optimization.lr=[${lr}]" +model=text_wrap
  done
done