#!/usr/bin/env zsh
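#
# Sweep fine-tuning learning rates over the GLUE tasks for a pretrained
# data2vec multi checkpoint.
#
# Usage: <this script> <run_dir>
#   <run_dir> must contain checkpoints/checkpoint_last.pt; sweep outputs
#   are written under <run_dir>/finetune_lr/<task>/<lr>.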
dir="$1"
# The run directory is required; bail out early rather than failing later
# with a confusing missing-checkpoint error.
if [[ -z "$dir" ]]; then
  echo "usage: $0 <run_dir>" >&2
  exit 1
fi
cp="$dir/checkpoints/checkpoint_last.pt"
echo "dir: $dir"
declare -A tasks
tasks[cola]="/fsx-wav2vec/abaevski/data/nlp/GLUE/CoLA-bin"
tasks[qnli]="/fsx-wav2vec/abaevski/data/nlp/GLUE/QNLI-bin"
tasks[mrpc]="/fsx-wav2vec/abaevski/data/nlp/GLUE/MRPC-bin"
tasks[rte]="/fsx-wav2vec/abaevski/data/nlp/GLUE/RTE-bin"
tasks[sst_2]="/fsx-wav2vec/abaevski/data/nlp/GLUE/SST-2-bin"
tasks[mnli]="/fsx-wav2vec/abaevski/data/nlp/GLUE/MNLI-bin"
tasks[qqp]="/fsx-wav2vec/abaevski/data/nlp/GLUE/QQP-bin"
tasks[sts_b]="/fsx-wav2vec/abaevski/data/nlp/GLUE/STS-B-bin"
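# Learning rates to sweep for every task.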
lrs=(5e-6 8e-6 1e-5 2e-5)
# zsh: ${(kv)tasks} expands the associative array as alternating key/value
# pairs, so each iteration binds a task name and its data path together.
for task data_path in ${(kv)tasks}; do
  for lr in $lrs; do
    echo $lr $task
    # Launch a hydra multirun job that fine-tunes the checkpoint on this
    # task at this learning rate; each (task, lr) pair gets its own sweep dir.
    PYTHONPATH=. PREFIX="${PREFIX}" SUFFIX="" \
      python fairseq_cli/hydra_train.py -m --config-dir examples/data2vec/config/multi/text_finetuning \
      --config-name $task +run_config=local task.data="$data_path" common.log_interval=200 dataset.num_workers=1 \
      model.model_path="$cp" hydra.sweep.dir="$dir/finetune_lr/$task/$lr" "optimization.lr=[${lr}]" +model=text_wrap
  done
done