run_greaselm.sh
#!/bin/bash
export TOKENIZERS_PARALLELISM=true
dt=$(date '+%Y%m%d_%H%M%S')   # timestamp used to keep run names unique
dataset=$1                    # dataset name passed as the first argument (e.g. csqa, obqa)
shift
encoder='roberta-base'
args=("$@")                   # all remaining arguments are forwarded to greaselm.py
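# Example invocations (a sketch; dataset names other than obqa are assumptions based on
# the GreaseLM repo, and any extra flags are simply passed through to greaselm.py, where
# they typically override the defaults set below because they are appended last):
#   bash run_greaselm.sh csqa
#   bash run_greaselm.sh obqa -mbs 4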
# Learning rates: elr is the LM encoder learning rate, dlr the learning rate for the
# rest of the model (GNN side). Previously tried values are kept for reference.
# elr alternatives: 1e-5, 5e-6, 8e-8, 5e-8, 3.9e-8
elr="1e-7"
# dlr alternatives: 1e-3, 4e-6, 3.9e-06
dlr="5e-6"
# Batch size (bs) and mini-batch size (mbs)
# bs=128
bs=1
mbs=8
unfreeze_epoch=4      # epoch at which the LM encoder is unfrozen
k=5                   # number of GNN layers
gnndim=200            # GNN hidden dimension
# Existing arguments, with values changed for GreaseLM
encoder_layer=-1
max_node_num=200      # maximum number of KG nodes per example
seed=5
lr_schedule=fixed     # constant learning rate (no decay)
if [ "${dataset}" = obqa ]
then
  n_epochs=70
  max_epochs_before_stop=10
  ie_dim=400
else
  # all other datasets (e.g. csqa)
  n_epochs=50
  # n_epochs=30
  max_epochs_before_stop=10
  ie_dim=400
fi
max_seq_len=100       # maximum LM input sequence length
ent_emb=tzw           # which pre-computed entity (concept) embeddings to use
# Added for GreaseLM
info_exchange=true    # enable LM<->GNN information exchange
ie_layer_num=1        # number of layers in the interaction module
resume_checkpoint=None
resume_id=None
sep_ie_layers=false   # whether to use separate interaction layers per GreaseLM layer
random_ent_emb=false  # if true, use randomly initialized entity embeddings instead
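# To resume a previous run, override resume_checkpoint and resume_id above (sketch only;
# the exact checkpoint filename depends on what greaselm.py saves under --save_dir, so
# the path and id below are hypothetical placeholders):
#   resume_checkpoint=runs/${dataset}/<previous_run_name>/<checkpoint_file>
#   resume_id=<previous_run_id>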
echo "***** hyperparameters *****"
echo "dataset: $dataset"
echo "enc_name: $encoder"
echo "batch_size: $bs mini_batch_size: $mbs"
echo "learning_rate: elr $elr dlr $dlr"
echo "gnn: dim $gnndim layer $k"
echo "ie_dim: ${ie_dim}, info_exchange: ${info_exchange}"
echo "******************************"
save_dir_pref='runs'
mkdir -p $save_dir_pref
run_name=greaselm__ds_${dataset}__enc_${encoder}__k${k}__sd${seed}__iedim${ie_dim}__${dt}
log=logs/train_${dataset}__${run_name}.log.txt
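# Note (added as a precaution): the commented-out redirection at the end of this script
# writes to ${log}; create the logs/ directory so that redirect works if enabled.
mkdir -p logs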
###### Training ######
python3 -u greaselm.py \
  --dataset $dataset \
  --encoder $encoder -k $k --gnn_dim $gnndim -elr $elr -dlr $dlr -bs $bs --seed $seed -mbs ${mbs} \
  --unfreeze_epoch ${unfreeze_epoch} --encoder_layer=${encoder_layer} -sl ${max_seq_len} --max_node_num ${max_node_num} \
  --n_epochs $n_epochs --max_epochs_before_stop ${max_epochs_before_stop} \
  --save_dir ${save_dir_pref}/${dataset}/${run_name} \
  --run_name ${run_name} \
  --ie_dim ${ie_dim} --info_exchange ${info_exchange} --ie_layer_num ${ie_layer_num} \
  --resume_checkpoint ${resume_checkpoint} --resume_id ${resume_id} --sep_ie_layers ${sep_ie_layers} \
  --random_ent_emb ${random_ent_emb} --ent_emb ${ent_emb//,/ } --lr_schedule ${lr_schedule} \
  --use_wandb 'True' \
  "${args[@]}" \
# > ${log} 2>&1 &
# echo log: ${log}
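# To run training in the background and capture output, uncomment the two lines above:
# the trailing backslash after "${args[@]}" joins the redirection onto the python3 command,
# sending stdout/stderr to ${log} and detaching the process with '&'.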