-
Notifications
You must be signed in to change notification settings - Fork 0
/
run_tune.sh
51 lines (45 loc) · 1.39 KB
/
run_tune.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
#!/usr/bin/env bash
#----------------------------------------------------------
# run_tune.sh — launch one ImageNet training run with the
# hyper-parameters supplied as 16 positional arguments.
#
# Requires: TRAIN_HOME — root folder containing imagenet_train.py.
# Usage:    run_tune.sh LR LD LS ID TS EP BS WD MO DB ST QI QS QF MD IN
#----------------------------------------------------------
set -u  # abort on any unset variable — catches a missing positional arg

# ${TRAIN_HOME:?} aborts with a message instead of silently running
# "python /imagenet_train.py" when the env var is unset/empty.
ws=${TRAIN_HOME:?TRAIN_HOME must point to the training root} # working folder path (root folder)

LR=$1    # learning rate
LD=$2    # gamma (LR decay factor)
LS=$3    # step size
ID=$4    # custom optimizer argument 1
TS=$5    # custom optimizer argument 2
EP=$6    # training epochs: 100
BS=$7    # batch size: 64
WD=$8    # weight decay: '4e-5'
MO=$9    # momentum: 0.9
DB=${10} # custom opt flag: True/False
ST=${11} # LR update schedule: fixedStep2
QI=${12} # custom optimizer parameter 1
QS=${13} # custom optimizer parameter 2
QF=${14} # custom optimizer parameter 3
MD=${15} # model name: 'mobilenetv2'
IN=${16} # input path: '/datasets/IMAGENET-UNCROPPED'
# NOTE(review): DB, TS, QI, QS, QF are accepted and echoed below but are
# never forwarded to imagenet_train.py — confirm whether that is intended.

CP=./checkpoints                  # checkpointing folder
IS=224                            # input size in pixels
DA=pytorch                        # dataloader backend (alternative: dali-gpu)
WR=8                              # number of data-loader workers
CR=checkpoints/checkpoint.pth.tar # checkpoint to resume from

echo "Tuning for LR=$LR LD=$LD LS=$LS ID=$ID WD=$WD TS=$TS EP=$EP BS=$BS MO=$MO DB=$DB ST=$ST QI=$QI QS=$QS QF=$QF MD=$MD IN=$IN"
echo "My home path is: $(pwd)"
#----------------------------------------------------------
# All expansions quoted (SC2086) so paths/names with spaces survive intact.
python "$ws/imagenet_train.py" \
  -a "$MD" \
  -d "$IN" \
  --epochs "$EP" \
  --lr-decay "$ST" \
  --step "$LS" \
  --gamma="$LD" \
  --lr "$LR" \
  --init-decay "$ID" \
  --wd "$WD" \
  -c "$CP" \
  --input-size "$IS" \
  --batch-size "$BS" \
  --momentum "$MO" \
  -j "$WR" \
  --data-backend "$DA" \
  --resume "$CR"
#----------------------------------------------------------
# --early-term True