forked from UCSC-REAL/CAL
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathrun_exptPRLD_C100_CAL.py
106 lines (82 loc) · 2.53 KB
/
run_exptPRLD_C100_CAL.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
import torch
import numpy as np
import random
from experiments import ExptPeerRegC100CAL
# Fix every RNG source (PyTorch CPU, PyTorch CUDA, NumPy, stdlib random)
# to the same value so experiment runs are reproducible.
seed = 10086
if seed is not None:
    for seed_fn in (torch.manual_seed, torch.cuda.manual_seed,
                    np.random.seed, random.seed):
        seed_fn(seed)
# Maps the loss-function option string to the short tag used in the
# experiment name.
LossAbbr = {
    "crossentropy": "CE",
    "crossentropy_CAL": "CE_CAL",
}
# for the complete list of options, see utils/options.py
dataset = "CIFAR100"
netARCH = "resnet_cifar34"
#-------------- customized parameters --------------#
noise_rate = 0.2            # instance-dependent label-noise rate
lossfunc = "crossentropy"   # first stage: plain CE with sample sieve
# lossfunc = "crossentropy_CAL"
gpu_idx = "0"
#---------------------------------------------------#
max_epoch = 100
outfile = None
json_path = None
is_peer = True  # by default peersize=1
with_noise = True
# Pre-generated noisy-label file for this noise rate.
noise_file = f"IDN_{noise_rate}_C100.pt"
chosen_classes = list(range(100))
# tune alpha_list if necessary
if lossfunc == "crossentropy":
    alpha_list = [0.0, 1.0, 1.0] # for sample selection. This is slightly different from the setting in paper. We move the *10 to noise_prior (equivalent)
    milestones = [10, 40, 80]
    sample_weight_path = None
elif lossfunc == "crossentropy_CAL":
    alpha_list = [0.0, 1.0, 1.0] # for CAL
    milestones = [10, 40, 80]
    # Sample weights produced by the first-stage CE run above.
    sample_weight_path = f'sieve_65_CE_{dataset}_{noise_rate}.pt'
else:
    # BUG FIX: the original created the ValueError but never raised it,
    # so an unknown lossfunc fell through and crashed later with a
    # NameError on alpha_list. Raise it here instead.
    raise ValueError('Undefined loss functions')
# Generate expt name
exp_mark = f'{dataset}_{noise_rate}'
exp_name = f"{LossAbbr[lossfunc]}_{exp_mark}"
if __name__ == "__main__":
    # Collect all experiment options in one dictionary, then hand it to
    # the CAL peer-regularization runner and start training.
    options = {
        "--is_train": True,
        "--is_plot_results": False,
        "--is_class_resolved": False,
        "--is_load": False,
        "--exp_name": exp_name,
        "--dataset": dataset,
        "--netARCH": netARCH,
        "--num_classes": 100,
        "--lossfunc": lossfunc,
        "--optimizer": "SGD",
        "--lr": 0.1,  # 0.1
        "--lr_scheduler": "step",
        "--weight_decay": 0.0005,
        "--lr_decay_step_size": 60,  # 60
        "--lr_decay_rate": 0.1,  # 0.1
        "--batch_size": 128,
        "--max_epoch": max_epoch,
        "--is_validate": False,
        "--val_ratio": 0.0,
        "--with_noise": with_noise,
        "--noise_label_fname": noise_file,
        "--is_peerloss": is_peer,
        "--alpha": 0.0,
        "--alpha_scheduler": 'seg',
        "--alpha_list": alpha_list,
        "--milestones": milestones,
        "--gpu_idx": gpu_idx,
        "--chosen_classes": chosen_classes,
        "--sample_weight_path": sample_weight_path,
        "--beta_path": None,
    }
    experiment = ExptPeerRegC100CAL(
        options,
        json_path=json_path,
        outputfile=outfile,
    )
    experiment.train()