simclr_config.yml
hydra:
  job_logging:
    # formatters:
    #   simple:
    #     format: '[]'
    root:
      handlers: [file, console] # log to file and console
  run:
    # dir: logs/${dataset}
    dir: logs/SimCLR/${dataset}
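    # ${dataset} is resolved by Hydra at runtime, so with the defaults below
    # run outputs are written under logs/SimCLR/cifar10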

dataset: cifar10
data_dir: data

# model
backbone: resnet18 # or resnet34, resnet50
projection_dim: 128 # "[...] to project the representation to a 128-dimensional latent space"
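# in the SimCLR paper, the projection head g(.) maps the encoder output h to the
# 128-d vector z on which the contrastive loss is computed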

# train options
seed: 42 # sacred handles automatic seeding when passed in the config
batch_size: 512
workers: 16
epochs: 1000
log_interval: 50

# loss options
optimizer: 'sgd' # or LARS (experimental)
learning_rate: 0.6 # initial lr = 0.3 * batch_size / 256
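# with batch_size = 512: 0.3 * 512 / 256 = 0.6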
momentum: 0.9
weight_decay: 1.0e-6 # "optimized using LARS [...] and weight decay of 10^-6"
temperature: 0.5 # see appendix B.7.: Optimal temperature under different batch sizes
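# temperature tau scales the cosine similarities in the NT-Xent loss:
# l(i,j) = -log( exp(sim(z_i, z_j)/tau) / sum_{k != i} exp(sim(z_i, z_k)/tau) )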

# finetune options
finetune_epochs: 100
load_epoch: 1000 # checkpoint for finetune