-
Notifications
You must be signed in to change notification settings - Fork 0
/
scaling_run.py
64 lines (56 loc) · 1.58 KB
/
scaling_run.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
from datetime import datetime
import json
import numpy as np
from scaling_experiments import run_experiment
# Baseline configuration shared by every scaling experiment.  Entries in
# SCALING_PARAMS start from this dict and override individual keys.
DEFAULT_PARAMS = dict(
    delta_type="none",  # 'none', 'shared', 'multi',
    initial_sd=1.0e-04,
    init_method="xavier",
    activation="tanh",  # 'relu', 'tanh', 'linear'
    dim=10,
    dataset="ODE",  # 'ODE', 'mnist'
    optimizer_name="adam",
    num_epochs=200,
    epsilon=1.0e-03,
    train_size=1024,
    test_size=256,
    batch_size=50,
    lr=1.0e-03,
    path="./scaling/",
    save=True,
    min_depth=3,
    max_depth=1000,
    base=1.2,  # base**n < max_depth
)
# Concrete experiment configurations: each is DEFAULT_PARAMS merged with
# per-experiment overrides (dict(mapping, **overrides) leaves the original
# DEFAULT_PARAMS untouched).
SCALING_PARAMS = [
    dict(
        DEFAULT_PARAMS,
        # Path label now matches the actual dataset below; the original said
        # "dataset-mnist" while running on the ODE dataset.
        path=DEFAULT_PARAMS["path"] + "dataset-ODE/act-tanh/delta-shared/",
        dataset="ODE",
        num_epochs=10,
        dim=10,
        batch_size=32,
        lr=1e-2,
        epsilon=1e-3,
        activation="tanh",
        delta_type="shared",  # 'multi' | 'shared'
        initial_sd=0.01,
        init_method="xavier-depth",
        min_depth=3,
        max_depth=10,
        base=1.25992105,  # 2**(1/3): depths grow by cube-root-of-two steps
    )
]
def run():
    """Run every experiment in SCALING_PARAMS and save its parameters.

    Each experiment's output path is suffixed with a single shared
    timestamp so all experiments of one invocation land under the same
    time-stamped directory, and repeated invocations never collide.
    """
    # One timestamp for the whole batch, computed once up front.
    now = datetime.now().strftime("%Y-%m-%d-%H-%M") + "/"
    for i, base_params in enumerate(SCALING_PARAMS):
        # Merge into a fresh dict instead of mutating the shared entry:
        # the original `params_dict["path"] += NOW` modified SCALING_PARAMS
        # in place, so a second call to run() stacked two timestamps onto
        # every path.
        params = dict(base_params, path=base_params["path"] + now)
        print(f"Scaling experiments, starting {i + 1}/{len(SCALING_PARAMS)}.")
        print("Path: ", params["path"])
        run_experiment(**params)
        # Persist the exact parameters next to the results for reproducibility.
        # NOTE(review): assumes run_experiment created params["path"] (save=True)
        # before this write — confirm against scaling_experiments.
        with open(params["path"] + "params_dict.json", "w") as fp:
            json.dump(params, fp)


if __name__ == "__main__":
    run()