This repository has been archived by the owner on Oct 31, 2023. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 259
/
plot_log.py
executable file
·104 lines (75 loc) · 2.9 KB
/
plot_log.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
#!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
import logging
import matplotlib.pyplot as plt
import numpy as np
import os
import torch
import deep_sdf
import deep_sdf.workspace as ws
def running_mean(x, N):
    """Return the length-N moving average of *x* via a cumulative sum.

    Produces len(x) - N + 1 values; each output element is the mean of a
    window of N consecutive input elements.
    """
    # Prepend a zero so that window sums can be formed as differences of
    # two cumulative-sum entries (one O(n) pass instead of O(n*N)).
    padded = np.insert(x, 0, 0)
    totals = np.cumsum(padded)
    window_sums = totals[N:] - totals[:-N]
    return window_sums / float(N)
def load_logs(experiment_directory, type):
    """Load a DeepSDF training log file and display one plot from it.

    Parameters
    ----------
    experiment_directory : str
        Experiment directory containing the workspace log file
        (``ws.logs_filename``), as written during training.
    type : str
        Which curve to plot: "loss", "learning_rate", "time", "lat_mag",
        or "param_mag".  (Name shadows the builtin, kept for interface
        compatibility with existing callers.)

    Raises
    ------
    ValueError
        If ``type`` is not one of the recognized plot types.
    """
    logs = torch.load(os.path.join(experiment_directory, ws.logs_filename))

    logging.info("latest epoch is {}".format(logs["epoch"]))

    num_iters = len(logs["loss"])
    # NOTE(review): assumes logs["epoch"] > 0 — a log saved before the first
    # completed epoch would divide by zero here.
    iters_per_epoch = num_iters / logs["epoch"]

    logging.info("{} iters per epoch".format(iters_per_epoch))

    _, ax = plt.subplots()

    if type == "loss":
        # Smoothing is only needed for the loss plot, so compute it here
        # rather than unconditionally for every plot type.
        smoothed_loss_41 = running_mean(logs["loss"], 41)
        smoothed_loss_1601 = running_mean(logs["loss"], 1601)

        # Raw loss plus two smoothed overlays; the x-ranges for the smoothed
        # curves are trimmed by half a window (20 and 800 iterations) so they
        # align with the centers of their averaging windows.
        ax.plot(
            np.arange(num_iters) / iters_per_epoch,
            logs["loss"],
            "#82c6eb",
            np.arange(20, num_iters - 20) / iters_per_epoch,
            smoothed_loss_41,
            "#2a9edd",
            np.arange(800, num_iters - 800) / iters_per_epoch,
            smoothed_loss_1601,
            "#16628b",
        )

        ax.set(xlabel="Epoch", ylabel="Loss", title="Training Loss")

    elif type == "learning_rate":
        # One curve per parameter group (columns of the per-epoch LR record).
        combined_lrs = np.array(logs["learning_rate"])

        ax.plot(
            np.arange(combined_lrs.shape[0]),
            combined_lrs[:, 0],
            np.arange(combined_lrs.shape[0]),
            combined_lrs[:, 1],
        )
        ax.set(xlabel="Epoch", ylabel="Learning Rate", title="Learning Rates")

    elif type == "time":
        ax.plot(logs["timing"], "#833eb7")
        ax.set(xlabel="Epoch", ylabel="Time per Epoch (s)", title="Timing")

    elif type == "lat_mag":
        ax.plot(logs["latent_magnitude"])
        ax.set(xlabel="Epoch", ylabel="Magnitude", title="Latent Vector Magnitude")

    elif type == "param_mag":
        for _name, mags in logs["param_magnitude"].items():
            ax.plot(mags)
        ax.set(xlabel="Epoch", ylabel="Magnitude", title="Parameter Magnitude")
        ax.legend(logs["param_magnitude"].keys())

    else:
        # ValueError subclasses Exception, so existing callers catching
        # Exception still work.
        raise ValueError('unrecognized plot type "{}"'.format(type))

    ax.grid()
    plt.show()
if __name__ == "__main__":
    # CLI entry point: parse the experiment directory and plot type, then
    # hand off to load_logs.
    import argparse

    parser = argparse.ArgumentParser(description="Plot DeepSDF training logs")
    parser.add_argument(
        "--experiment",
        "-e",
        dest="experiment_directory",
        required=True,
        help=(
            "The experiment directory. This directory should include experiment "
            "specifications in 'specs.json', and logging will be done in this directory "
            "as well"
        ),
    )
    parser.add_argument("--type", "-t", dest="type", default="loss")

    # Shared flags (e.g. logging verbosity) defined by the project package.
    deep_sdf.add_common_args(parser)

    parsed_args = parser.parse_args()
    deep_sdf.configure_logging(parsed_args)

    load_logs(parsed_args.experiment_directory, parsed_args.type)