-
Notifications
You must be signed in to change notification settings - Fork 3.4k
/
base.py
459 lines (367 loc) · 16 KB
/
base.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Abstract base class used to build new loggers."""
import argparse
import functools
import operator
from abc import ABC, abstractmethod
from argparse import Namespace
from functools import wraps
from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple, Union
from weakref import ReferenceType
import numpy as np
import pytorch_lightning as pl
from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint
from pytorch_lightning.utilities.rank_zero import rank_zero_deprecation, rank_zero_only
def rank_zero_experiment(fn: Callable) -> Callable:
    """Decorator: return the real experiment on rank 0 and a ``DummyExperiment`` everywhere else."""

    @wraps(fn)
    def experiment(self):
        # ``rank_zero_only`` yields ``None`` on non-zero ranks, so fall back
        # to a no-op experiment there (and for any falsy experiment object).
        @rank_zero_only
        def _materialize():
            return fn(self)

        exp = _materialize()
        return exp or DummyExperiment()

    return experiment
class LightningLoggerBase(ABC):
    """Base class for experiment loggers.

    Args:
        agg_key_funcs:
            Mapping from a metric name to a function that aggregates the
            values recorded for that metric at the same step.
        agg_default_func:
            Fallback aggregation function, applied to every metric whose
            name does not appear in ``agg_key_funcs``.

    Note:
        The ``agg_key_funcs`` and ``agg_default_func`` arguments only take
        effect when metrics are logged through the
        :meth:`~LightningLoggerBase.agg_and_log_metrics` method.
    """

    def __init__(
        self,
        agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
        agg_default_func: Callable[[Sequence[float]], float] = np.mean,
    ):
        # Step of the most recently accumulated metrics (-1 before anything arrives).
        self._prev_step: int = -1
        # Metric dicts collected for ``_prev_step`` that have not been reduced yet.
        self._metrics_to_agg: List[Dict[str, float]] = []
        self._agg_key_funcs = agg_key_funcs or {}
        self._agg_default_func = agg_default_func

    def after_save_checkpoint(self, checkpoint_callback: "ReferenceType[ModelCheckpoint]") -> None:
        """Called after model checkpoint callback saves a new checkpoint.

        Args:
            checkpoint_callback: the model checkpoint callback instance
        """

    def update_agg_funcs(
        self,
        agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
        agg_default_func: Callable[[Sequence[float]], float] = np.mean,
    ):
        """Update the aggregation methods.

        Args:
            agg_key_funcs:
                Mapping from a metric name to a function that aggregates the
                values recorded for that metric at the same step.
            agg_default_func:
                Fallback aggregation function, applied to every metric whose
                name does not appear in ``agg_key_funcs``.
        """
        if agg_key_funcs:
            self._agg_key_funcs.update(agg_key_funcs)
        if agg_default_func:
            self._agg_default_func = agg_default_func

    def _aggregate_metrics(
        self, metrics: Dict[str, float], step: Optional[int] = None
    ) -> Tuple[int, Optional[Dict[str, float]]]:
        """Accumulate metrics, reducing the accumulator whenever a new step arrives.

        Args:
            metrics: Dictionary with metric names as keys and measured quantities as values
            step: Step number at which the metrics should be recorded

        Returns:
            Step and aggregated metrics. The metrics part may be ``None``,
            meaning the input was only added to the accumulator and nothing
            has been reduced yet.
        """
        if step == self._prev_step:
            # Still receiving metrics for the same step: just accumulate.
            self._metrics_to_agg.append(metrics)
            return step, None

        # A new step arrived: reduce everything collected so far ...
        agg_step, aggregated = self._reduce_agg_metrics()
        # ... and restart the accumulator with the incoming metrics.
        self._metrics_to_agg = [metrics]
        self._prev_step = step
        return agg_step, aggregated

    def _reduce_agg_metrics(self):
        """Reduce the accumulated metric dicts into a single dict (or ``None`` if empty)."""
        pending = self._metrics_to_agg
        if not pending:
            aggregated = None
        elif len(pending) == 1:
            # A single entry needs no merging.
            aggregated = pending[0]
        else:
            aggregated = merge_dicts(pending, self._agg_key_funcs, self._agg_default_func)
        return self._prev_step, aggregated

    def _finalize_agg_metrics(self):
        """Flush any pending aggregated metrics. This shall be called before save/close."""
        agg_step, pending = self._reduce_agg_metrics()
        self._metrics_to_agg = []
        if pending is not None:
            self.log_metrics(metrics=pending, step=agg_step)

    def agg_and_log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None):
        """Aggregate and record metrics.

        This method does not log the passed metrics instantaneously; it
        accumulates them and only logs once they are ready (a later step
        arrives, or the logger is saved/closed).

        Args:
            metrics: Dictionary with metric names as keys and measured quantities as values
            step: Step number at which the metrics should be recorded
        """
        agg_step, ready = self._aggregate_metrics(metrics=metrics, step=step)
        if ready:
            self.log_metrics(metrics=ready, step=agg_step)

    @abstractmethod
    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None):
        """Record metrics immediately, as soon as they are received.

        To aggregate metrics for one specific ``step``, use the
        :meth:`~pytorch_lightning.loggers.base.LightningLoggerBase.agg_and_log_metrics`
        method instead.

        Args:
            metrics: Dictionary with metric names as keys and measured quantities as values
            step: Step number at which the metrics should be recorded
        """

    @abstractmethod
    def log_hyperparams(self, params: argparse.Namespace, *args, **kwargs):
        """Record hyperparameters.

        Args:
            params: :class:`~argparse.Namespace` containing the hyperparameters
            args: Optional positional arguments, depends on the specific logger being used
            kwargs: Optional keyword arguments, depends on the specific logger being used
        """

    def log_graph(self, model: "pl.LightningModule", input_array=None) -> None:
        """Record the model graph.

        Args:
            model: lightning model
            input_array: input passed to `model.forward`
        """

    def log_text(self, *args, **kwargs) -> None:
        """Log text. Arguments are directly passed to the logger."""
        raise NotImplementedError

    def log_image(self, *args, **kwargs) -> None:
        """Log image. Arguments are directly passed to the logger."""
        raise NotImplementedError

    def save(self) -> None:
        """Save log data."""
        self._finalize_agg_metrics()

    def finalize(self, status: str) -> None:
        """Do any processing that is necessary to finalize an experiment.

        Args:
            status: Status that the experiment finished with (e.g. success, failed, aborted)
        """
        self.save()

    def close(self) -> None:
        """Do any cleanup that is necessary to close an experiment.

        .. deprecated:: v1.5
            This method is deprecated in v1.5 and will be removed in v1.7.
            Please use `LightningLoggerBase.finalize` instead.
        """
        rank_zero_deprecation(
            "`LightningLoggerBase.close` method is deprecated in v1.5 and will be removed in v1.7."
            " Please use `LightningLoggerBase.finalize` instead."
        )
        self.save()

    @property
    def save_dir(self) -> Optional[str]:
        """Root directory where experiment logs get saved, or ``None`` if the logger
        does not save data locally."""
        return None

    @property
    def group_separator(self):
        """Default separator used by the logger to group data into subfolders."""
        return "/"

    @property
    @abstractmethod
    def name(self) -> str:
        """Return the experiment name."""

    @property
    @abstractmethod
    def version(self) -> Union[int, str]:
        """Return the experiment version."""
class LoggerCollection(LightningLoggerBase):
    """Broadcasts every logging action to each logger in ``logger_iterable``.

    Args:
        logger_iterable: An iterable collection of loggers
    """

    def __init__(self, logger_iterable: Iterable[LightningLoggerBase]):
        super().__init__()
        self._logger_iterable = logger_iterable

    def __getitem__(self, index: int) -> LightningLoggerBase:
        return list(self._logger_iterable)[index]

    def after_save_checkpoint(self, checkpoint_callback: "ReferenceType[ModelCheckpoint]") -> None:
        for lg in self._logger_iterable:
            lg.after_save_checkpoint(checkpoint_callback)

    def update_agg_funcs(
        self,
        agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
        agg_default_func: Callable[[Sequence[float]], float] = np.mean,
    ):
        for lg in self._logger_iterable:
            lg.update_agg_funcs(agg_key_funcs, agg_default_func)

    @property
    def experiment(self) -> List[Any]:
        """List of experiment objects, one per logger in the collection."""
        return [lg.experiment for lg in self._logger_iterable]

    def agg_and_log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None):
        for lg in self._logger_iterable:
            lg.agg_and_log_metrics(metrics, step)

    def log_metrics(self, metrics: Dict[str, float], step: Optional[int] = None) -> None:
        for lg in self._logger_iterable:
            lg.log_metrics(metrics, step)

    def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
        for lg in self._logger_iterable:
            lg.log_hyperparams(params)

    def log_graph(self, model: "pl.LightningModule", input_array=None) -> None:
        for lg in self._logger_iterable:
            lg.log_graph(model, input_array)

    def log_text(self, *args, **kwargs) -> None:
        for lg in self._logger_iterable:
            lg.log_text(*args, **kwargs)

    def log_image(self, *args, **kwargs) -> None:
        for lg in self._logger_iterable:
            lg.log_image(*args, **kwargs)

    def save(self) -> None:
        for lg in self._logger_iterable:
            lg.save()

    def finalize(self, status: str) -> None:
        for lg in self._logger_iterable:
            lg.finalize(status)

    def close(self) -> None:
        """
        .. deprecated:: v1.5
            This method is deprecated in v1.5 and will be removed in v1.7.
            Please use `LoggerCollection.finalize` instead.
        """
        rank_zero_deprecation(
            "`LoggerCollection.close` method is deprecated in v1.5 and will be removed in v1.7."
            " Please use `LoggerCollection.finalize` instead."
        )
        for lg in self._logger_iterable:
            lg.close()

    @property
    def save_dir(self) -> Optional[str]:
        """Always ``None``: with multiple loggers, checkpoints are saved to the
        default / chosen location instead of any single logger's directory."""
        return None

    @property
    def name(self) -> str:
        """Unique experiment names of all wrapped loggers, joined by an underscore."""
        unique_names = dict.fromkeys(str(lg.name) for lg in self._logger_iterable)
        return "_".join(unique_names)

    @property
    def version(self) -> str:
        """Unique experiment versions of all wrapped loggers, joined by an underscore."""
        unique_versions = dict.fromkeys(str(lg.version) for lg in self._logger_iterable)
        return "_".join(unique_versions)
class DummyExperiment:
    """No-op stand-in for a real experiment object.

    Every attribute access resolves to :meth:`nop`, and indexing returns the
    experiment itself, so arbitrary call chains like
    ``logger.experiment[0].add_image(...)`` silently do nothing.
    """

    def nop(self, *args, **kwargs):
        pass

    def __getattr__(self, name):
        # Any unknown attribute becomes the no-op callable.
        return self.nop

    def __getitem__(self, idx) -> "DummyExperiment":
        # Enables ``self.logger.experiment[0].add_image(...)``.
        return self

    def __setitem__(self, *args, **kwargs) -> None:
        pass
class DummyLogger(LightningLoggerBase):
    """No-op logger for internal use.

    Useful when a feature must disable the user's logger while still letting
    user code that touches the logger run without errors.
    """

    def __init__(self):
        super().__init__()
        self._experiment = DummyExperiment()

    @property
    def experiment(self) -> DummyExperiment:
        """Return the (dummy) experiment object associated with this logger."""
        return self._experiment

    def log_metrics(self, *args, **kwargs) -> None:
        pass

    def log_hyperparams(self, *args, **kwargs) -> None:
        pass

    @property
    def name(self) -> str:
        """Return the experiment name (always empty)."""
        return ""

    @property
    def version(self) -> str:
        """Return the experiment version (always empty)."""
        return ""

    def __getitem__(self, idx) -> "DummyLogger":
        # Enables ``self.logger[0].experiment.add_image(...)``.
        return self

    def __iter__(self):
        # When substituting a logger collection, pretend it is empty.
        return iter(())
def merge_dicts(
    dicts: Sequence[Mapping],
    agg_key_funcs: Optional[Mapping[str, Callable[[Sequence[float]], float]]] = None,
    default_func: Callable[[Sequence[float]], float] = np.mean,
) -> Dict:
    """Merge a sequence of dictionaries into one by aggregating values of the same key.

    Args:
        dicts:
            Sequence of dictionaries to be merged.
        agg_key_funcs:
            Mapping from key name to function. This function will aggregate a
            list of values, obtained from the same key of all dictionaries.
            If some key has no specified aggregation function, the default one
            will be used. Default is: ``None`` (all keys will be aggregated by the
            default function).
        default_func:
            Default function to aggregate keys, which are not presented in the
            `agg_key_funcs` map.

    Returns:
        Dictionary with merged values. Keys whose values are ``None`` in every
        input dictionary are omitted (``None`` values are never aggregated).

    Examples:
        >>> import pprint
        >>> d1 = {'a': 1.7, 'b': 2.0, 'c': 1, 'd': {'d1': 1, 'd3': 3}}
        >>> d2 = {'a': 1.1, 'b': 2.2, 'v': 1, 'd': {'d1': 2, 'd2': 3}}
        >>> d3 = {'a': 1.1, 'v': 2.3, 'd': {'d3': 3, 'd4': {'d5': 1}}}
        >>> dflt_func = min
        >>> agg_funcs = {'a': np.mean, 'v': max, 'd': {'d1': sum}}
        >>> pprint.pprint(merge_dicts([d1, d2, d3], agg_funcs, dflt_func))
        {'a': 1.3,
         'b': 2.0,
         'c': 1,
         'd': {'d1': 3, 'd2': 3, 'd3': 3, 'd4': {'d5': 1}},
         'v': 2.3}
    """
    agg_key_funcs = agg_key_funcs or {}
    # Union of all keys appearing in any input dictionary.
    keys = list(functools.reduce(operator.or_, [set(d.keys()) for d in dicts]))
    d_out: Dict = {}
    for k in keys:
        fn = agg_key_funcs.get(k)
        values_to_agg = [v for v in [d_in.get(k) for d_in in dicts] if v is not None]
        if not values_to_agg:
            # Every dict mapped this key to None (or lacked it entirely):
            # nothing to aggregate. Skip instead of raising IndexError below.
            continue
        if isinstance(values_to_agg[0], dict):
            # Nested dicts are merged recursively; ``fn`` may itself be a
            # per-key mapping for the nested level (see the docstring example).
            d_out[k] = merge_dicts(values_to_agg, fn, default_func)
        else:
            d_out[k] = (fn or default_func)(values_to_agg)
    return d_out