From 4c98322a295554de5c74f4319a84eab8879fc8c4 Mon Sep 17 00:00:00 2001
From: thomas chaton
Date: Fri, 5 Mar 2021 07:52:56 +0000
Subject: [PATCH] [bugfix] Resolve memory leak for evaluation (#6326)

* resolve bug

* resolve flake8

* revert name
---
 CHANGELOG.md                                         | 3 +++
 pytorch_lightning/trainer/evaluation_loop.py         | 4 ++++
 tests/trainer/logging_/test_eval_loop_logging_1_0.py | 1 +
 3 files changed, 8 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1cd238d17c05f..c3251a32a49ba 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -39,6 +39,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Check `LightningOptimizer` doesn't delete optimizer hooks ([#6305](https://github.com/PyTorchLightning/pytorch-lightning/pull/6305)
 
+- Resolve memory leak for evaluation ([#6326](https://github.com/PyTorchLightning/pytorch-lightning/pull/6326))
+
+
 ## [1.2.2] - 2021-03-02
 
 ### Added
diff --git a/pytorch_lightning/trainer/evaluation_loop.py b/pytorch_lightning/trainer/evaluation_loop.py
index 087741aa69c2b..e1b3688ef36e6 100644
--- a/pytorch_lightning/trainer/evaluation_loop.py
+++ b/pytorch_lightning/trainer/evaluation_loop.py
@@ -203,6 +203,10 @@ def __run_eval_epoch_end(self, num_dataloaders):
 
         # with a single dataloader don't pass an array
         outputs = self.outputs
+
+        # free memory
+        self.outputs = []
+
         eval_results = outputs
         if num_dataloaders == 1:
             eval_results = outputs[0]
diff --git a/tests/trainer/logging_/test_eval_loop_logging_1_0.py b/tests/trainer/logging_/test_eval_loop_logging_1_0.py
index 46890f6801711..765fab229f6cf 100644
--- a/tests/trainer/logging_/test_eval_loop_logging_1_0.py
+++ b/tests/trainer/logging_/test_eval_loop_logging_1_0.py
@@ -126,6 +126,7 @@ def validation_step_end(self, acc):
         def validation_epoch_end(self, outputs):
             self.log('g', torch.tensor(2, device=self.device), on_epoch=True)
             self.validation_epoch_end_called = True
+            assert len(self.trainer.evaluation_loop.outputs) == 0
 
         def backward(self, loss, optimizer, optimizer_idx):
             return LightningModule.backward(self, loss, optimizer, optimizer_idx)
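
The core of the fix is the two added lines in `__run_eval_epoch_end`: before this patch, the loop kept every batch's outputs reachable through `self.outputs` until the next evaluation epoch overwrote it, so those tensors stayed alive in the meantime. Below is a minimal standalone sketch of the pattern; `EvalLoopSketch` and `run_eval_epoch_end` are hypothetical names for illustration, not Lightning's actual classes:

```python
class EvalLoopSketch:
    """Minimal sketch of the memory-leak fix in this patch.

    Hypothetical class for illustration, not Lightning's real
    EvaluationLoop.
    """

    def __init__(self):
        # Per-batch step outputs accumulated during the eval epoch,
        # grouped per dataloader.
        self.outputs = []

    def run_eval_epoch_end(self, num_dataloaders):
        # Keep a local reference to the accumulated outputs ...
        outputs = self.outputs

        # ... and drop the instance-level reference right away. Without
        # this, the loop object pins every batch's outputs (and their
        # tensors) in memory until the next evaluation epoch.
        self.outputs = []

        # with a single dataloader don't pass an array
        eval_results = outputs
        if num_dataloaders == 1:
            eval_results = outputs[0]
        return eval_results
```

The one-line test change checks the same invariant from the user's side: by the time `validation_epoch_end` is called, `trainer.evaluation_loop.outputs` must already be empty.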