
Commit

move copy() as suggested by @carmocca
awaelchli committed Jul 29, 2021
1 parent d3278fd commit ecf42db
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions pytorch_lightning/loops/batch/training_batch_loop.py
@@ -144,12 +144,12 @@ def advance(self, batch, batch_idx, dataloader_idx):

                     result = self._run_optimization(batch_idx, split_batch, opt_idx, optimizer)
                     if result:
-                        self.batch_outputs[opt_idx].append(result.training_step_output)
+                        self.batch_outputs[opt_idx].append(copy(result.training_step_output))
             else:
                 # in manual optimization, there is no looping over optimizers
                 result = self._run_optimization(batch_idx, split_batch)
                 if result:
-                    self.batch_outputs[0].append(result.training_step_output)
+                    self.batch_outputs[0].append(copy(result.training_step_output))

     def teardown(self) -> None:
         # release memory
@@ -319,7 +319,7 @@ def _training_step(
             closure_loss = training_step_output.minimize / self.trainer.accumulate_grad_batches
             # the loss will get scaled for amp. avoid any modifications to it
             loss = closure_loss.detach().clone()
-        return AttributeDict(closure_loss=closure_loss, loss=loss, training_step_output=copy(training_step_output))
+        return AttributeDict(closure_loss=closure_loss, loss=loss, training_step_output=training_step_output)

     def _process_training_step_output(self, training_step_output: STEP_OUTPUT) -> Optional[ResultCollection]:
         """Adds the :param:`training_step_output` to the trainer's results
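With this change, `copy()` runs only at the point where a result is actually appended to `batch_outputs` in `advance`, rather than on every return from `_training_step`. The sketch below is a minimal illustration of why a shallow copy is taken before appending; it uses a hypothetical `StepOutput` stand-in, not Lightning's real `ResultCollection`. If the loop later mutates the same output object in place, a stored bare reference aliases that mutation, while the copy preserves the state at append time.

from copy import copy

# Hypothetical stand-in for the object stored in batch_outputs; the real
# loop appends result.training_step_output, not this class.
class StepOutput:
    def __init__(self, loss):
        self.loss = loss

batch_outputs = []
result = StepOutput(loss=0.9)

batch_outputs.append(result)        # bare reference: aliases the live object
batch_outputs.append(copy(result))  # shallow copy: snapshot taken at append time

result.loss = 0.1  # a later in-place mutation of the same object

print(batch_outputs[0].loss)  # 0.1 -- the stored reference reflects the mutation
print(batch_outputs[1].loss)  # 0.9 -- the copy kept the value from append time

Because `copy()` is shallow, only the top-level attributes are snapshotted; any tensors the output references are still shared, which keeps the operation cheap.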
