
Commit ff74854: fix
Borda committed Dec 31, 2020
1 parent 7af6832

Showing 1 changed file with 2 additions and 2 deletions:
pytorch_lightning/accelerators/accelerator_connector.py
@@ -317,11 +317,11 @@ def set_distributed_mode(self):

         # DP and DDP2 cannot run without GPU
         if (self.trainer.num_gpus == 0
-                and self.trainer._distrib_type in (DistributedType.DP, DistributedType.DDP, DistributedType.DDP2)):
+                and self.trainer._distrib_type in (DistributedType.DP, DistributedType.DDP2)):
             rank_zero_warn(
                 'You requested distributed training on GPUs, but none is available, so we set backend to `ddp_cpu`.'
             )
-        # in some cases it yield in comarison None and int
+        # todo: in some cases it yield in comarison None and int
         if ((self.trainer.num_nodes and self.trainer.num_nodes > 1)
                 or (self.trainer.num_processes and self.trainer.num_processes > 1)):
             self.trainer._distrib_type = DistributedType.DDP
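The substantive change removes DistributedType.DDP from the tuple of backends that cannot run without a GPU: DP and DDP2 require GPUs, but plain DDP can legitimately run on CPU (as `ddp_cpu`), so requesting DDP with zero GPUs should no longer trigger the fallback warning. Below is a minimal, self-contained sketch of the patched check; `resolve_distrib_type` and the simplified `DistributedType` enum are hypothetical stand-ins for the real trainer attributes and library enum, not part of pytorch_lightning's API.

from enum import Enum


class DistributedType(Enum):
    """Simplified stand-in for pytorch_lightning's DistributedType enum."""
    DP = "dp"
    DDP = "ddp"
    DDP2 = "ddp2"


def resolve_distrib_type(num_gpus, distrib_type, num_nodes=1, num_processes=1):
    """Hypothetical helper mirroring the patched logic in set_distributed_mode."""
    # After this commit, only DP and DDP2 are rejected when no GPU is present;
    # DDP is allowed through because it can fall back to running on CPU.
    if num_gpus == 0 and distrib_type in (DistributedType.DP, DistributedType.DDP2):
        print('You requested distributed training on GPUs, but none is available, '
              'so we set backend to `ddp_cpu`.')
    # The truthiness guards avoid comparing None > 1, per the "todo" in the diff.
    if (num_nodes and num_nodes > 1) or (num_processes and num_processes > 1):
        distrib_type = DistributedType.DDP
    return distrib_type


# Multi-process CPU training now resolves to DDP without the spurious warning:
print(resolve_distrib_type(num_gpus=0, distrib_type=DistributedType.DDP, num_processes=2))
# -> DistributedType.DDP

Note the design point in the second hunk: `num_nodes` and `num_processes` can be None at this stage, so the comparison `None > 1` would raise a TypeError in Python 3, which is why each value is checked for truthiness before the `> 1` comparison.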
