From bb5f90c46a8afc78dac660566bdb218aaf1f4969 Mon Sep 17 00:00:00 2001
From: Amog Kamsetty
Date: Mon, 15 Mar 2021 12:38:30 -0700
Subject: [PATCH] Custom Plugin is_distributed (#6537)

* return from plugin

* dont return for tpu

(cherry picked from commit 6a1414681107cf77d1ebeba86359df7b6668bf49)
---
 pytorch_lightning/trainer/connectors/accelerator_connector.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index 59d406b0479c6..e5c17614474ee 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -266,6 +266,10 @@ def use_deepspeed(self) -> bool:
 
     @property
     def is_distributed(self) -> bool:
+        # Used for custom plugins.
+        # Custom plugins should implement is_distributed property.
+        if hasattr(self.training_type_plugin, 'is_distributed') and not self.on_tpu:
+            return self.training_type_plugin.is_distributed
         is_distributed = self.use_ddp or self.use_ddp2 or self.use_horovod
         if self.on_tpu:
             is_distributed |= self.training_type_plugin.is_distributed
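
Note: the hasattr() branch added above lets a training type plugin defined
outside Lightning decide `is_distributed` for itself, except on TPU, where
the TPU plugin's own value is still OR'd in by the lines that follow. Below
is a minimal sketch of such a custom plugin, assuming PyTorch Lightning
~1.2; the class name `MyClusterPlugin` and its "more than one device means
distributed" rule are hypothetical and only illustrate the property the
connector now defers to.

    import torch
    from pytorch_lightning.plugins import DDPSpawnPlugin


    class MyClusterPlugin(DDPSpawnPlugin):
        """Hypothetical custom training type plugin (illustration only)."""

        @property
        def is_distributed(self) -> bool:
            # Report whatever the custom backend considers "distributed";
            # here: distributed whenever more than one device was handed in.
            # The accelerator connector's hasattr() check picks this up on
            # non-TPU runs instead of the built-in DDP/DDP2/Horovod flags.
            return len(self.parallel_devices or []) > 1


    # Usage sketch: construct the plugin and read the property directly.
    plugin = MyClusterPlugin(
        parallel_devices=[torch.device("cpu")] * 2,
        cluster_environment=None,
        sync_batchnorm=False,
    )
    print(plugin.is_distributed)  # True -> the connector defers to this value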