From 5c76eceff5658bb487785261e3e1f4c09ce54305 Mon Sep 17 00:00:00 2001
From: Amog Kamsetty
Date: Mon, 15 Mar 2021 11:30:30 -0700
Subject: [PATCH 1/3] return from plugin

---
 pytorch_lightning/trainer/connectors/accelerator_connector.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index 99d716f6b5a8c..4750f91611c24 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -273,6 +273,10 @@ def use_deepspeed(self) -> bool:
 
     @property
     def is_distributed(self) -> bool:
+        # Used for custom plugins.
+        # Custom plugins should implement is_distributed property.
+        if hasattr(self.training_type_plugin, 'is_distributed'):
+            return self.training_type_plugin.is_distributed
         is_distributed = self.use_ddp or self.use_ddp2 or self.use_horovod
         if self.on_tpu:
             is_distributed |= self.training_type_plugin.is_distributed

From afdb16cd2a11c1f70bd2ded005ec695e92f26bfc Mon Sep 17 00:00:00 2001
From: Amog Kamsetty
Date: Mon, 15 Mar 2021 11:56:07 -0700
Subject: [PATCH 2/3] dont return for tpu

---
 pytorch_lightning/trainer/connectors/accelerator_connector.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index 4750f91611c24..f3b92144bdc64 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -275,7 +275,8 @@ def use_deepspeed(self) -> bool:
     def is_distributed(self) -> bool:
         # Used for custom plugins.
         # Custom plugins should implement is_distributed property.
-        if hasattr(self.training_type_plugin, 'is_distributed'):
+        if hasattr(self.training_type_plugin, 'is_distributed') and not \
+            self.on_tpu:
             return self.training_type_plugin.is_distributed
         is_distributed = self.use_ddp or self.use_ddp2 or self.use_horovod
         if self.on_tpu:

From a22a3e2a0bc6116600030f4628a07b774d24c5e9 Mon Sep 17 00:00:00 2001
From: Jirka Borovec
Date: Mon, 15 Mar 2021 20:04:16 +0100
Subject: [PATCH 3/3] Apply suggestions from code review

---
 pytorch_lightning/trainer/connectors/accelerator_connector.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index f3b92144bdc64..7ff288282259a 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -275,8 +275,7 @@ def use_deepspeed(self) -> bool:
     def is_distributed(self) -> bool:
         # Used for custom plugins.
         # Custom plugins should implement is_distributed property.
-        if hasattr(self.training_type_plugin, 'is_distributed') and not \
-            self.on_tpu:
+        if hasattr(self.training_type_plugin, 'is_distributed') and not self.on_tpu:
             return self.training_type_plugin.is_distributed
         is_distributed = self.use_ddp or self.use_ddp2 or self.use_horovod
         if self.on_tpu:
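
Note: with the change above, a third-party training type plugin opts into the connector's distributed handling simply by exposing an is_distributed property (except on TPU, where the existing branch is kept). A minimal sketch of such a plugin follows; the class name MyDistributedPlugin, the choice of DDPSpawnPlugin as a base class, and the import path are illustrative assumptions and may differ between Lightning versions -- only the is_distributed property itself is what the connector looks for.

from pytorch_lightning.plugins import DDPSpawnPlugin


class MyDistributedPlugin(DDPSpawnPlugin):
    """Hypothetical custom plugin; only the property below matters for this patch."""

    @property
    def is_distributed(self) -> bool:
        # Queried via hasattr() by AcceleratorConnector.is_distributed
        # whenever the trainer is not running on TPU.
        return True

Passing an instance of such a plugin to the Trainer (e.g. via the plugins argument, assuming a Lightning version that accepts it) would then make the connector report is_distributed=True without relying on the built-in DDP/DDP2/Horovod flags.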