Skip to content

Commit

Permalink
ref: unify slurm and TE under backendPlugin 5/n
Browse files Browse the repository at this point in the history
  • Loading branch information
williamFalcon committed Nov 8, 2020
1 parent e02d007 commit 52d5d1e
Showing 1 changed file with 9 additions and 5 deletions.
14 changes: 9 additions & 5 deletions pytorch_lightning/accelerators/ddp2_accelerator.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,22 +11,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License

import os

import torch
import torch.distributed as torch_distrib

from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.core.step_result import Result
from pytorch_lightning.distributed.dist import LightningDistributed
from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.accelerator import Accelerator
from pytorch_lightning.accelerators.accelerator import Accelerator, ReduceOp
from pytorch_lightning.utilities import AMPType
from pytorch_lightning.utilities.distributed import rank_zero_only
from pytorch_lightning.utilities.distributed import rank_zero_only, sync_ddp_if_available
from torch.nn.parallel import DistributedDataParallel
from typing import List, Optional
from typing import List, Optional, Union, Any

try:
from hydra.utils import to_absolute_path, get_original_cwd
Expand Down Expand Up @@ -203,3 +201,9 @@ def configure_sync_batchnorm(self, model: LightningModule) -> LightningModule:
model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model, process_group=None)

return model

def sync_tensor(self,
                tensor: torch.Tensor,
                group: Optional[Any] = None,
                reduce_op: Optional[Union[ReduceOp, str]] = None) -> torch.Tensor:
    """Reduce a tensor across all processes in the distributed group.

    Thin wrapper around ``sync_ddp_if_available`` so this accelerator exposes
    the common ``sync_tensor`` interface; when no distributed environment is
    initialized the helper is expected to return the tensor unchanged.

    Args:
        tensor: the tensor to synchronize across processes.
        group: the process group to reduce over; ``None`` presumably means the
            default (world) group — confirm against ``sync_ddp_if_available``.
        reduce_op: the reduction operation (a ``ReduceOp`` or its string name);
            ``None`` falls back to the helper's default (typically sum/mean).

    Returns:
        The reduced tensor.
    """
    # NOTE: the original annotation was ``Union[torch.Tensor]`` — a
    # single-member Union is equivalent to the bare type, so it is written
    # plainly here; the runtime interface is unchanged.
    return sync_ddp_if_available(tensor, group, reduce_op)

0 comments on commit 52d5d1e

Please sign in to comment.