Disable xla backend for SPMD (pytorch#5690)
* Disable xla backend for SPMD

* Add test

* yapf
jonb377 authored and ghpvnist committed Oct 31, 2023
1 parent 0a91b44 commit 60c1632
Showing 2 changed files with 18 additions and 0 deletions.
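As a quick orientation before the per-file diffs, here is a minimal sketch of the behavior this commit enforces, using only calls that appear in the diffs below: with SPMD enabled, requesting the xla torch.distributed backend now fails fast with the new assertion in xla_backend.py. This sketch is not part of the commit.

# Minimal sketch (not part of the diff): the behavior this commit enforces.
import torch.distributed as dist
import torch_xla.distributed.xla_backend  # provides the 'xla' backend and xla:// init method
from torch_xla import runtime as xr

xr.use_spmd()  # turn on SPMD mode
try:
  dist.init_process_group('xla', init_method='xla://')
except AssertionError as err:
  # "XLA backend is not supported with SPMD. Please use a CPU process group instead."
  print(err)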
15 changes: 15 additions & 0 deletions test/spmd/test_xla_spmd_python_api_interaction.py
@@ -3,7 +3,9 @@
import sys

import torch
import torch.distributed as dist
import torch_xla
import torch_xla.distributed.xla_backend
import torch_xla.core.xla_model as xm
from torch_xla import runtime as xr
from torch_xla.amp import autocast
@@ -132,6 +134,19 @@ def test_xla_autocast_api(self):
self.assertTrue(t3.dtype == expected_dtype)


class BasicDistributedTest(test_xla_sharding_base.XlaShardingTest):

  @classmethod
  def setUpClass(cls):
    xr.use_spmd()
    return super().setUpClass()

  def test_xla_backend(self):
    # XLA backend is not supported with SPMD
    with self.assertRaises(AssertionError):
      dist.init_process_group('xla', init_method='xla://')


if __name__ == '__main__':
  test = unittest.main(exit=False)
  sys.exit(0 if test.result.wasSuccessful() else 1)
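
The test above locks in the failure mode; the assertion message steers users toward a CPU process group instead. A minimal sketch of that alternative follows. Note that the choice of gloo as the CPU backend and the reuse of the xla:// init method with it are assumptions, not something this commit specifies.

# Sketch of the suggested fallback (assumptions: gloo is the CPU backend meant
# by the assertion message, and the xla:// rendezvous also works for non-xla
# backends).
import torch.distributed as dist
import torch_xla.distributed.xla_backend  # assumed to register the xla:// init method
from torch_xla import runtime as xr

xr.use_spmd()
dist.init_process_group('gloo', init_method='xla://')  # CPU group instead of 'xla'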
3 changes: 3 additions & 0 deletions torch_xla/distributed/xla_backend.py
@@ -1,13 +1,16 @@
import torch
import torch.distributed as dist
import torch_xla.core.xla_model as xm
import torch_xla.runtime as xr
from torch_xla._internal import rendezvous
import logging
import os
from torch._C._distributed_c10d import ProcessGroup


def _create_xla_process_group(prefix_store, rank, size, timeout):
  assert not xr.is_spmd(
  ), "XLA backend is not supported with SPMD. Please use a CPU process group instead."
  return ProcessGroupXla(prefix_store, rank, size, timeout)
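
For context on where this check fires: _create_xla_process_group is the factory torch.distributed invokes when a process group with the xla backend is requested, so the assertion runs inside dist.init_process_group('xla', ...). The registration itself lives in the unchanged remainder of xla_backend.py and is not shown in this diff; a rough sketch of that pattern, assuming it follows the standard c10d registration API, would be:

# Assumed registration pattern (not shown in this diff): wiring the factory
# into torch.distributed so the new assert guards init_process_group('xla', ...).
import torch.distributed as dist

dist.Backend.register_backend('xla', _create_xla_process_group)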

