[SPMD] fix bug with XLAShardedTensor.__repr__ (pytorch#5807)
yeounoh committed Nov 16, 2023
1 parent 6e66130 commit 6addbde
Showing 2 changed files with 18 additions and 0 deletions.
14 changes: 14 additions & 0 deletions test/spmd/test_xla_sharding.py
@@ -42,6 +42,20 @@ def test_xla_sharded_tensor(self):
    # TODO(244003536) add more tests for XLAShardedTensor.
    self.assertTrue(isinstance(xst1, XLAShardedTensor))

  def test_xla_sharded_tensor_repr(self):
    xt = torch.randn(128, 128).to(xm.xla_device())
    model = self.SimpleLinear().to(xm.xla_device())

    mesh = self._get_mesh((1, self.n_devices))
    partition_spec = (0, 1)
    xst = xs.mark_sharding(xt, mesh, partition_spec)
    self.assertTrue(isinstance(xst, XLAShardedTensor))

    xt_output = model(xt)
    self.assertTrue('XLAShardedTensor' not in str(xt_output))
    xst_output = model(xst)
    self.assertTrue('XLAShardedTensor' in str(xst_output))

  def test_sharded_tensor_debug_info(self):
    partition_spec = (0, 1)
    xt1 = torch.tensor([[1, 2, 3, 4, 5, 6, 7, 8]],
4 changes: 4 additions & 0 deletions torch_xla/distributed/spmd/xla_sharded_tensor.py
@@ -142,6 +142,10 @@ def sharding_type(self) -> 'ShardingType':
    return ShardingType(sharding_type)

  def __repr__(self):
    if not hasattr(self, "global_tensor"):
      # Materialize a copy of the sharded global_tensor for printing, while
      # keeping the actual data sharded on the XLA devices.
      return str(self.cpu())
    return f"XLAShardedTensor({self.global_tensor})"

  @classmethod
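For context, a standalone sketch of the behavior the new test exercises: once a tensor is sharded with mark_sharding, printing it (or the output of ops applied to it) should surface the XLAShardedTensor repr rather than fail when global_tensor is absent. The import paths, Mesh construction, and use_spmd() call below are assumptions modeled on test/spmd/test_xla_sharding.py, not part of this commit; the test itself relies on fixtures such as _get_mesh and SimpleLinear.

```python
# Illustrative sketch only: import paths, Mesh setup, and use_spmd() are
# assumptions modeled on test/spmd/test_xla_sharding.py, not this commit.
import numpy as np
import torch
import torch_xla.core.xla_model as xm
import torch_xla.runtime as xr
import torch_xla.distributed.spmd as xs
from torch_xla.distributed.spmd import Mesh

xr.use_spmd()  # enable SPMD mode (assumed available in this torch_xla build)

num_devices = xr.global_runtime_device_count()
mesh = Mesh(np.arange(num_devices), (1, num_devices), ('x', 'y'))

xt = torch.randn(128, 128).to(xm.xla_device())
xst = xs.mark_sharding(xt, mesh, (0, 1))  # returns an XLAShardedTensor

# With the fixed __repr__, both the freshly sharded tensor and the result of
# ops on it print without assuming `global_tensor` is always set.
print(xst)
model = torch.nn.Linear(128, 128).to(xm.xla_device())
print(model(xst))
```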
