Skip to content

Commit

Permalink
Fix masked_fill broadcasting.
Browse files Browse the repository at this point in the history
  • Loading branch information
ysiraichi committed Oct 9, 2023
1 parent c9a1324 commit c0d6726
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 7 deletions.
13 changes: 13 additions & 0 deletions test/test_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,6 +309,19 @@ def test_get_xla_tensor(self):
tx = t.select(1, 12)
self.assertEqual(tx, sx.data.cpu())

def test_masked_fill_scalar(self):

  def fill_masked(t):
    # Build a boolean mask from the first row of `t`, then restore it to
    # the same rank as `t` so masked_fill must broadcast it.
    row_mask = t[0].ge(0.5).unsqueeze(dim=0)
    return t.masked_fill(row_mask, 10)

  # Run the same computation on the XLA device and on CPU and compare.
  xla_input = _gen_tensor(2, 2, device=xm.xla_device())
  cpu_input = xla_input.cpu()
  self.assertEqual(fill_masked(cpu_input), fill_masked(xla_input))


class TestRandom(test_utils.XlaTestCase):

Expand Down
14 changes: 7 additions & 7 deletions torch_xla/csrc/data_ops.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -147,19 +147,19 @@ xla::XlaOp BuildExpand(xla::XlaOp input,
// Lowers aten::masked_fill with a scalar fill value.
//
// Wherever `mask` is non-zero, the result takes `scalar` (converted to
// input's element type); elsewhere it takes `input`. When the two shapes
// differ, both operands are first expanded to their common promoted
// (broadcast) shape, so masks of any broadcast-compatible shape work —
// not just masks whose rank relates to the input's in a fixed way.
xla::XlaOp BuildMaskedFillScalar(xla::XlaOp input, xla::XlaOp mask,
                                 xla::XlaOp scalar) {
  const xla::Shape& input_shape = ShapeHelper::ShapeOfXlaOp(input);
  const xla::Shape& mask_shape = ShapeHelper::ShapeOfXlaOp(mask);

  if (!xla::ShapeUtil::Compatible(input_shape, mask_shape)) {
    // Broadcast both operands to the promoted common shape so the
    // element-wise Select below sees identically-shaped inputs.
    xla::Shape shape = XlaHelpers::GetPromotedShape(input_shape, mask_shape);
    input = BuildExpand(input, shape.dimensions());
    mask = BuildExpand(mask, shape.dimensions());
  }

  // Interpret any non-zero mask element as "fill here".
  xla::XlaOp zero = xla::Zero(mask.builder(), XlaHelpers::TypeOfXlaOp(mask));
  xla::XlaOp mask_pred = xla::Ne(mask, zero);
  // Re-query input's shape: `input` may have been rebuilt by BuildExpand
  // above, so the original `input_shape` reference would be stale.
  xla::XlaOp update_scalar =
      ConvertTo(scalar, ShapeHelper::ShapeOfXlaOp(scalar).element_type(),
                ShapeHelper::ShapeOfXlaOp(input).element_type(), nullptr);
  return xla::Select(mask_pred, update_scalar, input);
}

Expand Down

0 comments on commit c0d6726

Please sign in to comment.