Skip to content

Commit

Permalink
unnecessary files removed
Browse files Browse the repository at this point in the history
  • Loading branch information
Sameerk22 committed Aug 31, 2023
1 parent 18831b1 commit 87fc53c
Show file tree
Hide file tree
Showing 4 changed files with 86 additions and 3 deletions.
39 changes: 39 additions & 0 deletions ivy/functional/frontends/paddle/nn/functional/vision.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,45 @@
from ivy.utils.assertions import check_equal


@to_ivy_arrays_and_back
def pixel_unshuffle(x, downscale_factor, data_format="NCHW"):
    """Fold spatial blocks of ``x`` into the channel dimension.

    Inverse of pixel_shuffle: an input of shape ``(N, C, H, W)`` becomes
    ``(N, C * downscale_factor**2, H // downscale_factor,
    W // downscale_factor)`` (analogously for ``"NHWC"``).

    Parameters
    ----------
    x
        4-D input tensor.
    downscale_factor
        Factor by which the spatial resolution is reduced; ``H`` and ``W``
        must both be divisible by it.
    data_format
        Either ``"NCHW"`` (default) or ``"NHWC"``; any other value is
        treated as ``"NHWC"`` (matches the permissive original behavior).

    Returns
    -------
    The rearranged tensor.
    """
    factor = downscale_factor
    if data_format == "NCHW":
        b, c, h, w = ivy.shape(x)
    else:
        b, h, w, c = ivy.shape(x)

    # pixel_unshuffle requires the *spatial* dims to be divisible by the
    # factor (channels are multiplied, not divided).
    check_equal(
        h % factor,
        0,
        message=(
            "pixel unshuffle expects height to be divisible by downscale"
            " factor, but got input with size {}, downscale factor={}".format(
                (b, c, h, w) if data_format == "NCHW" else (b, h, w, c), factor
            )
        ),
        as_array=False,
    )
    check_equal(
        w % factor,
        0,
        message=(
            "pixel unshuffle expects width to be divisible by downscale"
            " factor, but got input with size {}, downscale factor={}".format(
                (b, c, h, w) if data_format == "NCHW" else (b, h, w, c), factor
            )
        ),
        as_array=False,
    )

    oc = c * factor**2
    oh = h // factor
    ow = w // factor

    if data_format == "NCHW":
        # (b, c, oh, r, ow, r) -> (b, c, r, r, oh, ow) -> (b, c*r*r, oh, ow)
        x_reshaped = ivy.reshape(x, (b, c, oh, factor, ow, factor))
        return ivy.reshape(
            ivy.permute_dims(x_reshaped, (0, 1, 3, 5, 2, 4)), (b, oc, oh, ow)
        )
    # NHWC: flatten channels in (c, r1, r2) order so the result matches the
    # NCHW layout transposed — presumably what Paddle does; TODO confirm
    # against paddle.nn.functional.pixel_unshuffle for NHWC inputs.
    x_reshaped = ivy.reshape(x, (b, oh, factor, ow, factor, c))
    return ivy.reshape(
        ivy.permute_dims(x_reshaped, (0, 1, 5, 2, 4, 3)), (b, oh, ow, oc)
    )

@to_ivy_arrays_and_back
@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def affine_grid(theta, out_shape, align_corners=True):
Expand Down
7 changes: 5 additions & 2 deletions ivy/functional/frontends/torch/tensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,9 +495,12 @@ def equal(self, other):
def erf(self, *, out=None):
return torch_frontend.erf(self, out=out)

@with_supported_dtypes(
    {"2.0.1 and below": ("float32", "float64", "bfloat16")}, "torch"
)
def erf_(self, *, out=None):
    # In-place erf: delegate to the out-of-place erf() and overwrite the
    # backing array, so dtype support stays defined in one place.
    self.ivy_array = self.erf(out=out).ivy_array
    return self

def new_zeros(
self,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,47 @@
# --- Helpers --- #
# --------------- #

# pixel_unshuffle
@handle_frontend_test(
    fn_tree="paddle.nn.functional.pixel_unshuffle",
    dtype_and_x=helpers.dtype_and_values(
        available_dtypes=["float32", "float64"],
        min_value=0,
        min_num_dims=4,
        max_num_dims=4,
        min_dim_size=3,
    ),
    # Bound the factor: an unbounded int makes the divisibility assumption
    # below reject almost every draw and trips hypothesis health checks.
    downscale_factor=helpers.ints(min_value=1, max_value=4),
    data_format=st.sampled_from(["NCHW", "NHWC"]),
)
def test_paddle_pixel_unshuffle(
    *,
    dtype_and_x,
    downscale_factor,
    data_format,
    on_device,
    fn_tree,
    frontend,
    test_flags,
    backend_fw,
):
    """Test the paddle.nn.functional.pixel_unshuffle frontend function."""
    input_dtype, x = dtype_and_x
    shape = ivy.shape(x[0])
    # Paddle requires the spatial dims (H, W) to be divisible by the factor.
    if data_format == "NCHW":
        height, width = shape[2], shape[3]
    else:
        height, width = shape[1], shape[2]
    assume(height % downscale_factor == 0)
    assume(width % downscale_factor == 0)
    helpers.test_frontend_function(
        input_dtypes=input_dtype,
        frontend=frontend,
        test_flags=test_flags,
        fn_tree=fn_tree,
        on_device=on_device,
        x=x[0],
        downscale_factor=downscale_factor,
        data_format=data_format,
        backend_to_test=backend_fw,
    )

@st.composite
def _affine_grid_helper(draw):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6180,7 +6180,7 @@ def test_torch_tensor_erf(
init_tree="torch.tensor",
method_name="erf_",
dtype_and_x=helpers.dtype_and_values(
available_dtypes=helpers.get_dtypes("float"),
available_dtypes=helpers.get_dtypes("valid"),
),
)
def test_torch_tensor_erf_(
Expand Down

0 comments on commit 87fc53c

Please sign in to comment.