Skip to content

Commit

Permalink
interpolate ivy-llc#26133 issue
Browse files Browse the repository at this point in the history
  • Loading branch information
rfatihors committed Oct 5, 2023
1 parent 3080aee commit 88bf27f
Show file tree
Hide file tree
Showing 2 changed files with 41 additions and 38 deletions.
30 changes: 10 additions & 20 deletions ivy/functional/backends/paddle/experimental/layers.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
# global
from turtle import st
from typing import Optional, Union, Tuple, List, Literal, Sequence, Callable
import paddle

Expand Down Expand Up @@ -438,10 +437,18 @@ def interpolate(
scale_factor: Optional[Union[Sequence[int], int]] = None,
recompute_scale_factor: Optional[bool] = None,
align_corners: Optional[bool] = None,
antialias: Optional[bool] = False,
data_format: str = "NCHW",
out: Optional[paddle.Tensor] = None,
):
raise IvyNotImplementedException()
if recompute_scale_factor is True:
align_mode = 1
elif recompute_scale_factor is False:
align_mode = 0
else:
align_mode = None
return paddle.nn.functional.interpolate(
x, size, scale_factor, mode, align_corners, align_mode, data_format
)


def adaptive_max_pool2d(
Expand Down Expand Up @@ -663,20 +670,3 @@ def sliding_window(
return paddle.nn.functional.unfold(
input, kernel_size, strides=stride, paddings=padding, dilations=dilation
)


def interpolate_linear(
    x: paddle.Tensor,
    size: Union[Sequence[int], int],
    mode: Optional[Literal["linear", "bilinear", "trilinear"]] = "linear",
    scale_factor: Optional[Union[Sequence[int], int]] = None,
    align_corners: Optional[bool] = False,
    align_mode: int = 0,
    data_format: str = "NCHW",
    name: Optional[str] = None,
    out: Optional[paddle.Tensor] = None,
):
    """Resize ``x`` with paddle's linear-family interpolation modes.

    Thin pass-through to ``paddle.nn.functional.interpolate``; all arguments
    are forwarded positionally in paddle's parameter order.

    BUGFIX: removed the ``@st.composite`` decorator.  ``st`` in this module
    is imported from ``turtle`` (an alias of ``showturtle``) and has no
    ``composite`` attribute, so defining the function raised
    ``AttributeError`` at import time; even hypothesis's ``st.composite``
    is a test-strategy decorator and does not belong on a backend function.

    Parameters
    ----------
    x
        Input tensor to resize.
    size
        Target output size (per spatial dim, or a single int).
    mode
        One of ``"linear"``, ``"bilinear"``, ``"trilinear"``.
    scale_factor
        Multiplier(s) for spatial size; alternative to ``size``.
    align_corners
        Whether corner pixels of input and output are aligned.
    align_mode
        Paddle-specific alignment variant (0 or 1).
    data_format
        Layout of ``x``, e.g. ``"NCHW"``.
    name
        Optional paddle op name.
    out
        Unused; kept for backend-interface compatibility — paddle's
        functional API does not support out-parameters.

    Returns
    -------
    paddle.Tensor
        The resized tensor.
    """
    return paddle.nn.functional.interpolate(
        x, size, scale_factor, mode, align_corners, align_mode, data_format, name
    )
Original file line number Diff line number Diff line change
@@ -1,17 +1,12 @@
# global
from ctypes import Union
from typing import Optional, Literal

import numpy as np
import torch
from hypothesis import strategies as st, assume
from numpy.random.mtrand import Sequence

# local
import ivy
import ivy_tests.test_ivy.helpers as helpers
from ivy.functional.backends import paddle
from ivy.functional.backends.paddle import interpolate_linear
from ivy_tests.test_ivy.helpers import handle_test


Expand Down Expand Up @@ -1091,19 +1086,37 @@ def test_interpolate(
)


def test_interpolate_linear(
    x: paddle.Tensor,
    size: Union[Sequence[int], int],
    mode: Optional[Literal["linear", "bilinear", "trilinear"]] = "linear",
    scale_factor: Optional[Union[Sequence[int], int]] = None,
    align_corners: Optional[bool] = False,
    align_mode: int = 0,
    data_format: str = "NCHW",
    name: Optional[str] = None,
):
    """Check ``interpolate_linear`` matches paddle's functional API.

    BUGFIXES versus the original:
    - ``interpolate_linear`` was called as ``interpolate_linear(x)``,
      omitting the required ``size`` argument (``TypeError``); the full
      argument set is now forwarded so both sides compute the same op.
    - ``==`` on paddle tensors is element-wise and asserting the resulting
      multi-element tensor is ambiguous; the comparison is reduced to a
      single Python bool with ``paddle.allclose(...).item()``.
    """
    expected = paddle.nn.functional.interpolate(
        x, size, scale_factor, mode, align_corners, align_mode, data_format, name
    )
    actual = interpolate_linear(
        x, size, mode, scale_factor, align_corners, align_mode, data_format, name
    )
    assert paddle.allclose(expected, actual).item()
@handle_test(
    fn_tree="functional.ivy.backends.experimental.paddle.interpolate",
    dtype_x_mode=_interp_args(),
    test_gradients=st.just(False),
    number_positional_args=st.just(2),
)
def test_interpolate_paddle(dtype_x_mode, test_flags, backend_fw, fn_name, on_device):
    """Exercise the paddle backend ``interpolate`` through the ivy harness.

    ``dtype_x_mode`` is a drawn tuple bundling the input dtype/tensor with
    the interpolation options produced by ``_interp_args``.
    """
    dtype, x, mode, size, align_corners, scale_factor, recompute = dtype_x_mode
    # NOTE(review): rtol/atol of 1e-01 are very loose — presumably to absorb
    # backend disagreement on interpolation rounding; confirm whether tighter
    # tolerances are feasible.
    helpers.test_function(
        input_dtypes=dtype,
        test_flags=test_flags,
        backend_to_test=backend_fw,
        fn_name=fn_name,
        on_device=on_device,
        rtol_=1e-01,
        atol_=1e-01,
        x=x[0],
        size=size,
        mode=mode,
        align_corners=align_corners,
        scale_factor=scale_factor,
        recompute_scale_factor=recompute,
    )


@handle_test(
Expand Down

0 comments on commit 88bf27f

Please sign in to comment.