torch.linalg.norm frontend (#21955)
mobley-trent authored Sep 8, 2023
1 parent c1eaf05 commit 1358013
Showing 2 changed files with 87 additions and 0 deletions.
20 changes: 20 additions & 0 deletions ivy/functional/frontends/torch/linalg.py
@@ -185,6 +185,26 @@ def multi_dot(tensors, *, out=None):
    return ivy.multi_dot(tensors, out=out)


@to_ivy_arrays_and_back
@with_supported_dtypes(
    {"2.0.1 and below": ("float32", "float64", "complex64", "complex128")}, "torch"
)
def norm(input, ord=None, dim=None, keepdim=False, *, dtype=None, out=None):
    # ord given without dim: vector norm for 1-D inputs, matrix norm otherwise
    if dim is None and ord is not None:
        if input.ndim == 1:
            ret = ivy.vector_norm(input, axis=dim, keepdims=keepdim, ord=ord)
        else:
            ret = ivy.matrix_norm(input, keepdims=keepdim, ord=ord)
    # neither ord nor dim given: 2-norm of the flattened input
    elif dim is None and ord is None:
        input = ivy.flatten(input)
        ret = ivy.vector_norm(input, axis=0, keepdims=keepdim, ord=2)
    # a single axis: vector norm along that axis
    if isinstance(dim, int):
        ret = ivy.vector_norm(input, axis=dim, keepdims=keepdim, ord=ord)
    # a pair of axes: matrix norm over those axes
    elif isinstance(dim, tuple):
        ret = ivy.matrix_norm(input, axis=dim, keepdims=keepdim, ord=ord)
    return ret
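The dispatch above maps directly onto ivy's public norm helpers. A minimal sketch of the three paths, assuming an installed ivy with the numpy backend available (the backend choice and sample values are illustrative, not part of this commit):

import ivy

ivy.set_backend("numpy")
x = ivy.array([[3.0, 4.0], [0.0, 0.0]])

# ord is None and dim is None: 2-norm of the flattened input
print(ivy.vector_norm(ivy.flatten(x), axis=0, ord=2))  # 5.0

# dim is an int: vector norm along that axis
print(ivy.vector_norm(x, axis=1, ord=1))  # [7., 0.]

# dim is a tuple: matrix norm over those two axes
print(ivy.matrix_norm(x, axis=(0, 1), ord="fro"))  # 5.0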


@to_ivy_arrays_and_back
@with_supported_dtypes(
{"2.0.1 and below": ("float32", "float64", "complex32", "complex64")}, "torch"
67 changes: 67 additions & 0 deletions ivy_tests/test_ivy/test_frontends/test_torch/test_linalg.py
@@ -60,6 +60,39 @@ def _generate_multi_dot_dtype_and_arrays(draw):
    return input_dtype, [matrix_1[1][0], matrix_2[1][0], matrix_3[1][0]]


@st.composite
def _get_axis_and_p(draw):
    p = draw(st.sampled_from(["fro", "nuc", 1, 2, -1, -2, float("inf"), -float("inf")]))
    if p in ("fro", "nuc"):
        min_axes_size = 2
        max_axes_size = 2
    else:
        min_axes_size = 1
        max_axes_size = 5
    x_dtype, values, axis = draw(
        helpers.dtype_values_axis(
            available_dtypes=helpers.get_dtypes("valid"),
            min_num_dims=2,
            valid_axis=True,
            min_value=-1e04,
            max_value=1e04,
            min_axes_size=min_axes_size,
            max_axes_size=max_axes_size,
            large_abs_safety_factor=2,
            safety_factor_scale="log",
        )
    )
    axis = axis[0] if isinstance(axis, tuple) and len(axis) == 1 else axis
    # ToDo: fix the castable dtype helper. Right now using `dtype` causes errors
    # dtype should be real for real inputs, but got ComplexDouble
    x_dtype, values, dtype = draw(
        helpers.get_castable_dtype(
            draw(helpers.get_dtypes("valid")), x_dtype[0], values[0]
        )
    )
    # return x_dtype in place of the castable dtype until the ToDo above is resolved
    return p, x_dtype, values, axis, x_dtype
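The fro/nuc branch mirrors torch.linalg.norm's own rule that these orders are matrix norms and therefore need exactly two axes in dim, which is why the strategy pins both axes-size bounds to 2 there. A small sketch of that behaviour, assuming PyTorch itself is installed (not part of this commit):

import torch

x = torch.arange(12.0).reshape(3, 4)
print(torch.linalg.norm(x, ord="fro", dim=(0, 1)))  # tensor(22.4944): matrix norm over two axes
# torch.linalg.norm(x, ord="fro", dim=0) would raise, since "fro" expects a pair of axes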


# helpers
@st.composite
def _get_dtype_and_matrix(
@@ -868,6 +901,40 @@ def test_torch_multi_dot(
)


# norm
@handle_frontend_test(
    fn_tree="torch.linalg.norm",
    args=_get_axis_and_p(),
    keepdim=st.booleans(),
    test_with_out=st.just(False),
)
def test_torch_norm(
    *,
    args,
    keepdim,
    on_device,
    fn_tree,
    frontend,
    test_flags,
    backend_fw,
):
    p, x_dtype, x, axis, dtype = args
    helpers.test_frontend_function(
        input_dtypes=[x_dtype],
        backend_to_test=backend_fw,
        frontend=frontend,
        test_flags=test_flags,
        fn_tree=fn_tree,
        on_device=on_device,
        rtol=1e-01,
        atol=1e-08,
        input=x,
        ord=p,
        dim=axis,
        keepdim=keepdim,
        dtype=dtype,
    )
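To exercise only this new test locally, one hedged option is to filter by name through pytest's own API; this assumes an ivy development checkout with the test requirements installed and is not part of the commit:

import pytest

# Run only test_torch_norm from the frontend linalg test module.
pytest.main([
    "ivy_tests/test_ivy/test_frontends/test_torch/test_linalg.py",
    "-k", "test_torch_norm",
])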


# pinv
# TODO: add testing for hermitian
@handle_frontend_test(
