P var #18995
Merged (24 commits, Aug 17, 2023)
Changes from 19 commits
Commits (24):
00dedbd - after pull the all files (samthakur587, Jun 22, 2023)
0ec4288 - Revert "after pull the all files" (samthakur587, Jun 23, 2023)
6164197 - Merge branch 'unifyai:master' into master (samthakur587, Jun 24, 2023)
007add1 - adding bitwise not (samthakur587, Jun 24, 2023)
02f0091 - Revert "adding bitwise not" (samthakur587, Jun 24, 2023)
0b88f49 - Merge branch 'master' of github.com:samthakur587/ivy (samthakur587, Jun 27, 2023)
67a68ca - added divide method to paddle tensor (samthakur587, Jun 27, 2023)
4c31b4b - Revert "added divide method to paddle tensor" (samthakur587, Jun 27, 2023)
54d2766 - Merge branch 'master' of github.com:samthakur587/ivy (samthakur587, Jul 7, 2023)
eff24fc - adding var to paddle frontend (samthakur587, Jul 8, 2023)
2048701 - Merge branch 'master' into p_var (samthakur587, Jul 8, 2023)
54536b0 - Update tensor.py (samthakur587, Jul 8, 2023)
94535d3 - Update tensor.py (samthakur587, Jul 15, 2023)
c662a1c - solving the merge conflict (samthakur587, Jul 15, 2023)
260dd23 - Merge branch 'master' of github.com:samthakur587/ivy into p_var (samthakur587, Jul 15, 2023)
796b03e - fix: updates supported dtypes and removes... (rishabgit, Jul 20, 2023)
0105763 - Merge branch 'unifyai:master' into p_var (samthakur587, Jul 28, 2023)
af95157 - Merge branch 'unifyai:master' into p_var (samthakur587, Jul 28, 2023)
921d938 - Merge branch 'master' into p_var (samthakur587, Aug 3, 2023)
fbc7e3f - changed dtype (samthakur587, Aug 3, 2023)
866694e - Merge branch 'p_var' of github.com:samthakur587/ivy into p_var (samthakur587, Aug 3, 2023)
e0dfcab - Merge branch 'unifyai:master' into p_var (samthakur587, Aug 8, 2023)
4109a4e - Merge branch 'p_var' of github.com:samthakur587/ivy into p_var (samthakur587, Aug 8, 2023)
b38f85b - changes the dtype (samthakur587, Aug 8, 2023)
ivy/functional/frontends/paddle/tensor/tensor.py (10 changes: 10 additions, 0 deletions)
@@ -577,6 +577,16 @@ def logical_not(self, out=None, name=None):
def sign(self, name=None):
return ivy.sign(self._ivy_array)

@with_supported_dtypes(
{"2.5.1 and below": ("float16", "float32", "float64")}, "paddle"
)
def var(self, axis=None, unbiased=True, keepdim=False, name=None):
return paddle_frontend.Tensor(
ivy.var(
self._ivy_array, axis=axis, correction=int(unbiased), keepdims=keepdim
)
)

@with_unsupported_dtypes({"2.5.1 and below": ("float16", "bfloat16")}, "paddle")
def sgn(self, name=None):
return paddle_frontend.Tensor(ivy.sign(self._ivy_array, np_variant=True))
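For context, here is a minimal usage sketch of the new frontend method; it is not part of the diff, and the backend choice and example values are assumptions:

```python
# Illustrative only: exercises the new paddle-frontend Tensor.var added above.
import ivy
import ivy.functional.frontends.paddle as paddle_frontend

ivy.set_backend("numpy")  # assumes the numpy backend is installed

x = paddle_frontend.to_tensor([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])

# unbiased=True (the default) maps to ivy.var(..., correction=1), i.e. divide by N - 1.
print(x.var())                      # sample variance over all elements
print(x.var(axis=1, keepdim=True))  # per-row variance, keeping the reduced axis
print(x.var(unbiased=False))        # population variance (correction=0)
```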
Second changed file (the paddle Tensor frontend tests):
@@ -12,6 +12,9 @@
from ivy_tests.test_ivy.test_functional.test_experimental.test_core.test_manipulation import ( # noqa E501
_get_dtype_values_k_axes_for_rot90,
)
from ivy_tests.test_ivy.test_functional.test_core.test_statistical import (
_statistical_dtype_values,
)

CLASS_TREE = "ivy.functional.frontends.paddle.Tensor"

@@ -2654,6 +2657,45 @@ def test_paddle_cond(
)



# var
@handle_frontend_method(
class_tree=CLASS_TREE,
init_tree="paddle.to_tensor",
method_name="var",
dtype_and_x=_statistical_dtype_values(
function="var",
min_value=-1e04,
max_value=1e04,
),
keepdim=st.booleans(),
)
def test_paddle_instance_var(
dtype_and_x,
keepdim,
frontend,
frontend_method_data,
init_flags,
method_flags,
on_device,
):
input_dtype, x, axis, correction = dtype_and_x
helpers.test_frontend_method(
init_input_dtypes=input_dtype,
init_all_as_kwargs_np={"data": x[0]},
method_input_dtypes=input_dtype,
method_all_as_kwargs_np={
"axis": axis,
"unbiased": bool(correction),
"keepdim": keepdim,
},
frontend=frontend,
frontend_method_data=frontend_method_data,
init_flags=init_flags,
method_flags=method_flags,
on_device=on_device,
)
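The `_statistical_dtype_values` strategy yields a `(dtype, values, axis, correction)` tuple; the test converts `correction` into the frontend's `unbiased` flag via `bool(correction)`, the inverse of the `int(unbiased)` conversion inside `Tensor.var`. Below is a standalone sketch of that correction semantics, not part of the test suite, with numpy used purely for illustration:

```python
import numpy as np

x = np.array([1.0, 2.0, 3.0, 4.0])

unbiased = True
correction = int(unbiased)          # 1 -> divide by N - 1 (sample variance)
print(np.var(x, ddof=correction))   # 1.666...

unbiased = False
correction = int(unbiased)          # 0 -> divide by N (population variance)
print(np.var(x, ddof=correction))   # 1.25
```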

@handle_frontend_method(
class_tree=CLASS_TREE,
init_tree="paddle.to_tensor",