From 8e45c421d419a08d54dc74733aef5822cb4d9d67 Mon Sep 17 00:00:00 2001
From: Eddie-Wang1120
Date: Thu, 16 May 2024 13:45:21 +0000
Subject: [PATCH 1/2] update square

---
 paddle/fluid/primitive/codegen/gen.py     |  1 +
 paddle/fluid/primitive/rule/vjp/details.h |  8 ++++++
 test/legacy_test/test_activation_op.py    | 30 ++++++++++++++++++++++-
 3 files changed, 38 insertions(+), 1 deletion(-)

diff --git a/paddle/fluid/primitive/codegen/gen.py b/paddle/fluid/primitive/codegen/gen.py
index 60131dda70b10..1711856e344f0 100644
--- a/paddle/fluid/primitive/codegen/gen.py
+++ b/paddle/fluid/primitive/codegen/gen.py
@@ -67,6 +67,7 @@
     'sin_grad',
     'cos_grad',
     'tanh_grad',
+    'square_grad',
 ]
 
 # prim op with two inputs and one output, with no attribute
diff --git a/paddle/fluid/primitive/rule/vjp/details.h b/paddle/fluid/primitive/rule/vjp/details.h
index 59c031952ee7f..b9a4fe5e904b0 100644
--- a/paddle/fluid/primitive/rule/vjp/details.h
+++ b/paddle/fluid/primitive/rule/vjp/details.h
@@ -795,6 +795,14 @@ void log_grad(const Tensor& x, const Tensor& out_grad, Tensor* x_grad) {
   }
 }
 
+template <typename T>
+void square_grad(const Tensor& x, const Tensor& out_grad, Tensor* x_grad) {
+  if (x_grad) {
+    auto x_grad_tmp = 2 * x * out_grad;
+    set_output<T>(x_grad_tmp, x_grad);
+  }
+}
+
 template <typename T>
 void exp_grad(const Tensor& out, const Tensor& out_grad, Tensor* x_grad) {
   if (x_grad) {
diff --git a/test/legacy_test/test_activation_op.py b/test/legacy_test/test_activation_op.py
index 7806017bbfeed..ed5c9248206ff 100644
--- a/test/legacy_test/test_activation_op.py
+++ b/test/legacy_test/test_activation_op.py
@@ -4297,6 +4297,7 @@ def test_check_grad(self):
             'Out',
             max_relative_error=0.007,
             check_pir=True,
+            check_prim_pir=True,
             check_pir_onednn=self.check_pir_onednn,
         )
 
@@ -4315,6 +4316,17 @@ def init_dtype(self):
     def test_check_output(self):
         self.check_output(check_pir=True)
 
+    def test_check_grad(self):
+        if self.dtype == np.float16:
+            return
+        self.check_grad(
+            ['X'],
+            'Out',
+            max_relative_error=0.007,
+            check_pir=True,
+            check_pir_onednn=self.check_pir_onednn,
+        )
+
 
 class TestSquare_Complex128(TestSquare):
     def init_dtype(self):
@@ -4323,6 +4335,17 @@ def init_dtype(self):
     def test_check_output(self):
         self.check_output(check_pir=True)
 
+    def test_check_grad(self):
+        if self.dtype == np.float16:
+            return
+        self.check_grad(
+            ['X'],
+            'Out',
+            max_relative_error=0.007,
+            check_pir=True,
+            check_pir_onednn=self.check_pir_onednn,
+        )
+
 
 class TestSquare_ZeroDim(TestSquare):
     def init_shape(self):
@@ -4365,7 +4388,12 @@ def test_check_output(self):
     def test_check_grad(self):
         place = core.CUDAPlace(0)
         self.check_grad_with_place(
-            place, ['X'], 'Out', numeric_grad_delta=0.5, check_pir=True
+            place,
+            ['X'],
+            'Out',
+            numeric_grad_delta=0.5,
+            check_pir=True,
+            check_prim_pir=True,
         )
 
 
From 510f7757c0759062f053150e554e4c0eb5153b02 Mon Sep 17 00:00:00 2001
From: Eddie-Wang1120
Date: Wed, 19 Jun 2024 23:49:25 +0800
Subject: [PATCH 2/2] update

---
 paddle/fluid/primitive/rule/vjp/details.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/paddle/fluid/primitive/rule/vjp/details.h b/paddle/fluid/primitive/rule/vjp/details.h
index b9a4fe5e904b0..475c2dd62a610 100644
--- a/paddle/fluid/primitive/rule/vjp/details.h
+++ b/paddle/fluid/primitive/rule/vjp/details.h
@@ -798,7 +798,7 @@ void log_grad(const Tensor& x, const Tensor& out_grad, Tensor* x_grad) {
 template <typename T>
 void square_grad(const Tensor& x, const Tensor& out_grad, Tensor* x_grad) {
   if (x_grad) {
-    auto x_grad_tmp = 2 * x * out_grad;
+    Tensor x_grad_tmp = 2 * x * out_grad;
     set_output<T>(x_grad_tmp, x_grad);
   }
 }
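
Background on the rule these patches add: square's forward is out = x * x, so the chain rule gives x_grad = 2 * x * out_grad elementwise, which is exactly what square_grad computes; the follow-up commit only pins the intermediate's type to Tensor instead of auto. Below is a minimal self-contained sketch of the same math, with std::vector<double> standing in for paddle::Tensor — the names and the finite-difference check here are illustrative, not Paddle APIs:

#include <cassert>
#include <cmath>
#include <cstddef>
#include <vector>

// Elementwise VJP of out = x * x: x_grad[i] = 2 * x[i] * out_grad[i].
// std::vector<double> stands in for paddle::Tensor; illustrative only.
std::vector<double> square_grad(const std::vector<double>& x,
                                const std::vector<double>& out_grad) {
  std::vector<double> x_grad(x.size());
  for (std::size_t i = 0; i < x.size(); ++i) {
    x_grad[i] = 2.0 * x[i] * out_grad[i];
  }
  return x_grad;
}

int main() {
  // Symmetric finite difference of x^2 at x = 3: ((x+h)^2 - (x-h)^2) / (2h),
  // which should agree with the analytic gradient 2x = 6.
  const double x = 3.0, h = 1e-6;
  const double numeric = ((x + h) * (x + h) - (x - h) * (x - h)) / (2.0 * h);
  const double analytic = square_grad({x}, {1.0})[0];
  assert(std::fabs(numeric - analytic) < 1e-6);
  return 0;
}

The symmetric-difference check plays the same role as the numeric gradients the Python tests above exercise (e.g. via numeric_grad_delta).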