From 15ac0ebef4d75ed613721e08078befbae2d2c3a5 Mon Sep 17 00:00:00 2001
From: jayggh <1439725485@qq.com>
Date: Thu, 8 Dec 2022 21:11:33 +0800
Subject: [PATCH] bugfix

---
 tests/test_ops/test_fused_bias_leakyrelu.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/test_ops/test_fused_bias_leakyrelu.py b/tests/test_ops/test_fused_bias_leakyrelu.py
index 75b233042e..98129df9c2 100644
--- a/tests/test_ops/test_fused_bias_leakyrelu.py
+++ b/tests/test_ops/test_fused_bias_leakyrelu.py
@@ -22,7 +22,7 @@ def setup_class(cls):
             cls.input_tensor = torch.randn((2, 2, 2, 2),
                                            requires_grad=True).cuda()
             cls.bias = torch.zeros(2, requires_grad=True).cuda()
-        else:
+        elif IS_NPU_AVAILABLE:
             cls.input_tensor = torch.randn((2, 2, 2, 2),
                                            requires_grad=True).npu()
             cls.bias = torch.zeros(2, requires_grad=True).npu()
@@ -47,7 +47,7 @@ def test_gradient(self, device):
                     self.input_tensor,
                     delta=1e-4,
                     pt_atol=1e-3)
-            else:
+            elif IS_NPU_AVAILABLE:
                 gradcheck(
                     FusedBiasLeakyReLU(2).npu(),
                     self.input_tensor,
@@ -60,7 +60,7 @@ def test_gradient(self, device):
                     self.input_tensor,
                     eps=1e-4,
                     atol=1e-3)
-            else:
+            elif IS_NPU_AVAILABLE:
                 gradcheck(
                     FusedBiasLeakyReLU(2).npu(),
                     self.input_tensor,
@@ -86,7 +86,7 @@ def test_gradgradient(self, device):
                 self.input_tensor,
                 eps=1e-4,
                 atol=1e-3)
-        else:
+        elif IS_NPU_AVAILABLE:
             gradcheck(
                 FusedBiasLeakyReLU(2).npu(),
                 self.input_tensor,
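
Note on the fix: each changed branch previously used a bare `else:`, which
treated every environment without CUDA as an Ascend NPU environment and
called `.npu()` unconditionally. Guarding the branch with
`elif IS_NPU_AVAILABLE:` restricts the NPU code paths to hosts where that
backend is actually present. Below is a minimal sketch of the guard pattern
outside the mmcv test suite; the flag definitions are hypothetical stand-ins
for mmcv.utils.IS_CUDA_AVAILABLE / IS_NPU_AVAILABLE, and make_test_inputs is
an illustrative helper, not part of the patched file:

    import torch

    # Hypothetical stand-ins for the availability flags; the hasattr()
    # check keeps this safe on torch builds without the torch_npu plugin.
    IS_CUDA_AVAILABLE = torch.cuda.is_available()
    IS_NPU_AVAILABLE = hasattr(torch, 'npu') and torch.npu.is_available()

    def make_test_inputs():
        if IS_CUDA_AVAILABLE:
            return torch.randn((2, 2, 2, 2), requires_grad=True).cuda()
        elif IS_NPU_AVAILABLE:  # was a bare `else:` before this patch
            return torch.randn((2, 2, 2, 2), requires_grad=True).npu()
        return None  # neither backend present: no device-specific setup

With the bare `else:`, a CPU-only host would reach the `.npu()` call and
raise (plain torch tensors have no `.npu()` method); with the `elif`, the
helper simply returns None and no device-specific setup happens.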