
[PaddlePaddle Hackathon] add paddle.nn.ClipGradByNorm unit tests #278

Merged
merged 50 commits into from
Nov 3, 2021
Changes from 49 commits
50 commits
5d6082b
submit paddle.nn.PixelShuffle unit tests
justld Oct 10, 2021
2be3336
submit paddle.nn.PixelShuffle unit test cases
justld Oct 10, 2021
cd161f4
add test of paddle.nn.ClipGradByGlobalNorm
justld Oct 10, 2021
b1af939
add test paddle.nn.ClipGradByNorm
justld Oct 10, 2021
151bdb8
add test of paddle.nn.PixelShuffle
justld Oct 10, 2021
fa2c79e
add test of paddle.nn.ClipGradByGlobalNorm and paddle.nn.ClipGradByNorm
justld Oct 10, 2021
7736df7
remove useless obj and class in test_clip_grad_by_global_norm.py and …
justld Oct 11, 2021
710ef80
Merge branch 'develop' into develop
justld Oct 11, 2021
d315fde
add test of paddle.nn.UpsamplingBilinear2D
justld Oct 11, 2021
0613680
remove unused code in test_clip_grad_by_global_norm.py
justld Oct 11, 2021
b7ce3bd
remove unused code in test_clip_grad_by_norm.py
justld Oct 11, 2021
078620c
add code annotation in test_clip_grad_by_global_norm.py
justld Oct 12, 2021
eafb154
add code annotation in test_clip_grad_by_norm.py
justld Oct 12, 2021
1978407
add code annotation in test_pixel_shuffle.py
justld Oct 12, 2021
143384b
add annotation in test_upsampling_bilinear2D.py
justld Oct 12, 2021
9d028b3
Merge branch 'develop' into develop
justld Oct 12, 2021
6a61fef
Merge branch 'PaddlePaddle:develop' into develop
justld Oct 13, 2021
3498837
add paddle.ClipGradByGlobalNorm test case
justld Oct 13, 2021
6563226
Merge branch 'develop' of github.com:justld/PaddleTest into develop
justld Oct 13, 2021
dd88b27
add paddle.nn.ClipGradByNorm test case
justld Oct 13, 2021
57b2dab
add paddle.nn.PixelShuffle test case
justld Oct 13, 2021
7667896
add paddle.nn.UpsamplingBilinear2D test case
justld Oct 13, 2021
54749f1
Merge branch 'develop' into develop
justld Oct 13, 2021
4042681
fix bug in test_clip_grad_by_norm.py
justld Oct 13, 2021
946fc10
Merge branch 'develop' of github.com:justld/PaddleTest into develop
justld Oct 13, 2021
fc0b49d
remove 3 test cases
justld Oct 13, 2021
2c12d1e
fix annotation
justld Oct 14, 2021
19edbf7
Merge branch 'develop' into develop
justld Oct 14, 2021
57b4e8e
Merge branch 'develop' into develop
justld Oct 14, 2021
df80e6f
Merge branch 'PaddlePaddle:develop' into develop
justld Oct 15, 2021
98f5d30
refine exception raise code
justld Oct 15, 2021
ddfebee
Merge branch 'develop' into develop
justld Oct 16, 2021
3a2f853
Merge branch 'develop' into develop
justld Oct 18, 2021
e25f69c
Merge branch 'develop' into develop
justld Oct 19, 2021
655823b
Merge branch 'develop' into develop
DDDivano Oct 22, 2021
e9ee331
Merge branch 'develop' into develop
DDDivano Oct 22, 2021
1974736
Merge branch 'develop' into develop
DDDivano Oct 25, 2021
50e586e
add test case of paddle.nn.ClipGradByNorm
justld Oct 25, 2021
cd9cde4
Merge branch 'develop' into test_ClipGradByNorm
justld Oct 25, 2021
7169018
change try-except to pytest.raises
justld Oct 29, 2021
9e8c9a3
Merge branch 'PaddlePaddle:develop' into test_ClipGradByNorm
justld Oct 29, 2021
01387d5
remove useless code
justld Oct 29, 2021
2acf6d3
Merge branch 'test_ClipGradByNorm' of github.com:justld/PaddleTest in…
justld Oct 29, 2021
86d614d
fix ci check
justld Oct 29, 2021
b2a4eca
Merge branch 'develop' into test_ClipGradByNorm
justld Oct 29, 2021
2be846c
add test case of paddle.nn.ClipGradByNorm
justld Oct 29, 2021
00a5aa3
Merge branch 'test_ClipGradByNorm' of github.com:justld/PaddleTest in…
justld Oct 29, 2021
6ea256a
update
justld Oct 30, 2021
19e5262
update test case
justld Oct 30, 2021
6409faa
Merge branch 'develop' into test_ClipGradByNorm
kolinwei Nov 3, 2021
272 changes: 272 additions & 0 deletions framework/api/nn/test_clip_grad_by_norm.py
@@ -0,0 +1,272 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
"""
test_clip_grad_by_norm
"""

from apibase import randtool, compare
import paddle
import pytest
import numpy as np


def numpy_clip_grad_by_norm(test_data, clip_norm):
"""
ClipGradByNorm implemented by numpy.
"""
    clipped_data = []
    for data, grad in test_data:
        # per-parameter L2 norm of the gradient: sqrt(sum(grad ** 2))
        norm = np.sqrt(np.sum(np.square(np.array(grad))))
        if norm > clip_norm:
            # rescale so the clipped gradient's L2 norm equals clip_norm
            grad = grad * clip_norm / norm
        clipped_data.append((data, grad))
    return clipped_data
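
# Worked example: a [10, 10] gradient of all ones has L2 norm
# sqrt(100) = 10; with clip_norm = 1.0 the limit is exceeded, so every
# entry is rescaled to 1.0 * 1.0 / 10 = 0.1.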


def generate_test_data(length, shape, dtype, value=10):
"""
generate test data
"""
tensor_data = []
numpy_data = []
np.random.seed(100)
    for _ in range(length):
np_weight = randtool("float", -value, value, shape).astype(dtype)
np_weight_grad = randtool("float", -value, value, shape).astype(dtype)
numpy_data.append((np_weight, np_weight_grad))

tensor_weight = paddle.to_tensor(np_weight)
tensor_weight_grad = paddle.to_tensor(np_weight_grad)
tensor_data.append((tensor_weight, tensor_weight_grad))
return numpy_data, tensor_data
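
# Note: randtool and compare are imported from the repo's apibase test
# helpers. A hypothetical numpy stand-in for the call
# randtool("float", -value, value, shape) -- assuming uniform sampling,
# which the "value range: [-10, 10]" notes in the docstrings suggest --
# would be:
#     np.random.uniform(-value, value, size=shape).astype(dtype)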


@pytest.mark.api_nn_ClipGradByNorm_vartype
def test_clip_grad_by_norm_base():
"""
Test base.

Test base config:
input grad shape = [10, 10]
input grad number = 5
input data type = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Expected Results:
The output of ClipGradByNorm implemented by numpy and paddle should be equal.
"""
shape = [10, 10]
length = 5
clip_norm = 1.0
dtype = "float32"

np_data, paddle_data = generate_test_data(length, shape, dtype, value=10)
np_res = numpy_clip_grad_by_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
    paddle_clipped_data = paddle_clip(paddle_data)
    paddle_res = []
    for w, g in paddle_clipped_data:
        paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByNorm_parameters
def test_clip_grad_by_norm1():
"""
Test ClipGradByNorm when input shape changes.

Test base config:
input grad shape = [10, 10]
input grad number = 5
input data type = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
input grad shape: [10, 10] -> [7, 13, 10]

Expected Results:
The output of ClipGradByNorm implemented by numpy and paddle should be equal.
"""
shape = [7, 13, 10]
length = 5
clip_norm = 1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype, value=10)
np_res = numpy_clip_grad_by_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
    paddle_clipped_data = paddle_clip(paddle_data)
    paddle_res = []
    for w, g in paddle_clipped_data:
        paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByNorm_parameters
def test_clip_grad_by_norm2():
"""
Test ClipGradByNorm when input shape changes.

Test base config:
input grad shape = [10, 10]
input grad number = 5
input data type = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
input grad shape: [10, 10] -> [10]

Expected Results:
The output of ClipGradByNorm implemented by numpy and paddle should be equal.
"""
shape = [10]
length = 5
clip_norm = 1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype, value=10)
np_res = numpy_clip_grad_by_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
    paddle_clipped_data = paddle_clip(paddle_data)
    paddle_res = []
    for w, g in paddle_clipped_data:
        paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByNorm_parameters
def test_clip_grad_by_norm3():
"""
Test ClipGradByNorm when clip_norm changes.

Test base config:
input grad shape = [10, 10]
input grad number = 5
input data type = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
        clip_norm: 1.0 -> -1.0

    Expected Results:
        clip_norm must be greater than 0.0, so a ValueError is raised.
"""
shape = [10, 10]
length = 5
clip_norm = -1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype, value=10)

with pytest.raises(ValueError):
paddle_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
paddle_clip(paddle_data)


@pytest.mark.api_nn_ClipGradByNorm_parameters
def test_clip_grad_by_norm4():
"""
Test ClipGradByNorm when value range changes.

Test base config:
input grad shape = [10, 10]
input grad number = 5
input data type = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
value range: [-10, 10] -> [-25555, 25555]

Expected Results:
The output of ClipGradByNorm implemented by numpy and paddle should be equal.
"""
shape = [10, 10]
length = 5
clip_norm = 1.0
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype, value=25555)
np_res = numpy_clip_grad_by_norm(np_data, clip_norm=clip_norm)

paddle_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
    paddle_clipped_data = paddle_clip(paddle_data)
    paddle_res = []
    for w, g in paddle_clipped_data:
        paddle_res.append((w.numpy(), g.numpy()))

# compare grad value computed by numpy and paddle
for res, p_res in zip(np_res, paddle_res):
compare(res[1], p_res[1])


@pytest.mark.api_nn_ClipGradByNorm_parameters
def test_clip_grad_by_norm5():
"""
    Test that gradient norms are actually constrained by clip_norm.

Test base config:
input grad shape = [10, 10]
input grad number = 5
input data type = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
clip_norm: 1.0 -> 0.01

Expected Results:
        Each clipped gradient's L2 norm should be no greater than 'clip_norm' when its original norm exceeds it.
"""
shape = [10, 10]
length = 5
clip_norm = 0.01
dtype = "float32"
np_data, paddle_data = generate_test_data(length, shape, dtype, value=10)

    paddle_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
    paddle_clipped_data = paddle_clip(paddle_data)
    # every clipped gradient's L2 norm should be at most clip_norm
    for _, g in paddle_clipped_data:
        assert np.sqrt(np.sum(np.square(g.numpy()))) <= clip_norm + 1e-6


@pytest.mark.api_nn_ClipGradByNorm_vartype
def test_clip_grad_by_norm6():
"""
    Test unsupported input grad dtypes.

Test base config:
input grad shape = [10, 10]
input grad number = 5
input data type = 'float32'
clip_norm = 1.0
value range: [-10, 10]

Changes:
input grad type: 'float32' -> ['int8', 'int16', 'int32', 'float16']

Expected Results:
Raise RuntimeError when input grad type is not supported.
"""
    shape = [10, 10]
    length = 5
    clip_norm = 1.0
    unsupported_dtypes = ["int8", "int16", "int32", "float16"]

    for dtype in unsupported_dtypes:
        paddle_clip = paddle.nn.ClipGradByNorm(clip_norm=clip_norm)
        np_data, paddle_data = generate_test_data(length, shape, dtype, value=10)
        # each unsupported dtype should raise RuntimeError when clipping
        with pytest.raises(RuntimeError):
            paddle_clip(paddle_data)
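
For context, a minimal usage sketch of the API under test, assuming Paddle's standard 2.x interface (the Linear layer, learning rate, and shapes below are illustrative and not part of this PR): ClipGradByNorm is normally passed to an optimizer via grad_clip, so each parameter's gradient is clipped independently before the update step.

import paddle

linear = paddle.nn.Linear(10, 10)
clip = paddle.nn.ClipGradByNorm(clip_norm=1.0)
sgd = paddle.optimizer.SGD(learning_rate=0.1, parameters=linear.parameters(), grad_clip=clip)

x = paddle.rand([4, 10])
loss = paddle.mean(linear(x))
loss.backward()
sgd.step()  # gradients are clipped per parameter here, before the update
sgd.clear_grad()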