Skip to content

Commit

Permalink
[api move] cvm (PaddlePaddle#48989)
Browse files Browse the repository at this point in the history
* [api move] cvm

* [api move] cvm

* [api move] cvm

* [api move] cvm

* [api move] cvm

* [api move] cvm

* [api move] cvm

* [api move] ci test

* [api move] ci test

* [api move] ci test
  • Loading branch information
wangzhen38 authored Dec 13, 2022
1 parent a484bc6 commit 3b2f754
Show file tree
Hide file tree
Showing 3 changed files with 90 additions and 0 deletions.
40 changes: 40 additions & 0 deletions python/paddle/fluid/tests/unittests/test_layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,46 @@ def test_type():

self.assertRaises(TypeError, test_type)

def test_cvm(self):
    """Check continuous_value_model output for use_cvm=True and use_cvm=False."""
    feed_input = np.ones([10, 10], dtype='float32')
    feed_clicks = np.ones([10, 2], dtype='float32')

    def run_cvm(use_cvm):
        # Build a fresh static graph, apply the cvm op with the given
        # flag, and fetch the single output as a numpy array.
        with self.static_graph():
            data = paddle.static.data(
                name='data',
                shape=[10, 10],
                dtype='float32',
            )
            clicks = paddle.static.data(
                name='show_click',
                shape=[10, 2],
                dtype='float32',
            )
            out = paddle.static.nn.continuous_value_model(
                data, clicks, use_cvm
            )
            return self.get_static_graph_result(
                feed={'data': feed_input, 'show_click': feed_clicks},
                fetch_list=[out],
            )[0]

    got_with_cvm = run_cvm(True)
    got_without_cvm = run_cvm(False)

    # use_cvm=True keeps both show/click columns (log-transformed),
    # so the output stays [10, 10]; use_cvm=False strips the first
    # two columns, leaving [10, 8] of untouched ones.
    expected_with_cvm = np.array(
        [[0.6931472, -1.904654e-09, 1, 1, 1, 1, 1, 1, 1, 1]] * 10,
        dtype='float32',
    )
    expected_without_cvm = np.ones([10, 8], dtype='float32')
    np.testing.assert_allclose(
        got_with_cvm, expected_with_cvm, rtol=1e-5, atol=1e-06
    )
    np.testing.assert_allclose(
        got_without_cvm, expected_without_cvm, rtol=1e-5, atol=1e-06
    )

def test_Flatten(self):
inp = np.ones([3, 4, 4, 5], dtype='float32')
with self.static_graph():
Expand Down
1 change: 1 addition & 0 deletions python/paddle/static/nn/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from .common import batch_norm # noqa: F401
from .common import instance_norm # noqa: F401
from .common import data_norm # noqa: F401
from .common import continuous_value_model # noqa: F401
from .common import group_norm # noqa: F401
from .common import deform_conv2d # noqa: F401
from .common import conv3d # noqa: F401
Expand Down
49 changes: 49 additions & 0 deletions python/paddle/static/nn/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -321,6 +321,55 @@ def instance_norm(
return instance_norm_out


@static_only
def continuous_value_model(input, cvm, use_cvm=True):
    r"""
    **continuous_value_model layers**

    Used in CTR projects to remove or rescale the show and click values
    carried in :attr:`input`. :attr:`input` is an embedding vector whose
    first two entries along the feature dimension are show and click, so
    its shape is :math:`[N, D]` (N is the batch size, D is
    ``2 + embedding dim``).

    If :attr:`use_cvm` is True, the op computes :math:`log(show)` and
    :math:`log(click)` for the first two columns and the output shape is
    :math:`[N, D]`. If :attr:`use_cvm` is False, the show and click
    columns are removed from :attr:`input` and the output shape is
    :math:`[N, D - 2]`. :attr:`cvm` holds the show/click info with shape
    :math:`[N, 2]`.

    Args:
        input (Variable): The input variable. A 2-D LoDTensor with shape
            :math:`[N, D]`, where N is the batch size and D is
            ``2 + the embedding dim``; ``lod level = 1``. Supported
            dtypes: float16, float32, float64.
        cvm (Variable): Show and click variable. A 2-D Tensor with shape
            :math:`[N, 2]`, where N is the batch size and the two
            columns are show and click. A Tensor with type float32,
            float64.
        use_cvm (bool): Whether to keep the (log-transformed) show/click
            columns. If True, the output dim is the same as the input
            dim; if False, the output dim is ``input dim - 2`` (show and
            click removed). Default: True.

    Returns:
        Variable: A 2-D LoDTensor with shape :math:`[N, M]`. If
        :attr:`use_cvm` is True, M equals the input dim D; if False, M
        equals ``D - 2``. A Tensor with the same type as input.

    Examples:
        .. code-block:: python

            import paddle

            input = paddle.static.data(name="input", shape=[64, 1], dtype="int64")
            label = paddle.static.data(name="label", shape=[64, 1], dtype="int64")
            w0 = paddle.full(shape=(100, 1), fill_value=2).astype(paddle.float32)
            embed = paddle.nn.functional.embedding(input, w0)
            ones = paddle.full_like(label, 1, dtype="int64")
            show_clk = paddle.cast(
                paddle.concat([ones, label], axis=1), dtype='float32'
            )
            show_clk.stop_gradient = True
            input_with_cvm = paddle.static.nn.continuous_value_model(
                embed, show_clk, True
            )
    """
    # Validate the input dtype up front, before allocating the output
    # variable, so a bad dtype fails fast without side effects.
    check_variable_and_dtype(
        input, 'input', ['float16', 'float32', 'float64'], 'cvm'
    )
    helper = LayerHelper('cvm', **locals())
    # Output inherits the input dtype; shape is resolved by the cvm op
    # at runtime depending on use_cvm.
    out = helper.create_variable(dtype=input.dtype)
    helper.append_op(
        type='cvm',
        inputs={'X': [input], 'CVM': [cvm]},
        outputs={'Y': [out]},
        attrs={"use_cvm": use_cvm},
    )
    return out


@static_only
def data_norm(
input,
Expand Down

0 comments on commit 3b2f754

Please sign in to comment.