add beam search doc (#11469)
Superjomn authored Jun 20, 2018
1 parent 5972990 commit d020d7f
Showing 3 changed files with 56 additions and 12 deletions.
4 changes: 2 additions & 2 deletions paddle/fluid/operators/activation_op.cc
@@ -143,7 +143,7 @@ Tanh Activation Operator.
__attribute__((unused)) constexpr char TanhShrinkDoc[] = R"DOC(
TanhShrink Activation Operator.
$$out = x - \frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$
$$out = x - \\frac{e^{x} - e^{-x}}{e^{x} + e^{-x}}$$
)DOC";

@@ -385,7 +385,7 @@ class STanhOpMaker : public framework::OpProtoAndCheckerMaker {
AddComment(R"DOC(
STanh Activation Operator.
$$out = b * \frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$
$$out = b * \\frac{e^{a * x} - e^{-a * x}}{e^{a * x} + e^{-a * x}}$$
)DOC");
}
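For reference, the two docstring formulas touched above are simply shifted/scaled forms of tanh. A minimal NumPy sketch of what they compute (not part of the commit; the values of `a` and `b` below are arbitrary illustrations, not the operator defaults):

.. code-block:: python

    import numpy as np

    def tanh_shrink(x):
        # out = x - (e^x - e^-x) / (e^x + e^-x)  ==  x - tanh(x)
        return x - np.tanh(x)

    def stanh(x, a=0.67, b=1.7159):
        # out = b * (e^(a*x) - e^(-a*x)) / (e^(a*x) + e^(-a*x))  ==  b * tanh(a * x)
        return b * np.tanh(a * x)

    x = np.array([-2.0, 0.0, 2.0])
    print(tanh_shrink(x))  # elementwise x - tanh(x)
    print(stanh(x))        # elementwise b * tanh(a * x)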
33 changes: 30 additions & 3 deletions python/paddle/fluid/layers/control_flow.py
@@ -185,12 +185,14 @@ def Print(input,
Returns:
Variable: Output tensor, same data as the input tensor.
Examples:
.. code-block:: python
value = some_layer(...)
Print(value, summarize=10,
message="The content of some_layer: ")
'''
helper = LayerHelper('print', **locals())
out = helper.create_tmp_variable(dtype=helper.input_dtype())
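A slightly fuller version of the Print example above, with the setup it leaves implicit; the `fc` layer merely stands in for `some_layer(...)` and is an illustrative assumption, not part of the commit:

.. code-block:: python

    import paddle.fluid as fluid

    x = fluid.layers.data(name='x', shape=[1], dtype='float32')
    value = fluid.layers.fc(input=x, size=1)  # stand-in for some_layer(...)

    # Print at most 10 entries of `value` at run time, prefixed with a message.
    fluid.layers.Print(value, summarize=10,
                       message="The content of some_layer: ")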
@@ -1201,6 +1203,31 @@ def __exit__(self, exc_type, exc_val, exc_tb):


class ConditionalBlock(object):
'''
**ConditionalBlock**
ConditionalBlock is an operator that binds a block to a specific condition;
if the condition is met, the corresponding block will be executed.
Args:
inputs (list of Variable): boolean condition variables.
is_scalar_condition (bool): whether the branch is controlled by a scalar condition.
name (str): name of this ConditionalBlock.
Examples:
.. code-block:: python
cond = layers.less_than(x=label, y=limit)
true_image, false_image = layers.split_lod_tensor(
input=image, mask=cond)
true_cond = layers.ConditionalBlock([true_image])
false_cond = layers.ConditionalBlock([false_image])
with true_cond.block():
...
with false_cond.block():
...
'''

def __init__(self, inputs, is_scalar_condition=False, name=None):
for each_input in inputs:
if not isinstance(each_input, Variable):
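A more complete sketch of the ConditionalBlock pattern from the new docstring, including the graph construction it assumes; the data shapes and the `fc` layers inside the blocks are illustrative assumptions, not part of the commit:

.. code-block:: python

    import paddle.fluid as fluid
    from paddle.fluid import layers

    image = layers.data(name='image', shape=[784], dtype='float32')
    label = layers.data(name='label', shape=[1], dtype='int64')
    limit = layers.fill_constant(shape=[1], dtype='int64', value=5)

    # Per-example boolean condition, then split the batch along it.
    cond = layers.less_than(x=label, y=limit)
    true_image, false_image = layers.split_lod_tensor(input=image, mask=cond)

    # One ConditionalBlock per branch; each branch operates on its own
    # slice of the batch.
    true_cond = layers.ConditionalBlock([true_image])
    with true_cond.block():
        hidden_true = layers.fc(input=true_image, size=10)

    false_cond = layers.ConditionalBlock([false_image])
    with false_cond.block():
        hidden_false = layers.fc(input=false_image, size=10)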
31 changes: 24 additions & 7 deletions python/paddle/fluid/layers/nn.py
@@ -2678,18 +2678,35 @@ def sequence_expand(x, y, ref_level=-1, name=None):

def beam_search(pre_ids, ids, scores, beam_size, end_id, level=0):
'''
**beam search**
This function implements the beam search algorithm.
Beam search is a classical algorithm for selecting candidate words
in machine translation tasks.
Refer to `Beam search <https://en.wikipedia.org/wiki/Beam_search>`_
for more details.
Args:
pre_ids (Variable): ${pre_ids_comment}
ids (Variable): ${ids_comment}
scores (Variable): ${scores_comment}
beam_size (int): ${beam_size_comment}
end_id (int): ${end_id_comment}
level (int): ${level_comment}
pre_ids (Variable): ids selected in the previous step.
ids (Variable): a LoDTensor of shape [None, k]
scores (Variable): a LoDTensor with the same shape and LoD as `ids`
beam_size (int): beam size for beam search
end_id (int): the token id that indicates the end of a sequence
level (int): the level of the LoDTensor
Returns:
tuple: a tuple of beam_search output variables: selected_ids, selected_scores
tuple: a tuple of beam_search output variables: `selected_ids`, `selected_scores`
Examples:
.. code-block:: python
# current_score is a Tensor of shape (num_batch_size, embed_size), which
# contains the score of each candidate word.
topk_scores, topk_indices = pd.topk(current_score, k=50)
selected_ids, selected_scores = pd.beam_search(
pre_ids, topk_indices, topk_scores, beam_size, end_id=10, level=0)
'''
helper = LayerHelper('beam_search', **locals())
score_type = scores.dtype
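A sketch of how one decoding step using the example above might be wired up, assuming `pd` is `paddle.fluid.layers`; the placeholder variables, the vocabulary size, and the beam size of 5 are illustrative assumptions, not part of the commit:

.. code-block:: python

    import paddle.fluid.layers as pd

    # Placeholders standing in for the decoder state of a real model.
    pre_ids = pd.data(name='pre_ids', shape=[1], dtype='int64', lod_level=2)
    current_score = pd.data(name='current_score', shape=[10000],
                            dtype='float32', lod_level=2)

    # Keep the 50 highest-scoring candidate words per hypothesis ...
    topk_scores, topk_indices = pd.topk(current_score, k=50)

    # ... then prune to `beam_size` surviving hypotheses, treating token
    # id 10 as the end-of-sequence marker, as in the docstring example.
    selected_ids, selected_scores = pd.beam_search(
        pre_ids, topk_indices, topk_scores, beam_size=5, end_id=10, level=0)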
