diff --git a/paddle/fluid/operators/mkldnn/conv_mkldnn_op.cc b/paddle/fluid/operators/mkldnn/conv_mkldnn_op.cc
index f676dca7d64ae..f61d14489be2c 100644
--- a/paddle/fluid/operators/mkldnn/conv_mkldnn_op.cc
+++ b/paddle/fluid/operators/mkldnn/conv_mkldnn_op.cc
@@ -583,11 +583,7 @@ class ConvMKLDNNHandlerT
       post_operations.append_sum(sum_scale);
     }
 
-    bool has_activation = !ctx.Attr<std::string>("fuse_activation").empty();
-    if (has_activation) {
-      paddle::platform::AppendActivation(
-          ctx, post_operations, activation_scale);
-    }
+    paddle::platform::AppendActivation(ctx, post_operations, activation_scale);
 
     conv_attr.set_post_ops(post_operations);
     return conv_attr;
diff --git a/paddle/fluid/platform/mkldnn_reuse.h b/paddle/fluid/platform/mkldnn_reuse.h
index cd3a28382f5d6..e4d256a38ac3c 100644
--- a/paddle/fluid/platform/mkldnn_reuse.h
+++ b/paddle/fluid/platform/mkldnn_reuse.h
@@ -1011,7 +1011,12 @@ class ActivationMKLDNNHandler
 static void AppendActivation(const framework::ExecutionContext& ctx,
                              dnnl::post_ops& post_ops,
                              float activation_scale = 1.0f) {
-  if (!ctx.HasAttr("fuse_activation")) return;
+  const auto invalid_attribute =
+      ctx.HasAttr("fuse_activation")
+          ? ctx.Attr<std::string>("fuse_activation").empty()
+          : true;
+  if (invalid_attribute) return;
+
   const auto fuse_activation = ctx.Attr<std::string>("fuse_activation");
   const auto fuse_alpha =
       ctx.HasAttr("fuse_alpha") ? ctx.Attr<float>("fuse_alpha") : 0.0f;