From 04e7e1da0a1be833c425cfd33df053358a67cc95 Mon Sep 17 00:00:00 2001 From: yunfeima Date: Wed, 23 Dec 2020 16:47:36 +0800 Subject: [PATCH] Fix code format --- tensorflow/core/grappler/optimizers/remapper.cc | 2 +- tensorflow/core/grappler/optimizers/remapper_test.cc | 1 - tensorflow/core/kernels/matmul_op_fused.cc | 3 +-- tensorflow/core/ops/math_ops.cc | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/tensorflow/core/grappler/optimizers/remapper.cc b/tensorflow/core/grappler/optimizers/remapper.cc index 88011daa291..f0cef979325 100644 --- a/tensorflow/core/grappler/optimizers/remapper.cc +++ b/tensorflow/core/grappler/optimizers/remapper.cc @@ -472,7 +472,7 @@ bool FindContractionWithBiasAndActivation( if (!IsMatMul(*contraction_node_def) && IsTanh(*node_def)) return false; // Currently, only (conv | matmul) + bias + leakyrelu is enabled - if ((!IsConv2D(*contraction_node_def) && !IsMatMul(*contraction_node_def)) && + if (!(IsConv2D(*contraction_node_def) || IsMatMul(*contraction_node_def)) && IsLeakyRelu(*node_def)) return false; diff --git a/tensorflow/core/grappler/optimizers/remapper_test.cc b/tensorflow/core/grappler/optimizers/remapper_test.cc index 7fa6f9b5412..396f57f9b4e 100644 --- a/tensorflow/core/grappler/optimizers/remapper_test.cc +++ b/tensorflow/core/grappler/optimizers/remapper_test.cc @@ -712,7 +712,6 @@ class RemapperFuseMatMulWithBiasAndActivationTest : public RemapperTest { if (activation == "LeakyRelu") { EXPECT_EQ(node.attr().at("leakyrelu_alpha").f(), leakyrelu_alpha); } - found++; } } diff --git a/tensorflow/core/kernels/matmul_op_fused.cc b/tensorflow/core/kernels/matmul_op_fused.cc index b24797da901..07552ab9c76 100644 --- a/tensorflow/core/kernels/matmul_op_fused.cc +++ b/tensorflow/core/kernels/matmul_op_fused.cc @@ -108,8 +108,7 @@ struct LaunchFusedMatMulOp { executeWithOutputKernel(WithBiasAddAndElu(bias_add_args)); break; case FusedComputationType::kBiasAddWithLeakyRelu: - out.device(d) = lhs.contract(rhs, dim_pair, - WithBiasAddAndLeakyRelu(bias_add_args)); + executeWithOutputKernel(WithBiasAddAndLeakyRelu(bias_add_args)); break; case FusedComputationType::kUndefined: OP_REQUIRES_OK(context, errors::Internal("Fusion type is undefined")); diff --git a/tensorflow/core/ops/math_ops.cc b/tensorflow/core/ops/math_ops.cc index 7edf7f7a843..fb833bb9d97 100644 --- a/tensorflow/core/ops/math_ops.cc +++ b/tensorflow/core/ops/math_ops.cc @@ -954,7 +954,7 @@ REGISTER_OP("_FusedMatMul") .Attr("fused_ops: list(string) = []") // Attributes for the FusedBatchNorm ----------- // .Attr("epsilon: float = 0.0001") - // Attributes for the LeakyRelu ----------------------------------------- // + // Attributes for the LeakyRelu ---------------- // .Attr("leakyrelu_alpha: float = 0.2") // --------------------------------------------- // .SetShapeFn(shape_inference::MatMulShape)