From 941e049f0bf9a26876336eaa0b4b6a14d075f2ad Mon Sep 17 00:00:00 2001
From: Trent Lo
Date: Wed, 24 Feb 2021 15:30:10 -0800
Subject: [PATCH] [XLA/GPU] Fix a bug in CanShareOperandBufferWithUser().

ElementsIn() supports only array shapes; avoid calling it on non-array
shapes. We don't need to check buffer sharing between operands and users
for horizontal fusion for non-array shapes anyway.
---
 tensorflow/compiler/xla/service/hlo_dataflow_analysis.cc | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tensorflow/compiler/xla/service/hlo_dataflow_analysis.cc b/tensorflow/compiler/xla/service/hlo_dataflow_analysis.cc
index 31132fa5d9e..24f19fd8ce3 100644
--- a/tensorflow/compiler/xla/service/hlo_dataflow_analysis.cc
+++ b/tensorflow/compiler/xla/service/hlo_dataflow_analysis.cc
@@ -1445,7 +1445,8 @@ bool HloDataflowAnalysis::CanShareOperandBufferWithUser(
       user->fused_parameter(user->operand_index(operand));
   // We don't require the same dimensions but only the same number of elements
   // and type (to make sure the same buffer size).
-  return ShapeUtil::ElementsIn(operand_subshape) ==
+  return operand_subshape.IsArray() && user_subshape.IsArray() &&
+         ShapeUtil::ElementsIn(operand_subshape) ==
              ShapeUtil::ElementsIn(user_subshape) &&
          ShapeUtil::SameElementType(operand_subshape, user_subshape) &&
          AreTransitiveUsesEffectivelyElementwise(