From 10380456280efb3c47cfbc3c9462db7f9415bf9d Mon Sep 17 00:00:00 2001
From: "A. Unique TensorFlower" <gardener@tensorflow.org>
Date: Mon, 1 Mar 2021 15:33:37 -0800
Subject: [PATCH] TPU layout method cleanup by removing one parameter.

PiperOrigin-RevId: 360292316
Change-Id: Ib8332ba5b0d7576cfb02773f9974a48ead7ae47c
---
 .../compiler/xla/service/bfloat16_propagation.cc | 15 +++++----------
 .../compiler/xla/service/bfloat16_propagation.h  |  4 ++++
 2 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/tensorflow/compiler/xla/service/bfloat16_propagation.cc b/tensorflow/compiler/xla/service/bfloat16_propagation.cc
index c3d8df85b6c..9622e176e68 100644
--- a/tensorflow/compiler/xla/service/bfloat16_propagation.cc
+++ b/tensorflow/compiler/xla/service/bfloat16_propagation.cc
@@ -345,11 +345,8 @@ bool BFloat16Propagation::AllUsersConsumeBF16(const HloInstruction& hlo,
   return true;
 }
 
-namespace {
-
-// Returns whether we should avoid changing the precision of inst regardless of
-// the producers and users.
-bool ShouldKeepPrecisionUnchanged(const HloInstruction* inst) {
+bool BFloat16Propagation::ShouldKeepPrecisionUnchanged(
+    const HloInstruction* inst) {
   if (inst->opcode() == HloOpcode::kFusion &&
       inst->fusion_kind() == HloInstruction::FusionKind::kCustom) {
     return ShouldKeepPrecisionUnchanged(
@@ -358,14 +355,12 @@ bool ShouldKeepPrecisionUnchanged(const HloInstruction* inst) {
   // Do not change precision for side-effecting instructions, control flow, and
   // bitcast-convert, because this pass might break the interfaces or
   // assumptions for them.
-  return inst->opcode() == HloOpcode::kCustomCall ||  //
-         inst->opcode() == HloOpcode::kCall ||  //
-         inst->opcode() == HloOpcode::kBitcastConvert ||  //
+  return inst->opcode() == HloOpcode::kCustomCall ||
+         inst->opcode() == HloOpcode::kCall ||
+         inst->opcode() == HloOpcode::kBitcastConvert ||
          inst->HasSideEffectNoRecurse();
 }
 
-}  // namespace
-
 void BFloat16Propagation::DetermineInstructionPrecision(HloInstruction* hlo,
                                                         bool skip_parameters) {
   // We handle any fusion computation, while body/condition or conditional
diff --git a/tensorflow/compiler/xla/service/bfloat16_propagation.h b/tensorflow/compiler/xla/service/bfloat16_propagation.h
index 200599efab2..168649a10bd 100644
--- a/tensorflow/compiler/xla/service/bfloat16_propagation.h
+++ b/tensorflow/compiler/xla/service/bfloat16_propagation.h
@@ -72,6 +72,10 @@ class BFloat16Propagation : public HloModulePass {
   // (precision reductions were added).
   StatusOr<bool> Run(HloModule* module) override;
 
+  // Returns whether we should avoid changing the precision of inst regardless
+  // of the producers and users.
+  virtual bool ShouldKeepPrecisionUnchanged(const HloInstruction* inst);
+
  private:
   // ***************************
   // Function called and state produced by the forward analysis pass (from
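
Note on the API change: promoting ShouldKeepPrecisionUnchanged from a file-local helper in an anonymous namespace to a virtual member of BFloat16Propagation lets a backend-specific pass override the keep-precision policy while reusing the rest of the propagation logic. The sketch below illustrates one way a subclass might use the new hook; the TpuBFloat16Propagation name and the kAllReduce rule are illustrative assumptions, not part of this patch or of XLA.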
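// Hypothetical sketch (not part of this patch): a backend-specific subclass
// overriding the newly virtual hook. TpuBFloat16Propagation and the
// kAllReduce policy below are illustrative assumptions.
#include "tensorflow/compiler/xla/service/bfloat16_propagation.h"
#include "tensorflow/compiler/xla/service/hlo_instruction.h"
#include "tensorflow/compiler/xla/service/hlo_opcode.h"

namespace xla {

class TpuBFloat16Propagation : public BFloat16Propagation {
 public:
  // Reuse the base constructor, which takes the BFloat16Support to query.
  using BFloat16Propagation::BFloat16Propagation;

  bool ShouldKeepPrecisionUnchanged(const HloInstruction* inst) override {
    // Example policy: additionally pin all-reduce at its original precision,
    // then defer to the base implementation for every other instruction.
    if (inst->opcode() == HloOpcode::kAllReduce) {
      return true;
    }
    return BFloat16Propagation::ShouldKeepPrecisionUnchanged(inst);
  }
};

}  // namespace xla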