Mark TPUReplicateMetadata as side-effecting
This operation does not return any value; marking it as non-side-effecting implies that it can always be deleted (which is what the canonicalizer will do).

PiperOrigin-RevId: 266989820
commit 13c3302982
parent 407b665929
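To illustrate the problem described above, here is a minimal MLIR sketch (the function name and attribute values are hypothetical, not taken from this change): the op produces no results, so nothing in the function uses it, and an op that is both result-less and declared side-effect-free is dead code by construction.

// Minimal sketch, assuming hypothetical attribute values.
// The op has no results, so no other op can depend on it through SSA values.
func @replicated_computation() {
  "tf.TPUReplicateMetadata"() {num_replicas = 2 : i64, num_cores_per_replica = 1 : i64} : () -> ()
  return
}
// If tf.TPUReplicateMetadata carries the NoSideEffect trait, any dead-code
// elimination (e.g. `mlir-opt -canonicalize`) is entitled to erase it,
// silently dropping the replication metadata.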
@@ -3681,31 +3681,6 @@ retained with length 1.
   TF_DerivedOperandTypeAttr Tidx = TF_DerivedOperandTypeAttr<1>;
 }
 
-def TF_TPUReplicateMetadataOp : TF_Op<"TPUReplicateMetadata", [NoSideEffect]> {
-  let summary = [{
-Metadata indicating how the TPU computation should be replicated.
-  }];
-
-  let description = [{
-This operation holds the metadata common to operations of a `tpu.replicate()` computation subgraph.
-  }];
-
-  let arguments = (ins
-    Confined<I64Attr, [IntMinValue<0>]>:$num_replicas,
-    DefaultValuedAttr<I64Attr, "1">:$num_cores_per_replica,
-    StrAttr:$topology,
-    DefaultValuedAttr<BoolAttr, "true">:$use_tpu,
-    DefaultValuedAttr<I64ArrayAttr, "{}">:$device_assignment,
-    DefaultValuedAttr<I64ArrayAttr, "{}">:$computation_shape,
-    DefaultValuedAttr<StrArrayAttr, "{}">:$host_compute_core,
-    DefaultValuedAttr<StrArrayAttr, "{}">:$padding_map,
-    DefaultValuedAttr<StrAttr, "STEP_MARK_AT_ENTRY">:$step_marker_location,
-    DefaultValuedAttr<BoolAttr, "false">:$allow_soft_placement
-  );
-
-  let results = (outs);
-}
-
 def TF_TanhOp : TF_Op<"Tanh", [NoSideEffect, SameOperandsAndResultType]> {
   let summary = "Computes hyperbolic tangent of `x` element-wise.";
 
@@ -302,4 +302,32 @@ element_dtype: the desired type of elements in the list.
   );
 }
 
+// This operation when auto-generated is marked as NoSideEffect because it isn't
+// stateful in TensorFlow. However it is kept alive through control dependency,
+// and does not have any output. When placed in an island it wouldn't be kept
+// alive in any way and the canonicalizer would just always fold it away.
+def TF_TPUReplicateMetadataOp : TF_Op<"TPUReplicateMetadata", []> {
+  let summary = [{
+Metadata indicating how the TPU computation should be replicated.
+  }];
+
+  let description = [{
+This operation holds the metadata common to operations of a `tpu.replicate()` computation subgraph.
+  }];
+
+  let arguments = (ins
+    Confined<I64Attr, [IntMinValue<0>]>:$num_replicas,
+    DefaultValuedAttr<I64Attr, "1">:$num_cores_per_replica,
+    StrAttr:$topology,
+    DefaultValuedAttr<BoolAttr, "true">:$use_tpu,
+    DefaultValuedAttr<I64ArrayAttr, "{}">:$device_assignment,
+    DefaultValuedAttr<I64ArrayAttr, "{}">:$computation_shape,
+    DefaultValuedAttr<StrArrayAttr, "{}">:$host_compute_core,
+    DefaultValuedAttr<StrArrayAttr, "{}">:$padding_map,
+    DefaultValuedAttr<StrAttr, "STEP_MARK_AT_ENTRY">:$step_marker_location,
+    DefaultValuedAttr<BoolAttr, "false">:$allow_soft_placement
+  );
+
+  let results = (outs);
+}
 #endif // TF_OPS
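The comment added in the hunk above describes the op being kept alive only through control dependencies. A rough sketch of that scenario follows; the tf_executor syntax and all attribute values are assumptions for illustration, not part of this commit. Inside an island, downstream ops and the fetch depend only on the island's control token, not on the metadata op itself, so if the op were also NoSideEffect the canonicalizer would see a result-less, effect-free op and fold it away.

// Rough sketch, assuming tf_executor dialect syntax and hypothetical attributes.
func @tpu_computation() {
  tf_executor.graph {
    // Only the island's control token is consumed; the metadata op has no results.
    %control = tf_executor.island {
      "tf.TPUReplicateMetadata"() {num_replicas = 2 : i64, topology = ""} : () -> ()
      tf_executor.yield
    }
    tf_executor.fetch %control : !tf_executor.control
  }
  return
}
// With NoSideEffect on tf.TPUReplicateMetadata, nothing inside the island uses
// the op, so canonicalization would erase it even though the island stays live.
// Dropping the trait, as this commit does, keeps the op from being removed.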