Replaced calls to tensorflow::StringPiece::ToString with std::string conversions.

That is, instances of sp.ToString() are replaced with std::string(sp).

This will allow tensorflow::StringPiece::ToString to be removed, which is necessary before it can be replaced with absl::string_view.

PiperOrigin-RevId: 195689392
Author: A. Unique TensorFlower, 2018-05-07 11:05:56 -07:00 (committed by TensorFlower Gardener)
Parent: 9ba26ca0d5
Commit: 170634d5a1
23 changed files with 50 additions and 48 deletions
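For context, every call site below gets the same one-line change, sketched here as an illustrative standalone snippet (not code from this commit). std::string_view stands in for tensorflow::StringPiece / absl::string_view, and the MakeNodeLabel helper is hypothetical; the point is that the explicit std::string(sp) conversion compiles for both types, while .ToString() is a member only the legacy StringPiece class provides.

// Illustrative sketch only (not part of this commit). std::string_view is used
// as a stand-in for tensorflow::StringPiece / absl::string_view; the
// MakeNodeLabel helper is hypothetical.
#include <iostream>
#include <string>
#include <string_view>

std::string MakeNodeLabel(std::string_view name) {
  // Before this change a call site would have written name.ToString(), which
  // exists only on the legacy StringPiece class. The explicit std::string
  // conversion below works for both StringPiece and absl::string_view, so the
  // underlying type can be swapped without touching callers again.
  return std::string(name) + ":0";
}

int main() {
  std::string_view node = "MatMul";
  std::cout << MakeNodeLabel(node) << "\n";  // prints "MatMul:0"
  return 0;
}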

@@ -269,7 +269,7 @@ StatusOr<HloInstruction*> BroadcastZeros(
 StatusOr<std::unique_ptr<HloComputation>> CreateComputationWithSignature(
     ArraySlice<const Shape*> domain, const Shape& range,
     tensorflow::StringPiece name) {
-  HloComputation::Builder b(name.ToString());
+  HloComputation::Builder b{std::string(name)};
   int64 param_idx = 0;
   for (const Shape* param_shape : domain) {
     b.AddInstruction(HloInstruction::CreateParameter(

@@ -325,7 +325,7 @@ class HloDotDumper {
                bool show_backend_config, const HloExecutionProfile* profile,
                NodeFilter filter)
       : computation_(computation),
-        label_(label.ToString()),
+        label_(std::string(label)),
        debug_options_(debug_options),
        show_metadata_(show_metadata),
        show_backend_config_(show_backend_config),

@@ -438,7 +438,7 @@ HloInstruction::CreateCrossReplicaSum(
       << "Outfeed shape " << shape << " must be compatible with operand shape "
       << operand->shape();
   instruction->AppendOperand(operand);
-  instruction->outfeed_config_ = outfeed_config.ToString();
+  instruction->outfeed_config_ = std::string(outfeed_config);
   instruction->outfeed_shape_ = shape;
   return instruction;
 }
@@ -1168,7 +1168,7 @@ bool HloInstruction::HasSideEffect() const {
   for (auto operand : operands) {
     instruction->AppendOperand(operand);
   }
-  instruction->custom_call_target_ = custom_call_target.ToString();
+  instruction->custom_call_target_ = std::string(custom_call_target);
   return instruction;
 }
@@ -1180,7 +1180,7 @@ bool HloInstruction::HasSideEffect() const {
   for (auto operand : operands) {
     instruction->AppendOperand(operand);
   }
-  instruction->channel_name_ = channel_name.ToString();
+  instruction->channel_name_ = std::string(channel_name);
   instruction->cost_estimate_ns_ = cost_estimate_ns;
   return instruction;
 }

@@ -1264,7 +1264,7 @@ class HloInstruction {
   // Gets/sets the string identifier for this instruction.
   const string& name() const { return name_; }
-  void set_name(tensorflow::StringPiece name) { name_ = name.ToString(); }
+  void set_name(tensorflow::StringPiece name) { name_ = std::string(name); }
   // Use the given NameUniquer to select a unique name for the instruction based
   // on the instruction's existing name.

@@ -90,7 +90,7 @@ StatusOr<bool> HloPassPipeline::Run(HloModule* module) {
     return Status::OK();
   };
-  string prefix = name().ToString() + ": pipeline start";
+  string prefix = std::string(name()) + ": pipeline start";
   bool changed = false;
   string message;
   TF_RETURN_IF_ERROR(
@@ -98,12 +98,12 @@ StatusOr<bool> HloPassPipeline::Run(HloModule* module) {
   const string xla_dump_per_pass_hlo_proto_to =
       module->config().debug_options().xla_dump_per_pass_hlo_proto_to();
   if (!xla_dump_per_pass_hlo_proto_to.empty()) {
-    DumpModuleProto(*module, xla_dump_per_pass_hlo_proto_to, name().ToString(),
-                    "pipeline_start");
+    DumpModuleProto(*module, xla_dump_per_pass_hlo_proto_to,
+                    std::string(name()), "pipeline_start");
   }
   for (auto& pass : passes_) {
-    if (disabled_passes.count(pass->name().ToString()) > 0) {
+    if (disabled_passes.count(std::string(pass->name())) > 0) {
       VLOG(1) << "  Skipping HLO pass " << pass->name()
               << ", disabled by --xla_disable_hlo_passes";
       continue;
@@ -121,7 +121,7 @@ StatusOr<bool> HloPassPipeline::Run(HloModule* module) {
         run_invariant_checkers(StrCat("after running pass: ", pass->name())));
     if (!xla_dump_per_pass_hlo_proto_to.empty()) {
       DumpModuleProto(*module, xla_dump_per_pass_hlo_proto_to,
-                      name().ToString(), pass->name().ToString());
+                      std::string(name()), std::string(pass->name()));
     }
     changed |= changed_this_pass;

@@ -32,7 +32,7 @@ class HumanReadableProfileBuilder {
   explicit HumanReadableProfileBuilder(tensorflow::StringPiece computation_name,
                                        int64 total_cycles,
                                        double clock_rate_ghz)
-      : computation_name_(computation_name.ToString()),
+      : computation_name_(std::string(computation_name)),
        total_cycles_(total_cycles),
        clock_rate_ghz_(clock_rate_ghz) {
     CHECK_GE(clock_rate_ghz, 1e-9);
@@ -47,9 +47,10 @@ class HumanReadableProfileBuilder {
             tensorflow::StringPiece category, int64 cycles, int64 flop_count,
             int64 transcendental_count, int64 bytes_accessed,
             float optimal_seconds) {
-    op_infos_.push_back(
-        {op_name.ToString(), short_name.ToString(), category.ToString(), cycles,
-         flop_count, transcendental_count, bytes_accessed, optimal_seconds});
+    op_infos_.push_back({std::string(op_name), std::string(short_name),
+                         std::string(category), cycles, flop_count,
+                         transcendental_count, bytes_accessed,
+                         optimal_seconds});
   }
   // Gets the human-readable profile.

@@ -53,7 +53,7 @@ NameUniquer::NameUniquer(const string& separator) {
 }
 string NameUniquer::GetUniqueName(tensorflow::StringPiece prefix) {
-  string root = GetSanitizedName(prefix.empty() ? "name" : prefix.ToString());
+  string root = GetSanitizedName(prefix.empty() ? "name" : std::string(prefix));
   // Strip away numeric suffix (if any). Only recognize separator if it is in
   // the middle of the name.

@@ -172,11 +172,11 @@ tensorflow::Status ExpectNotTupleOrOpaque(const Shape& shape,
                                           tensorflow::StringPiece op_type) {
   if (ShapeUtil::IsTuple(shape)) {
     return InvalidArgument("Expected non-tuple argument for %s, but got %s.",
-                           op_type.ToString().c_str(),
+                           std::string(op_type).c_str(),
                            ShapeUtil::HumanString(shape).c_str());
   } else if (ShapeUtil::IsOpaque(shape)) {
     return InvalidArgument("Expected non-opaque argument for %s, but got %s.",
-                           op_type.ToString().c_str(),
+                           std::string(op_type).c_str(),
                            ShapeUtil::HumanString(shape).c_str());
   } else {
     return tensorflow::Status::OK();

@@ -504,7 +504,7 @@ string Print(const NodeDef& n) {
   std::vector<string> dep;
   for (StringPiece s : n.input()) {
     if (str_util::ConsumePrefix(&s, "^")) {
-      dep.push_back(s.ToString());
+      dep.push_back(std::string(s));
     } else {
       dat.push_back(s);
     }

@@ -24,22 +24,23 @@ limitations under the License.
 namespace tensorflow {
 NodeDefBuilder::NodeOut::NodeOut(StringPiece n, int i, DataType dt)
-    : node(n.ToString()), index(i), data_type(dt) {}
+    : node(std::string(n)), index(i), data_type(dt) {}
 NodeDefBuilder::NodeOut::NodeOut() {
   // uninitialized, call Reset() before use.
 }
 void NodeDefBuilder::NodeOut::Reset(StringPiece n, int i, DataType dt) {
-  node = n.ToString();
+  node = std::string(n);
   index = i;
   data_type = dt;
 }
 NodeDefBuilder::NodeDefBuilder(StringPiece name, StringPiece op_name,
                                const OpRegistryInterface* op_registry) {
-  node_def_.set_name(name.ToString());
-  const Status status = op_registry->LookUpOpDef(op_name.ToString(), &op_def_);
+  node_def_.set_name(std::string(name));
+  const Status status =
+      op_registry->LookUpOpDef(std::string(op_name), &op_def_);
   if (status.ok()) {
     Initialize();
   } else {
@@ -50,7 +51,7 @@ NodeDefBuilder::NodeDefBuilder(StringPiece name, StringPiece op_name,
 NodeDefBuilder::NodeDefBuilder(StringPiece name, const OpDef* op_def)
     : op_def_(op_def) {
-  node_def_.set_name(name.ToString());
+  node_def_.set_name(std::string(name));
   Initialize();
 }
@@ -170,7 +171,7 @@ void NodeDefBuilder::AddInput(StringPiece src_node, int src_index) {
   } else if (src_index > 0) {
     node_def_.add_input(strings::StrCat(src_node, ":", src_index));
   } else {
-    node_def_.add_input(src_node.ToString());
+    node_def_.add_input(std::string(src_node));
   }
 }
@@ -193,12 +194,12 @@ void NodeDefBuilder::VerifyInputRef(const OpDef::ArgDef* input_arg,
 }
 NodeDefBuilder& NodeDefBuilder::ControlInput(StringPiece src_node) {
-  control_inputs_.push_back(src_node.ToString());
+  control_inputs_.push_back(std::string(src_node));
   return *this;
 }
 NodeDefBuilder& NodeDefBuilder::Device(StringPiece device_spec) {
-  node_def_.set_device(device_spec.ToString());
+  node_def_.set_device(std::string(device_spec));
   return *this;
 }

@@ -245,7 +245,7 @@ DEFINE_GET_ATTR(NameAttrList, func, "func", emplace_back, v, ;);
 #undef DEFINE_GET_ATTR
 bool HasNodeAttr(const NodeDef& node_def, StringPiece attr_name) {
-  return node_def.attr().find(attr_name.ToString()) != node_def.attr().end();
+  return node_def.attr().find(std::string(attr_name)) != node_def.attr().end();
 }
 static const string& kEmptyString = *new string();
@@ -639,7 +639,7 @@ Status AttachDef(const Status& status, const Node& node) {
 void AddNodeAttr(StringPiece name, const AttrValue& value, NodeDef* node_def) {
   node_def->mutable_attr()->insert(
-      AttrValueMap::value_type(name.ToString(), value));
+      AttrValueMap::value_type(std::string(name), value));
 }
 #define ADD_NODE_ATTR(T) \
@@ -677,7 +677,7 @@ ADD_NODE_ATTR(gtl::ArraySlice<NameAttrList>)
 #undef ADD_NODE_ATTR
 void AddAttr(StringPiece name, const AttrValue& value, AttrValueMap* map) {
-  map->insert(AttrValueMap::value_type(name.ToString(), value));
+  map->insert(AttrValueMap::value_type(std::string(name), value));
 }
 #define ADD_ATTR(T) \

@@ -527,7 +527,7 @@ void FinalizeDoc(const string& text, OpDef* op_def,
 }  // namespace
 OpDefBuilder::OpDefBuilder(StringPiece op_name) {
-  op_def()->set_name(op_name.ToString());  // NOLINT
+  op_def()->set_name(std::string(op_name));  // NOLINT
 }
 OpDefBuilder& OpDefBuilder::Attr(StringPiece spec) {
@@ -584,7 +584,7 @@ OpDefBuilder& OpDefBuilder::Deprecated(int version, StringPiece explanation) {
   } else {
     OpDeprecation* deprecation = op_def()->mutable_deprecation();
     deprecation->set_version(version);
-    deprecation->set_explanation(explanation.ToString());
+    deprecation->set_explanation(std::string(explanation));
   }
   return *this;
 }

@@ -185,7 +185,7 @@ static bool FindMultiline(StringPiece line, size_t colon, string* end) {
     while (str_util::ConsumePrefix(&line, " ")) {
     }
     if (str_util::ConsumePrefix(&line, "<<")) {
-      *end = line.ToString();
+      *end = std::string(line);
       return true;
     }
     return false;

@@ -923,7 +923,7 @@ void OpKernelContext::clear_recorded_memory() {
 struct KernelRegistration {
   KernelRegistration(const KernelDef& d, StringPiece c,
                      kernel_factory::OpKernelRegistrar::Factory f)
-      : def(d), kernel_class_name(c.ToString()), factory(f) {}
+      : def(d), kernel_class_name(std::string(c)), factory(f) {}
   const KernelDef def;
   const string kernel_class_name;
   const kernel_factory::OpKernelRegistrar::Factory factory;

@@ -32,7 +32,7 @@ class Tensor;
 struct ShapeInferenceTestOp {
   typedef std::pair<string, DataType> ShapeAndType;
-  explicit ShapeInferenceTestOp(StringPiece name) : name(name.ToString()) {}
+  explicit ShapeInferenceTestOp(StringPiece name) : name(std::string(name)) {}
   string name;
   NodeDef node_def;
   std::vector<const Tensor*> input_tensors;

@@ -695,7 +695,7 @@ Status Graph::AddWhileContext(StringPiece frame_name,
                               std::vector<OutputTensor> body_outputs,
                               WhileContext** result) {
   auto pair = while_ctxs_.insert(std::pair<string, WhileContext>(
-      frame_name.ToString(),
+      std::string(frame_name),
       WhileContext(frame_name, std::move(enter_nodes), std::move(exit_nodes),
                    cond_output, std::move(body_inputs),
                    std::move(body_outputs))));

@@ -489,7 +489,7 @@ Status GraphConstructor::InitFromEdges() {
         num_control_edges++;
       } else {
         TensorId id(ParseTensorName(input_name));
-        if (next_iteration_nodes_.find(id.first.ToString()) !=
+        if (next_iteration_nodes_.find(std::string(id.first)) !=
             next_iteration_nodes_.end()) {
           has_loop_back_edge = true;
         }
@@ -811,7 +811,7 @@ void GraphConstructor::UniquifyNames(
     // We require that UniquifyNames() is called on all NodeDefs in topological
    // order. This guarantees that node_def's inputs will already be uniquified
    // if necessary.
-    auto iter = uniquified_names_.find(id.first.ToString());
+    auto iter = uniquified_names_.find(std::string(id.first));
     if (iter == uniquified_names_.end()) continue;
     id.first = iter->second;
     node_def->set_input(i, id.ToString());
@@ -830,7 +830,7 @@ void GraphConstructor::UpdateUniquifiedColocationNames() {
     for (int i = 0; i < coloc_values.size(); ++i) {
       StringPiece val(coloc_values[i]);
       if (str_util::ConsumePrefix(&val, kColocationGroupPrefix)) {
-        const auto& name_pair = uniquified_names_.find(val.ToString());
+        const auto& name_pair = uniquified_names_.find(std::string(val));
         if (name_pair == uniquified_names_.end()) continue;
         updated = true;
         coloc_values[i] =
@@ -856,7 +856,7 @@ bool GraphConstructor::NameExistsInGraphDef(StringPiece name) {
 }
 string GraphConstructor::FindUniqueName(StringPiece original_name) {
-  string name = original_name.ToString();
+  string name = std::string(original_name);
   int count = 0;
   // Check that any generated names don't collide with imported NodeDefs (as
   // well as nodes in g_).
@@ -989,7 +989,7 @@ Status GraphConstructor::Convert() {
                                        src_node->num_outputs(), " outputs");
       }
-      inputs.emplace_back(id.first.ToString(), src_node, src_index);
+      inputs.emplace_back(std::string(id.first), src_node, src_index);
     }
     if (has_data_back_edge && !IsMerge(*node_def)) {

@@ -157,7 +157,7 @@ class GraphConstructorTest : public ::testing::Test {
     }
     StringPiece loc(value[0]);
     return str_util::ConsumePrefix(&loc, kColocationGroupPrefix)
-               ? loc.ToString()
+               ? std::string(loc)
               : "";
   }

@@ -44,12 +44,12 @@ GraphDefBuilder::Options GraphDefBuilder::Options::WithControlInputs(
 }
 GraphDefBuilder::Options GraphDefBuilder::Options::WithNameImpl(
     StringPiece name) {
-  name_ = name.ToString();
+  name_ = std::string(name);
   return *this;
 }
 GraphDefBuilder::Options GraphDefBuilder::Options::WithDeviceImpl(
     StringPiece device) {
-  device_ = device.ToString();
+  device_ = std::string(device);
   return *this;
 }
 GraphDefBuilder::Options GraphDefBuilder::Options::WithControlInputImpl(

@@ -128,7 +128,7 @@ class GraphDefBuilder {
     Options WithControlInputsImpl(gtl::ArraySlice<Node*> control_inputs);
     template <class T>
     Options WithAttrImpl(StringPiece name, T&& value) {
-      attrs_.emplace_back(name.ToString(), AttrValue());
+      attrs_.emplace_back(std::string(name), AttrValue());
       SetAttrValue(std::forward<T>(value), &attrs_.back().second);
       return *this;
     }

@@ -785,7 +785,7 @@ Status TopologicalSortNodesWithTimePriority(
   for (int n = 0; n < gdef->node_size(); ++n) {
     const NodeDef* ndef = &gdef->node(n);
     for (int i = 0; i < ndef->input_size(); ++i) {
-      node_to_output_nodes[ParseTensorName(ndef->input(i)).first.ToString()]
+      node_to_output_nodes[std::string(ParseTensorName(ndef->input(i)).first)]
          .push_back(ndef);
     }
     int64 start_time;

@@ -30,7 +30,7 @@ NodeBuilder::NodeOut::NodeOut(Node* n, int32 i)  // NOLINT(runtime/explicit)
       dt(SafeGetOutput(node, i, &error)) {}
 NodeBuilder::NodeOut::NodeOut(StringPiece n, int32 i, DataType t)
-    : node(nullptr), error(false), name(n.ToString()), index(i), dt(t) {}
+    : node(nullptr), error(false), name(std::string(n)), index(i), dt(t) {}
 NodeBuilder::NodeOut::NodeOut()
     : node(nullptr), error(true), index(0), dt(DT_FLOAT) {}

@@ -23,7 +23,7 @@ WhileContext::WhileContext(StringPiece frame_name,
                            OutputTensor cond_output,
                            std::vector<OutputTensor> body_inputs,
                            std::vector<OutputTensor> body_outputs)
-    : frame_name_(frame_name.ToString()),
+    : frame_name_(std::string(frame_name)),
       enter_nodes_(std::move(enter_nodes)),
       exit_nodes_(std::move(exit_nodes)),
       cond_output_(cond_output),