Replaced calls to tensorflow::StringPiece::ToString with std::string conversions.
That is, instances of sp.ToString() are replaced with std::string(sp). This will allow tensorflow::StringPiece::ToString to be removed, which is necessary before it can be replaced with absl::string_view.

PiperOrigin-RevId: 195689392
This commit is contained in:
parent
9ba26ca0d5
commit
170634d5a1
@ -269,7 +269,7 @@ StatusOr<HloInstruction*> BroadcastZeros(
|
||||
StatusOr<std::unique_ptr<HloComputation>> CreateComputationWithSignature(
|
||||
ArraySlice<const Shape*> domain, const Shape& range,
|
||||
tensorflow::StringPiece name) {
|
||||
HloComputation::Builder b(name.ToString());
|
||||
HloComputation::Builder b{std::string(name)};
|
||||
int64 param_idx = 0;
|
||||
for (const Shape* param_shape : domain) {
|
||||
b.AddInstruction(HloInstruction::CreateParameter(
|
||||
|
@ -325,7 +325,7 @@ class HloDotDumper {
|
||||
bool show_backend_config, const HloExecutionProfile* profile,
|
||||
NodeFilter filter)
|
||||
: computation_(computation),
|
||||
label_(label.ToString()),
|
||||
label_(std::string(label)),
|
||||
debug_options_(debug_options),
|
||||
show_metadata_(show_metadata),
|
||||
show_backend_config_(show_backend_config),
|
||||
|
@ -438,7 +438,7 @@ HloInstruction::CreateCrossReplicaSum(
|
||||
<< "Outfeed shape " << shape << " must be compatible with operand shape "
|
||||
<< operand->shape();
|
||||
instruction->AppendOperand(operand);
|
||||
instruction->outfeed_config_ = outfeed_config.ToString();
|
||||
instruction->outfeed_config_ = std::string(outfeed_config);
|
||||
instruction->outfeed_shape_ = shape;
|
||||
return instruction;
|
||||
}
|
||||
@ -1168,7 +1168,7 @@ bool HloInstruction::HasSideEffect() const {
|
||||
for (auto operand : operands) {
|
||||
instruction->AppendOperand(operand);
|
||||
}
|
||||
instruction->custom_call_target_ = custom_call_target.ToString();
|
||||
instruction->custom_call_target_ = std::string(custom_call_target);
|
||||
return instruction;
|
||||
}
|
||||
|
||||
@ -1180,7 +1180,7 @@ bool HloInstruction::HasSideEffect() const {
|
||||
for (auto operand : operands) {
|
||||
instruction->AppendOperand(operand);
|
||||
}
|
||||
instruction->channel_name_ = channel_name.ToString();
|
||||
instruction->channel_name_ = std::string(channel_name);
|
||||
instruction->cost_estimate_ns_ = cost_estimate_ns;
|
||||
return instruction;
|
||||
}
|
||||
|
@ -1264,7 +1264,7 @@ class HloInstruction {
|
||||
|
||||
// Gets/sets the string identifier for this instruction.
|
||||
const string& name() const { return name_; }
|
||||
void set_name(tensorflow::StringPiece name) { name_ = name.ToString(); }
|
||||
void set_name(tensorflow::StringPiece name) { name_ = std::string(name); }
|
||||
|
||||
// Use the given NameUniquer to select a unique name for the instruction based
|
||||
// on the instruction's existing name.
|
||||
|
@ -90,7 +90,7 @@ StatusOr<bool> HloPassPipeline::Run(HloModule* module) {
|
||||
return Status::OK();
|
||||
};
|
||||
|
||||
string prefix = name().ToString() + ": pipeline start";
|
||||
string prefix = std::string(name()) + ": pipeline start";
|
||||
bool changed = false;
|
||||
string message;
|
||||
TF_RETURN_IF_ERROR(
|
||||
@ -98,12 +98,12 @@ StatusOr<bool> HloPassPipeline::Run(HloModule* module) {
|
||||
const string xla_dump_per_pass_hlo_proto_to =
|
||||
module->config().debug_options().xla_dump_per_pass_hlo_proto_to();
|
||||
if (!xla_dump_per_pass_hlo_proto_to.empty()) {
|
||||
DumpModuleProto(*module, xla_dump_per_pass_hlo_proto_to, name().ToString(),
|
||||
"pipeline_start");
|
||||
DumpModuleProto(*module, xla_dump_per_pass_hlo_proto_to,
|
||||
std::string(name()), "pipeline_start");
|
||||
}
|
||||
|
||||
for (auto& pass : passes_) {
|
||||
if (disabled_passes.count(pass->name().ToString()) > 0) {
|
||||
if (disabled_passes.count(std::string(pass->name())) > 0) {
|
||||
VLOG(1) << " Skipping HLO pass " << pass->name()
|
||||
<< ", disabled by --xla_disable_hlo_passes";
|
||||
continue;
|
||||
@ -121,7 +121,7 @@ StatusOr<bool> HloPassPipeline::Run(HloModule* module) {
|
||||
run_invariant_checkers(StrCat("after running pass: ", pass->name())));
|
||||
if (!xla_dump_per_pass_hlo_proto_to.empty()) {
|
||||
DumpModuleProto(*module, xla_dump_per_pass_hlo_proto_to,
|
||||
name().ToString(), pass->name().ToString());
|
||||
std::string(name()), std::string(pass->name()));
|
||||
}
|
||||
|
||||
changed |= changed_this_pass;
|
||||
|
@ -32,7 +32,7 @@ class HumanReadableProfileBuilder {
|
||||
explicit HumanReadableProfileBuilder(tensorflow::StringPiece computation_name,
|
||||
int64 total_cycles,
|
||||
double clock_rate_ghz)
|
||||
: computation_name_(computation_name.ToString()),
|
||||
: computation_name_(std::string(computation_name)),
|
||||
total_cycles_(total_cycles),
|
||||
clock_rate_ghz_(clock_rate_ghz) {
|
||||
CHECK_GE(clock_rate_ghz, 1e-9);
|
||||
@ -47,9 +47,10 @@ class HumanReadableProfileBuilder {
|
||||
tensorflow::StringPiece category, int64 cycles, int64 flop_count,
|
||||
int64 transcendental_count, int64 bytes_accessed,
|
||||
float optimal_seconds) {
|
||||
op_infos_.push_back(
|
||||
{op_name.ToString(), short_name.ToString(), category.ToString(), cycles,
|
||||
flop_count, transcendental_count, bytes_accessed, optimal_seconds});
|
||||
op_infos_.push_back({std::string(op_name), std::string(short_name),
|
||||
std::string(category), cycles, flop_count,
|
||||
transcendental_count, bytes_accessed,
|
||||
optimal_seconds});
|
||||
}
|
||||
|
||||
// Gets the human-readable profile.
|
||||
|
@ -53,7 +53,7 @@ NameUniquer::NameUniquer(const string& separator) {
|
||||
}
|
||||
|
||||
string NameUniquer::GetUniqueName(tensorflow::StringPiece prefix) {
|
||||
string root = GetSanitizedName(prefix.empty() ? "name" : prefix.ToString());
|
||||
string root = GetSanitizedName(prefix.empty() ? "name" : std::string(prefix));
|
||||
|
||||
// Strip away numeric suffix (if any). Only recognize separator if it is in
|
||||
// the middle of the name.
|
||||
|
@ -172,11 +172,11 @@ tensorflow::Status ExpectNotTupleOrOpaque(const Shape& shape,
|
||||
tensorflow::StringPiece op_type) {
|
||||
if (ShapeUtil::IsTuple(shape)) {
|
||||
return InvalidArgument("Expected non-tuple argument for %s, but got %s.",
|
||||
op_type.ToString().c_str(),
|
||||
std::string(op_type).c_str(),
|
||||
ShapeUtil::HumanString(shape).c_str());
|
||||
} else if (ShapeUtil::IsOpaque(shape)) {
|
||||
return InvalidArgument("Expected non-opaque argument for %s, but got %s.",
|
||||
op_type.ToString().c_str(),
|
||||
std::string(op_type).c_str(),
|
||||
ShapeUtil::HumanString(shape).c_str());
|
||||
} else {
|
||||
return tensorflow::Status::OK();
|
||||
|
@ -504,7 +504,7 @@ string Print(const NodeDef& n) {
|
||||
std::vector<string> dep;
|
||||
for (StringPiece s : n.input()) {
|
||||
if (str_util::ConsumePrefix(&s, "^")) {
|
||||
dep.push_back(s.ToString());
|
||||
dep.push_back(std::string(s));
|
||||
} else {
|
||||
dat.push_back(s);
|
||||
}
|
||||
|
@ -24,22 +24,23 @@ limitations under the License.
|
||||
namespace tensorflow {
|
||||
|
||||
NodeDefBuilder::NodeOut::NodeOut(StringPiece n, int i, DataType dt)
|
||||
: node(n.ToString()), index(i), data_type(dt) {}
|
||||
: node(std::string(n)), index(i), data_type(dt) {}
|
||||
|
||||
NodeDefBuilder::NodeOut::NodeOut() {
|
||||
// uninitialized, call Reset() before use.
|
||||
}
|
||||
|
||||
void NodeDefBuilder::NodeOut::Reset(StringPiece n, int i, DataType dt) {
|
||||
node = n.ToString();
|
||||
node = std::string(n);
|
||||
index = i;
|
||||
data_type = dt;
|
||||
}
|
||||
|
||||
NodeDefBuilder::NodeDefBuilder(StringPiece name, StringPiece op_name,
|
||||
const OpRegistryInterface* op_registry) {
|
||||
node_def_.set_name(name.ToString());
|
||||
const Status status = op_registry->LookUpOpDef(op_name.ToString(), &op_def_);
|
||||
node_def_.set_name(std::string(name));
|
||||
const Status status =
|
||||
op_registry->LookUpOpDef(std::string(op_name), &op_def_);
|
||||
if (status.ok()) {
|
||||
Initialize();
|
||||
} else {
|
||||
@ -50,7 +51,7 @@ NodeDefBuilder::NodeDefBuilder(StringPiece name, StringPiece op_name,
|
||||
|
||||
NodeDefBuilder::NodeDefBuilder(StringPiece name, const OpDef* op_def)
|
||||
: op_def_(op_def) {
|
||||
node_def_.set_name(name.ToString());
|
||||
node_def_.set_name(std::string(name));
|
||||
Initialize();
|
||||
}
|
||||
|
||||
@ -170,7 +171,7 @@ void NodeDefBuilder::AddInput(StringPiece src_node, int src_index) {
|
||||
} else if (src_index > 0) {
|
||||
node_def_.add_input(strings::StrCat(src_node, ":", src_index));
|
||||
} else {
|
||||
node_def_.add_input(src_node.ToString());
|
||||
node_def_.add_input(std::string(src_node));
|
||||
}
|
||||
}
|
||||
|
||||
@ -193,12 +194,12 @@ void NodeDefBuilder::VerifyInputRef(const OpDef::ArgDef* input_arg,
|
||||
}
|
||||
|
||||
NodeDefBuilder& NodeDefBuilder::ControlInput(StringPiece src_node) {
|
||||
control_inputs_.push_back(src_node.ToString());
|
||||
control_inputs_.push_back(std::string(src_node));
|
||||
return *this;
|
||||
}
|
||||
|
||||
NodeDefBuilder& NodeDefBuilder::Device(StringPiece device_spec) {
|
||||
node_def_.set_device(device_spec.ToString());
|
||||
node_def_.set_device(std::string(device_spec));
|
||||
return *this;
|
||||
}
|
||||
|
||||
|
@ -245,7 +245,7 @@ DEFINE_GET_ATTR(NameAttrList, func, "func", emplace_back, v, ;);
|
||||
#undef DEFINE_GET_ATTR
|
||||
|
||||
bool HasNodeAttr(const NodeDef& node_def, StringPiece attr_name) {
|
||||
return node_def.attr().find(attr_name.ToString()) != node_def.attr().end();
|
||||
return node_def.attr().find(std::string(attr_name)) != node_def.attr().end();
|
||||
}
|
||||
|
||||
static const string& kEmptyString = *new string();
|
||||
@ -639,7 +639,7 @@ Status AttachDef(const Status& status, const Node& node) {
|
||||
|
||||
void AddNodeAttr(StringPiece name, const AttrValue& value, NodeDef* node_def) {
|
||||
node_def->mutable_attr()->insert(
|
||||
AttrValueMap::value_type(name.ToString(), value));
|
||||
AttrValueMap::value_type(std::string(name), value));
|
||||
}
|
||||
|
||||
#define ADD_NODE_ATTR(T) \
|
||||
@ -677,7 +677,7 @@ ADD_NODE_ATTR(gtl::ArraySlice<NameAttrList>)
|
||||
#undef ADD_NODE_ATTR
|
||||
|
||||
void AddAttr(StringPiece name, const AttrValue& value, AttrValueMap* map) {
|
||||
map->insert(AttrValueMap::value_type(name.ToString(), value));
|
||||
map->insert(AttrValueMap::value_type(std::string(name), value));
|
||||
}
|
||||
|
||||
#define ADD_ATTR(T) \
|
||||
|
@ -527,7 +527,7 @@ void FinalizeDoc(const string& text, OpDef* op_def,
|
||||
} // namespace
|
||||
|
||||
OpDefBuilder::OpDefBuilder(StringPiece op_name) {
|
||||
op_def()->set_name(op_name.ToString()); // NOLINT
|
||||
op_def()->set_name(std::string(op_name)); // NOLINT
|
||||
}
|
||||
|
||||
OpDefBuilder& OpDefBuilder::Attr(StringPiece spec) {
|
||||
@ -584,7 +584,7 @@ OpDefBuilder& OpDefBuilder::Deprecated(int version, StringPiece explanation) {
|
||||
} else {
|
||||
OpDeprecation* deprecation = op_def()->mutable_deprecation();
|
||||
deprecation->set_version(version);
|
||||
deprecation->set_explanation(explanation.ToString());
|
||||
deprecation->set_explanation(std::string(explanation));
|
||||
}
|
||||
return *this;
|
||||
}
|
||||
|
@ -185,7 +185,7 @@ static bool FindMultiline(StringPiece line, size_t colon, string* end) {
|
||||
while (str_util::ConsumePrefix(&line, " ")) {
|
||||
}
|
||||
if (str_util::ConsumePrefix(&line, "<<")) {
|
||||
*end = line.ToString();
|
||||
*end = std::string(line);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
@ -923,7 +923,7 @@ void OpKernelContext::clear_recorded_memory() {
|
||||
struct KernelRegistration {
|
||||
KernelRegistration(const KernelDef& d, StringPiece c,
|
||||
kernel_factory::OpKernelRegistrar::Factory f)
|
||||
: def(d), kernel_class_name(c.ToString()), factory(f) {}
|
||||
: def(d), kernel_class_name(std::string(c)), factory(f) {}
|
||||
const KernelDef def;
|
||||
const string kernel_class_name;
|
||||
const kernel_factory::OpKernelRegistrar::Factory factory;
|
||||
|
@ -32,7 +32,7 @@ class Tensor;
|
||||
|
||||
struct ShapeInferenceTestOp {
|
||||
typedef std::pair<string, DataType> ShapeAndType;
|
||||
explicit ShapeInferenceTestOp(StringPiece name) : name(name.ToString()) {}
|
||||
explicit ShapeInferenceTestOp(StringPiece name) : name(std::string(name)) {}
|
||||
string name;
|
||||
NodeDef node_def;
|
||||
std::vector<const Tensor*> input_tensors;
|
||||
|
@ -695,7 +695,7 @@ Status Graph::AddWhileContext(StringPiece frame_name,
|
||||
std::vector<OutputTensor> body_outputs,
|
||||
WhileContext** result) {
|
||||
auto pair = while_ctxs_.insert(std::pair<string, WhileContext>(
|
||||
frame_name.ToString(),
|
||||
std::string(frame_name),
|
||||
WhileContext(frame_name, std::move(enter_nodes), std::move(exit_nodes),
|
||||
cond_output, std::move(body_inputs),
|
||||
std::move(body_outputs))));
|
||||
|
@ -489,7 +489,7 @@ Status GraphConstructor::InitFromEdges() {
|
||||
num_control_edges++;
|
||||
} else {
|
||||
TensorId id(ParseTensorName(input_name));
|
||||
if (next_iteration_nodes_.find(id.first.ToString()) !=
|
||||
if (next_iteration_nodes_.find(std::string(id.first)) !=
|
||||
next_iteration_nodes_.end()) {
|
||||
has_loop_back_edge = true;
|
||||
}
|
||||
@ -811,7 +811,7 @@ void GraphConstructor::UniquifyNames(
|
||||
// We require that UniquifyNames() is called on all NodeDefs in topological
|
||||
// order. This guarantees that node_def's inputs will already be uniquified
|
||||
// if necessary.
|
||||
auto iter = uniquified_names_.find(id.first.ToString());
|
||||
auto iter = uniquified_names_.find(std::string(id.first));
|
||||
if (iter == uniquified_names_.end()) continue;
|
||||
id.first = iter->second;
|
||||
node_def->set_input(i, id.ToString());
|
||||
@ -830,7 +830,7 @@ void GraphConstructor::UpdateUniquifiedColocationNames() {
|
||||
for (int i = 0; i < coloc_values.size(); ++i) {
|
||||
StringPiece val(coloc_values[i]);
|
||||
if (str_util::ConsumePrefix(&val, kColocationGroupPrefix)) {
|
||||
const auto& name_pair = uniquified_names_.find(val.ToString());
|
||||
const auto& name_pair = uniquified_names_.find(std::string(val));
|
||||
if (name_pair == uniquified_names_.end()) continue;
|
||||
updated = true;
|
||||
coloc_values[i] =
|
||||
@ -856,7 +856,7 @@ bool GraphConstructor::NameExistsInGraphDef(StringPiece name) {
|
||||
}
|
||||
|
||||
string GraphConstructor::FindUniqueName(StringPiece original_name) {
|
||||
string name = original_name.ToString();
|
||||
string name = std::string(original_name);
|
||||
int count = 0;
|
||||
// Check that any generated names don't collide with imported NodeDefs (as
|
||||
// well as nodes in g_).
|
||||
@ -989,7 +989,7 @@ Status GraphConstructor::Convert() {
|
||||
src_node->num_outputs(), " outputs");
|
||||
}
|
||||
|
||||
inputs.emplace_back(id.first.ToString(), src_node, src_index);
|
||||
inputs.emplace_back(std::string(id.first), src_node, src_index);
|
||||
}
|
||||
|
||||
if (has_data_back_edge && !IsMerge(*node_def)) {
|
||||
|
@ -157,7 +157,7 @@ class GraphConstructorTest : public ::testing::Test {
|
||||
}
|
||||
StringPiece loc(value[0]);
|
||||
return str_util::ConsumePrefix(&loc, kColocationGroupPrefix)
|
||||
? loc.ToString()
|
||||
? std::string(loc)
|
||||
: "";
|
||||
}
|
||||
|
||||
|
@ -44,12 +44,12 @@ GraphDefBuilder::Options GraphDefBuilder::Options::WithControlInputs(
|
||||
}
|
||||
GraphDefBuilder::Options GraphDefBuilder::Options::WithNameImpl(
|
||||
StringPiece name) {
|
||||
name_ = name.ToString();
|
||||
name_ = std::string(name);
|
||||
return *this;
|
||||
}
|
||||
GraphDefBuilder::Options GraphDefBuilder::Options::WithDeviceImpl(
|
||||
StringPiece device) {
|
||||
device_ = device.ToString();
|
||||
device_ = std::string(device);
|
||||
return *this;
|
||||
}
|
||||
GraphDefBuilder::Options GraphDefBuilder::Options::WithControlInputImpl(
|
||||
|
@ -128,7 +128,7 @@ class GraphDefBuilder {
|
||||
Options WithControlInputsImpl(gtl::ArraySlice<Node*> control_inputs);
|
||||
template <class T>
|
||||
Options WithAttrImpl(StringPiece name, T&& value) {
|
||||
attrs_.emplace_back(name.ToString(), AttrValue());
|
||||
attrs_.emplace_back(std::string(name), AttrValue());
|
||||
SetAttrValue(std::forward<T>(value), &attrs_.back().second);
|
||||
return *this;
|
||||
}
|
||||
|
@ -785,7 +785,7 @@ Status TopologicalSortNodesWithTimePriority(
|
||||
for (int n = 0; n < gdef->node_size(); ++n) {
|
||||
const NodeDef* ndef = &gdef->node(n);
|
||||
for (int i = 0; i < ndef->input_size(); ++i) {
|
||||
node_to_output_nodes[ParseTensorName(ndef->input(i)).first.ToString()]
|
||||
node_to_output_nodes[std::string(ParseTensorName(ndef->input(i)).first)]
|
||||
.push_back(ndef);
|
||||
}
|
||||
int64 start_time;
|
||||
|
@ -30,7 +30,7 @@ NodeBuilder::NodeOut::NodeOut(Node* n, int32 i) // NOLINT(runtime/explicit)
|
||||
dt(SafeGetOutput(node, i, &error)) {}
|
||||
|
||||
NodeBuilder::NodeOut::NodeOut(StringPiece n, int32 i, DataType t)
|
||||
: node(nullptr), error(false), name(n.ToString()), index(i), dt(t) {}
|
||||
: node(nullptr), error(false), name(std::string(n)), index(i), dt(t) {}
|
||||
|
||||
NodeBuilder::NodeOut::NodeOut()
|
||||
: node(nullptr), error(true), index(0), dt(DT_FLOAT) {}
|
||||
|
@ -23,7 +23,7 @@ WhileContext::WhileContext(StringPiece frame_name,
|
||||
OutputTensor cond_output,
|
||||
std::vector<OutputTensor> body_inputs,
|
||||
std::vector<OutputTensor> body_outputs)
|
||||
: frame_name_(frame_name.ToString()),
|
||||
: frame_name_(std::string(frame_name)),
|
||||
enter_nodes_(std::move(enter_nodes)),
|
||||
exit_nodes_(std::move(exit_nodes)),
|
||||
cond_output_(cond_output),
|
||||
|
Loading…
Reference in New Issue
Block a user