Qualify uses of std::string.

PiperOrigin-RevId: 302910208
Change-Id: If59968d915f4b4e87f21fa34c05ee87299b75a58
Authored by A. Unique TensorFlower on 2020-03-25 09:49:49 -07:00; committed by TensorFlower Gardener.
parent a3493f7e2b
commit 0c4cb6e207
12 changed files with 46 additions and 43 deletions
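
Background on the change itself: inside namespace tensorflow, the unqualified `string` has historically been an alias for std::string (provided by tensorflow/core/platform/types.h), so spelling the type out does not change behavior. A minimal before/after sketch, assuming that alias:

#include <string>

namespace tensorflow {

// Assumed alias, historically provided by tensorflow/core/platform/types.h.
using std::string;

// Before this change: relies on the alias above.
string OldSpelling();

// After this change: the same type, written out explicitly.
std::string NewSpelling();

}  // namespace tensorflow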

View File

@@ -124,7 +124,7 @@ Status LoadSavedModel(const SessionOptions& session_options,
/// the export directory definitely does not contain a SavedModel. If the method
/// returns `true`, the export directory may contain a SavedModel but provides
/// no guarantee that it can be loaded.
-bool MaybeSavedModelDirectory(const string& export_dir);
+bool MaybeSavedModelDirectory(const std::string& export_dir);
} // namespace tensorflow
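
For context, a minimal usage sketch of the predicate above paired with LoadSavedModel; the includes, tag constant, and error helper are assumptions drawn from the surrounding SavedModel C++ API, not part of this change:

#include "tensorflow/cc/saved_model/loader.h"
#include "tensorflow/cc/saved_model/tag_constants.h"
#include "tensorflow/core/lib/core/errors.h"

tensorflow::Status TryLoad(const std::string& export_dir,
                           tensorflow::SavedModelBundle* bundle) {
  // Cheap pre-check; a `true` result still does not guarantee the load succeeds.
  if (!tensorflow::MaybeSavedModelDirectory(export_dir)) {
    return tensorflow::errors::NotFound("No SavedModel found in ", export_dir);
  }
  return tensorflow::LoadSavedModel(tensorflow::SessionOptions(),
                                    tensorflow::RunOptions(), export_dir,
                                    {tensorflow::kSavedModelTagServe}, bundle);
}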

View File

@@ -114,7 +114,7 @@ struct AllocatorStats {
bytes_reserved(0),
peak_bytes_reserved(0) {}
-string DebugString() const;
+std::string DebugString() const;
};
// Allocator is an abstract interface for allocating and deallocating
@@ -127,7 +127,7 @@ class Allocator {
virtual ~Allocator();
// Return a string identifying this allocator
-virtual string Name() = 0;
+virtual std::string Name() = 0;
// Return an uninitialized block of memory that is "num_bytes" bytes
// in size. The returned pointer is guaranteed to be aligned to a
@@ -242,7 +242,7 @@ class AllocatorWrapper : public Allocator {
// Returns the wrapped allocator to which all calls are delegated.
Allocator* wrapped() const { return wrapped_; }
-string Name() override { return wrapped_->Name(); }
+std::string Name() override { return wrapped_->Name(); }
void* AllocateRaw(size_t alignment, size_t num_bytes) override {
return wrapped_->AllocateRaw(alignment, num_bytes);
@@ -336,7 +336,7 @@ struct AllocatorAttributes {
int32 scope_id = 0;
// Returns a human readable representation of this.
-string DebugString() const;
+std::string DebugString() const;
};
// Returns a trivial implementation of Allocator, which is a process singleton.
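
A hedged sketch of the AllocatorWrapper pattern excerpted above: only the identifying Name() is overridden while allocation calls keep delegating to the wrapped allocator. The wrapped() accessor comes from allocator.h as shown; the subclass, constructor form, and name prefix are assumptions for illustration:

#include <string>

#include "tensorflow/core/framework/allocator.h"

class RenamingAllocator : public tensorflow::AllocatorWrapper {
 public:
  explicit RenamingAllocator(tensorflow::Allocator* base)
      : tensorflow::AllocatorWrapper(base) {}

  // Identify this allocator; AllocateRaw/DeallocateRaw stay delegated.
  std::string Name() override { return "renamed_" + wrapped()->Name(); }
};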

View File

@@ -38,23 +38,23 @@ class ResourceHandle {
~ResourceHandle();
// Unique name for the device containing the resource.
-const string& device() const { return device_; }
+const std::string& device() const { return device_; }
// Names of the devices containing the resource.
const std::vector<string>& allowed_devices() const {
return allowed_devices_;
}
-void set_device(const string& device) { device_ = device; }
+void set_device(const std::string& device) { device_ = device; }
void set_allowed_devices(const std::vector<string>& devices) {
allowed_devices_ = devices;
}
// Container in which this resource is placed.
-const string& container() const { return container_; }
-void set_container(const string& container) { container_ = container; }
+const std::string& container() const { return container_; }
+void set_container(const std::string& container) { container_ = container; }
// Unique name of this resource.
-const string& name() const { return name_; }
-void set_name(const string& name) { name_ = name; }
+const std::string& name() const { return name_; }
+void set_name(const std::string& name) { name_ = name; }
// Hash code for the type of the resource. Is only valid in the same device
// and in the same execution.
@@ -63,8 +63,10 @@ class ResourceHandle {
// For debug-only, the name of the type pointed to by this handle, if
// available.
-const string& maybe_type_name() const { return maybe_type_name_; }
-void set_maybe_type_name(const string& value) { maybe_type_name_ = value; }
+const std::string& maybe_type_name() const { return maybe_type_name_; }
+void set_maybe_type_name(const std::string& value) {
+maybe_type_name_ = value;
+}
// Data types and shapes for the underlying resource.
std::vector<DtypeAndPartialTensorShape> dtypes_and_shapes() const {
@@ -80,10 +82,10 @@ class ResourceHandle {
void FromProto(const ResourceHandleProto& proto);
// Serialization via ResourceHandleProto
-string SerializeAsString() const;
-bool ParseFromString(const string& s);
+std::string SerializeAsString() const;
+bool ParseFromString(const std::string& s);
-string DebugString() const;
+std::string DebugString() const;
// GUID for anonymous resources. Resources with this shared_name will have
// their shared_name replaced with a GUID at creation time
@@ -93,19 +95,19 @@ class ResourceHandle {
public:
// The default device containing the resource, where the ResourceHandle is
// initially created.
-string device_;
+std::string device_;
// A set of devices containing the resource. If empty, the resource only
// exists on device_. Can be represented in wildcard patterns.
std::vector<string> allowed_devices_;
-string container_;
-string name_;
+std::string container_;
+std::string name_;
uint64 hash_code_ = 0;
-string maybe_type_name_;
+std::string maybe_type_name_;
std::vector<DtypeAndPartialTensorShape> dtypes_and_shapes_;
};
// For backwards compatibility for when this was a proto
-string ProtoDebugString(const ResourceHandle& handle);
+std::string ProtoDebugString(const ResourceHandle& handle);
// Encodes a list of ResourceHandle protos in the given StringListEncoder.
void EncodeResourceHandleList(const ResourceHandle* p, int64 n,
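
A brief, hedged sketch of the string-valued setters shown above; the device, container, and name values are arbitrary examples, not part of this change:

#include "tensorflow/core/framework/resource_handle.h"

tensorflow::ResourceHandle MakeExampleHandle() {
  tensorflow::ResourceHandle handle;
  handle.set_device("/job:localhost/replica:0/task:0/device:CPU:0");
  handle.set_container("localhost");
  handle.set_name("example_resource");
  return handle;
}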

View File

@@ -86,11 +86,11 @@ class ShapeInferenceTestutil {
.error_message())
#define INFER_ERROR(error_substring, op, i) \
{ \
-string error_message = \
+std::string error_message = \
::tensorflow::shape_inference::ShapeInferenceTestutil::InferShapes( \
op, i, "e") \
.error_message(); \
-const string& substring = error_substring; \
+const std::string& substring = error_substring; \
EXPECT_NE("", error_message); \
EXPECT_TRUE(absl::StrContains(error_message, substring)) \
<< "Expected to see '" << substring << "' in '" << error_message \

View File

@@ -571,19 +571,19 @@ class Tensor {
int64 begin) const;
/// Render the first `max_entries` values in `*this` into a string.
-string SummarizeValue(int64 max_entries, bool print_v2 = false) const;
+std::string SummarizeValue(int64 max_entries, bool print_v2 = false) const;
/// A human-readable summary of the tensor suitable for debugging.
// `num_values` is the number of actual data values in the tensor
// included in the message. If the tensor might be resident in
// GPU/TPU memory use DeviceSafeDebugString instead.
-string DebugString(int num_values) const;
-string DebugString() const { return DebugString(3); }
+std::string DebugString(int num_values) const;
+std::string DebugString() const { return DebugString(3); }
// Variant of DebugString() that should be used for possibly non-CPU tensors.
// If the tensor is not resident on CPU, we can't read its values as
// DebugString() does.
-string DeviceSafeDebugString() const;
+std::string DeviceSafeDebugString() const;
/// Fill in the `TensorDescription` proto with metadata about the
/// tensor that is useful for monitoring and debugging.
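
A small usage sketch of the debug helpers above; the shape, dtype, and includes are assumptions for illustration, not part of this change:

#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/framework/tensor_shape.h"
#include "tensorflow/core/platform/logging.h"

void DumpExampleTensor() {
  tensorflow::Tensor t(tensorflow::DT_FLOAT, tensorflow::TensorShape({2, 3}));
  t.flat<float>().setZero();
  LOG(INFO) << t.DebugString();      // Summarizes the first 3 values by default.
  LOG(INFO) << t.SummarizeValue(6);  // Renders up to 6 values as a string.
}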

View File

@@ -69,8 +69,8 @@ class TensorShapeRep {
int64 num_elements() const { return num_elements_; }
/// For error messages.
-string DebugString() const;
-static string DebugString(const TensorShapeProto& proto);
+std::string DebugString() const;
+static std::string DebugString(const TensorShapeProto& proto);
void DumpRep() const; // XXX
@@ -397,7 +397,8 @@ class TensorShapeUtils {
static Status MakeShape(gtl::ArraySlice<int64> shape,
PartialTensorShape* out);
-static string ShapeListString(const gtl::ArraySlice<TensorShape>& shapes);
+static std::string ShapeListString(
+const gtl::ArraySlice<TensorShape>& shapes);
/// \brief Returns true iff `shape` starts with `prefix`.
static bool StartsWith(const TensorShape& shape, const TensorShape& prefix);
@@ -462,7 +463,7 @@ class PartialTensorShape : public TensorShapeBase<PartialTensorShape> {
/// common predicates on a partially known tensor shape.
class PartialTensorShapeUtils {
public:
-static string PartialShapeListString(
+static std::string PartialShapeListString(
const gtl::ArraySlice<PartialTensorShape>& shapes);
static bool AreIdentical(const gtl::ArraySlice<PartialTensorShape>& shapes0,

View File

@@ -59,14 +59,14 @@ class DeviceType {
explicit DeviceType(StringPiece type) : type_(type.data(), type.size()) {}
const char* type() const { return type_.c_str(); }
-const string& type_string() const { return type_; }
+const std::string& type_string() const { return type_; }
bool operator<(const DeviceType& other) const;
bool operator==(const DeviceType& other) const;
bool operator!=(const DeviceType& other) const { return !(*this == other); }
private:
-string type_;
+std::string type_;
};
std::ostream& operator<<(std::ostream& os, const DeviceType& d);
@@ -110,10 +110,10 @@ typedef gtl::InlinedVector<std::pair<DeviceType, int32>, 4>
PrioritizedDeviceTypeVector;
// Convert the enums to strings for errors:
-string DataTypeString(DataType dtype);
-string DeviceTypeString(const DeviceType& device_type);
-string DataTypeSliceString(const DataTypeSlice dtypes);
-inline string DataTypeVectorString(const DataTypeVector& dtypes) {
+std::string DataTypeString(DataType dtype);
+std::string DeviceTypeString(const DeviceType& device_type);
+std::string DataTypeSliceString(const DataTypeSlice dtypes);
+inline std::string DataTypeVectorString(const DataTypeVector& dtypes) {
return DataTypeSliceString(dtypes);
}
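
The enum-to-string helpers above can be exercised directly; a tiny hedged example (includes assumed, not part of this change):

#include "tensorflow/core/framework/types.h"
#include "tensorflow/core/platform/logging.h"

void LogTypeNames() {
  // Maps the enum and device type to their canonical names, e.g. "float", "CPU".
  LOG(INFO) << tensorflow::DataTypeString(tensorflow::DT_FLOAT);
  LOG(INFO) << tensorflow::DeviceTypeString(tensorflow::DeviceType("CPU"));
}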

View File

@@ -26,7 +26,7 @@ namespace graph {
// Sets the default device for all nodes in graph_def to "device",
// only if not already set.
-inline void SetDefaultDevice(const string& device, GraphDef* graph_def) {
+inline void SetDefaultDevice(const std::string& device, GraphDef* graph_def) {
for (int i = 0; i < graph_def->node_size(); ++i) {
auto node = graph_def->mutable_node(i);
if (node->device().empty()) {
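
A hedged usage sketch of the helper above, assigning a CPU device to any node whose placement is still empty; the device string is an arbitrary example:

#include "tensorflow/core/framework/graph.pb.h"
#include "tensorflow/core/graph/default_device.h"

void PlaceOnCpuByDefault(tensorflow::GraphDef* graph_def) {
  // Only nodes with an empty device field are touched.
  tensorflow::graph::SetDefaultDevice("/device:CPU:0", graph_def);
}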

View File

@@ -52,7 +52,7 @@ struct SessionOptions {
///
/// If the session disconnects from the remote process during its
/// lifetime, session calls may fail immediately.
-string target;
+std::string target;
/// Configuration options.
ConfigProto config;
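
A hedged sketch of filling in the two fields above; the gRPC address is a placeholder, and leaving target empty keeps the session in-process:

#include <memory>

#include "tensorflow/core/public/session.h"
#include "tensorflow/core/public/session_options.h"

std::unique_ptr<tensorflow::Session> ConnectToServer() {
  tensorflow::SessionOptions options;
  options.target = "grpc://localhost:2222";       // Placeholder address.
  options.config.set_log_device_placement(true);  // ConfigProto option.
  return std::unique_ptr<tensorflow::Session>(tensorflow::NewSession(options));
}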

View File

@@ -157,7 +157,7 @@ class HostExecutor : public internal::StreamExecutorInterface {
}
port::Status SetDeviceSharedMemoryConfig(SharedMemoryConfig config) override {
-string error_msg{
+std::string error_msg{
"Shared memory configuration is unsupported for host "
"executors."};
LOG(INFO) << error_msg;

View File

@@ -38,7 +38,7 @@ int HostPlatform::VisibleDeviceCount() const {
return std::thread::hardware_concurrency();
}
-const string& HostPlatform::Name() const { return name_; }
+const std::string& HostPlatform::Name() const { return name_; }
port::StatusOr<std::unique_ptr<DeviceDescription>>
HostPlatform::DescriptionForDevice(int ordinal) const {

View File

@@ -49,7 +49,7 @@ class HostPlatform : public Platform {
// base::NumCPUs().
int VisibleDeviceCount() const override;
-const string& Name() const override;
+const std::string& Name() const override;
port::StatusOr<std::unique_ptr<DeviceDescription>> DescriptionForDevice(
int ordinal) const override;
@@ -71,7 +71,7 @@ class HostPlatform : public Platform {
private:
// This platform's name.
-string name_;
+std::string name_;
// Cache of created StreamExecutors.
ExecutorCache executor_cache_;