Use explicit primitive types with strings::Printf

PiperOrigin-RevId: 299753695
Change-Id: Iecbd08903b22442c210c3d404946077535a6089f
commit 8f597046dc
parent b3307f90aa
Author: A. Unique TensorFlower, 2020-03-08 20:54:49 -07:00
Committed by: TensorFlower Gardener
25 changed files with 83 additions and 57 deletions
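
The motivation: "%lld" and "%llu" require arguments of exactly `long long` and `unsigned long long`, but TensorFlow's `int64`/`uint64` aliases are not guaranteed to expand to those types on every platform, so passing them straight to strings::Printf can trigger -Wformat warnings or undefined behavior. The commit casts at each call site, or pins literal types with brace initialization. A minimal sketch of the pattern, with `int64` standing in for the TensorFlow alias (the alias choice below is an assumption for illustration):

    // Sketch only: whether the real alias is `long` or `long long`
    // varies by build.
    #include <cstdint>
    #include <cstdio>

    using int64 = std::int64_t;  // assumption for this sketch

    void Demo(int64 batch_size) {
      // May warn under -Wformat when int64 is not exactly `long long`:
      //   std::printf("%lld\n", batch_size);
      // Portable: make the argument exactly the type "%lld" expects.
      std::printf("%lld\n", static_cast<long long>(batch_size));
    }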

----------------------------------------

@@ -980,10 +980,10 @@ TEST_F(ShapeRefinerTest, ConstantValueAsShape_PackInt64) {
   InputList inputs{
       // clang-format off
-      Input(ops::Const<int64>(root, 10LL)),
-      Input(ops::Const<int64>(root, 20LL)),
+      Input(ops::Const<int64>(root, int64{10})),
+      Input(ops::Const<int64>(root, int64{20})),
       Input(Output(scalar_non_const)),
-      Input(ops::Const<int64>(root, 1LL << 40)),
+      Input(ops::Const<int64>(root, int64{1} << 40)),
   };  // clang-format on
   auto pack = ops::Stack(root, inputs);
   TF_ASSERT_OK(root.status());
@@ -1008,8 +1008,8 @@ TEST_F(ShapeRefinerTest, ConstantValueAsShape_PackUnknownDim) {
   Scope root = Scope::NewRootScope();
   InputList inputs{
-      Input(ops::Const<int64>(root, 10LL)),
-      Input(ops::Const<int64>(root, -1LL)),
+      Input(ops::Const<int64>(root, int64{10})),
+      Input(ops::Const<int64>(root, int64{-1})),
   };
   auto pack = ops::Stack(root, inputs);
   TF_ASSERT_OK(root.status());
@@ -1035,8 +1035,8 @@ TEST_F(ShapeRefinerTest, ConstantValueAsShape_PackInvalidInput) {
   // Inputs are length 2 vectors instead of scalars.
   InputList inputs{
-      Input(ops::Const<int64>(root, {10LL, 20LL})),
-      Input(ops::Const<int64>(root, {10LL, 21LL})),
+      Input(ops::Const<int64>(root, {int64{10}, int64{20}})),
+      Input(ops::Const<int64>(root, {int64{10}, int64{21}})),
   };
   auto pack = ops::Stack(root, inputs);
   TF_ASSERT_OK(root.status());
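
The test edits above swap the `LL` suffix for brace initialization. `10LL` is always exactly `long long`, which may differ from the `int64` alias; `int64{10}` has exactly the alias's type on every platform, and brace initialization rejects narrowing at compile time. A small illustrative fragment (the alias choice is again an assumption):

    using int64 = long long;   // assumption; `long` on some other builds
    int64 a = 10LL;            // implicit conversion whenever int64 != long long
    int64 b = int64{10};       // exactly int64, no conversion
    int64 c = int64{1} << 40;  // the shift is done at int64's width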

----------------------------------------

@@ -395,11 +395,12 @@ Status DebugIO::PublishDebugMetadata(
   } else if (absl::StartsWith(absl::AsciiStrToLower(url), kFileURLScheme)) {
     const string dump_root_dir = url.substr(strlen(kFileURLScheme));
     const string core_metadata_path = AppendTimestampToFilePath(
-        io::JoinPath(
-            dump_root_dir,
-            strings::StrCat(DebugNodeKey::kMetadataFilePrefix,
-                            DebugIO::kCoreMetadataTag, "sessionrun",
-                            strings::Printf("%.14lld", session_run_index))),
+        io::JoinPath(dump_root_dir,
+                     strings::StrCat(
+                         DebugNodeKey::kMetadataFilePrefix,
+                         DebugIO::kCoreMetadataTag, "sessionrun",
+                         strings::Printf("%.14lld", static_cast<long long>(
+                                                        session_run_index)))),
         Env::Default()->NowMicros());
     status.Update(DebugFileIO::DumpEventProtoToFile(
         event, string(io::Dirname(core_metadata_path)),

----------------------------------------

@@ -557,7 +557,7 @@ tensorflow::Status EagerServiceImpl::GetServerContext(
     return errors::InvalidArgument(strings::Printf(
         "Unable to find a context_id matching the specified one "
         "(%llu). Perhaps the worker was restarted, or the context was GC'd?",
-        context_id));
+        static_cast<unsigned long long>(context_id)));
   }
   *server_context = iter->second;

----------------------------------------

@@ -303,7 +303,8 @@ Status GraphMgr::Register(
   // Inserts one item into table_.
   {
     mutex_lock l(mu_);
-    *graph_handle = strings::Printf("%016llx", ++next_id_);
+    *graph_handle =
+        strings::Printf("%016llx", static_cast<long long>(++next_id_));
     item->handle = *graph_handle;
     CHECK(table_.insert({*graph_handle, item}).second);
   }

----------------------------------------

@@ -54,7 +54,8 @@ class BatchDatasetOp::Dataset : public DatasetBase {
         input_(input),
         op_version_(op_version),
         traceme_metadata_(
-            {{"batch_size", strings::Printf("%lld", batch_size)},
+            {{"batch_size",
+              strings::Printf("%lld", static_cast<long long>(batch_size))},
              {"drop_remainder", drop_remainder ? "true" : "false"},
              {"parallel_copy", parallel_copy ? "true" : "false"}}) {
     input_->Ref();

----------------------------------------

@@ -100,7 +100,8 @@ class MapAndBatchDatasetOp::Dataset : public DatasetBase {
         traceme_metadata_(
             {{"autotune",
               num_parallel_calls == model::kAutotune ? "true" : "false"},
-             {"batch_size", strings::Printf("%lld", batch_size)},
+             {"batch_size",
+              strings::Printf("%lld", static_cast<long long>(batch_size))},
              {"drop_remainder", drop_remainder ? "true" : "false"}}) {
     input_->Ref();
   }
@@ -285,8 +286,8 @@ class MapAndBatchDatasetOp::Dataset : public DatasetBase {
   }
   TraceMeMetadata GetTraceMeMetadata() const override {
-    int64 parallelism = -1;
-    int64 max_batch_results = -1;
+    long long parallelism = -1;        // NOLINT
+    long long max_batch_results = -1;  // NOLINT
     // NOTE: We only set the parallelism value if the lock can be acquired
     // right away to avoid introducing tracing overhead.
     if (mu_->try_lock()) {
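
Note the second hunk's variant of the fix: when a local exists only to feed "%lld", it can be declared `long long` directly so no cast is needed at the call site; the `// NOLINT` marker quiets the style check that would otherwise suggest the `int64` alias. A self-contained sketch of the idea:

    #include <cstdio>

    int main() {
      long long parallelism = -1;        // NOLINT: deliberate, matches "%lld"
      long long max_batch_results = -1;  // NOLINT
      std::printf("parallelism=%lld max_batch_results=%lld\n", parallelism,
                  max_batch_results);
    }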

----------------------------------------

@@ -107,8 +107,10 @@ class ParallelInterleaveDatasetOp::Dataset : public DatasetBase {
         output_types_(output_types),
         output_shapes_(output_shapes),
         traceme_metadata_(
-            {{"block_length", strings::Printf("%lld", block_length)},
-             {"cycle_length", strings::Printf("%lld", cycle_length)},
+            {{"block_length",
+              strings::Printf("%lld", static_cast<long long>(block_length))},
+             {"cycle_length",
+              strings::Printf("%lld", static_cast<long long>(cycle_length))},
              {"deterministic",
               deterministic.IsDeterministic() || deterministic.IsDefault()
                   ? "true"

----------------------------------------

@@ -62,7 +62,8 @@ class RebatchDatasetOp : public UnaryDatasetOpKernel {
         output_types_(output_types),
         output_shapes_(output_shapes),
         traceme_metadata_(
-            {{"num_replicas", strings::Printf("%lld", num_replicas)}}) {
+            {{"num_replicas", strings::Printf("%lld", static_cast<long long>(
+                                                          num_replicas))}}) {
     input_->Ref();
   }

----------------------------------------

@@ -1206,7 +1206,9 @@ class SnapshotDatasetOp : public UnaryDatasetOpKernel {
     string GetSnapshotFilename() {
       mutex_lock l(mu_);
       string snapshot_data_filename = io::JoinPath(
-          run_dir_, strings::Printf("%08llu.snapshot", next_file_index_));
+          run_dir_, strings::Printf(
+                        "%08llu.snapshot",
+                        static_cast<unsigned long long>(next_file_index_)));
       next_file_index_++;
       return snapshot_data_filename;
     }

----------------------------------------

@@ -62,8 +62,10 @@ class InterleaveDatasetOp::Dataset : public DatasetBase {
         output_types_(output_types),
         output_shapes_(output_shapes),
         traceme_metadata_(
-            {{"block_length", strings::Printf("%lld", block_length)},
-             {"cycle_length", strings::Printf("%lld", cycle_length)}}) {
+            {{"block_length",
+              strings::Printf("%lld", static_cast<long long>(block_length))},
+             {"cycle_length",
+              strings::Printf("%lld", static_cast<long long>(cycle_length))}}) {
     input_->Ref();
   }

----------------------------------------

@@ -61,7 +61,8 @@ class PaddedBatchDatasetOp::Dataset : public DatasetBase {
         input_(input),
         op_version_(op_version),
         traceme_metadata_(
-            {{"batch_size", strings::Printf("%lld", batch_size)},
+            {{"batch_size",
+              strings::Printf("%lld", static_cast<long long>(batch_size))},
              {"drop_remainder", drop_remainder ? "true" : "false"}}) {
     input_->Ref();

----------------------------------------

@@ -172,8 +172,10 @@ class ParallelInterleaveDatasetOp::Dataset : public DatasetBase {
         traceme_metadata_(
             {{"autotune",
               num_parallel_calls == model::kAutotune ? "true" : "false"},
-             {"block_length", strings::Printf("%lld", block_length)},
-             {"cycle_length", strings::Printf("%lld", cycle_length)},
+             {"block_length",
+              strings::Printf("%lld", static_cast<long long>(block_length))},
+             {"cycle_length",
+              strings::Printf("%lld", static_cast<long long>(cycle_length))},
              {"deterministic",
               deterministic.IsNondeterministic() ? "false" : "true"}}) {
     input_->Ref();
@@ -467,8 +469,9 @@ class ParallelInterleaveDatasetOp::Dataset : public DatasetBase {
       mu_->unlock();
     }
     auto result = dataset()->traceme_metadata_;
-    result.push_back(
-        std::make_pair("parallelism", strings::Printf("%lld", parallelism)));
+    result.push_back(std::make_pair(
+        "parallelism",
+        strings::Printf("%lld", static_cast<long long>(parallelism))));
     return result;
   }

----------------------------------------

@@ -471,8 +471,9 @@ class ParallelMapIterator : public DatasetBaseIterator {
     result.push_back(std::make_pair("autotune", autotune_ ? "true" : "false"));
     result.push_back(
        std::make_pair("deterministic", deterministic_ ? "true" : "false"));
-    result.push_back(
-        std::make_pair("parallelism", strings::Printf("%lld", parallelism)));
+    result.push_back(std::make_pair(
+        "parallelism",
+        strings::Printf("%lld", static_cast<long long>(parallelism))));
     return result;
   }

----------------------------------------

@@ -278,11 +278,13 @@ class PrefetchDatasetOp::Dataset : public DatasetBase {
       mu_->unlock();
     }
     data::TraceMeMetadata result;
-    result.push_back(
-        std::make_pair("buffer_limit", strings::Printf("%lld", limit)));
+    result.push_back(std::make_pair(
+        "buffer_limit",
+        strings::Printf("%lld", static_cast<long long>(limit))));
     if (dataset()->slack_period_ > 0) {
-      result.push_back(
-          std::make_pair("slack", strings::Printf("%lld", slack_us_.load())));
+      result.push_back(std::make_pair(
+          "slack",
+          strings::Printf("%lld", static_cast<long long>(slack_us_.load()))));
     }
     return result;
   }

----------------------------------------

@@ -48,8 +48,9 @@ class ShardDatasetOp::Dataset : public DatasetBase {
         input_(input),
         require_non_empty_(require_non_empty),
         traceme_metadata_(
-            {{"index", strings::Printf("%lld", index)},
-             {"num_shards", strings::Printf("%lld", num_shards)}}) {
+            {{"index", strings::Printf("%lld", static_cast<long long>(index))},
+             {"num_shards",
+              strings::Printf("%lld", static_cast<long long>(num_shards))}}) {
     input_->Ref();
   }

----------------------------------------

@@ -108,7 +108,8 @@ class ShuffleDatasetOpBase::ShuffleDatasetBase : public DatasetBase {
         buffer_size_(buffer_size),
         count_(count),
         traceme_metadata_(
-            {{"buffer_size", strings::Printf("%lld", buffer_size)}}) {
+            {{"buffer_size",
+              strings::Printf("%lld", static_cast<long long>(buffer_size))}}) {
     input_->Ref();
   }

----------------------------------------

@@ -54,9 +54,12 @@ class WindowDatasetOp::Dataset : public DatasetBase {
         output_dtypes_(input_->output_dtypes().size(), {DT_VARIANT}),
         output_shapes_(input_->output_shapes().size(), TensorShape({})),
         traceme_metadata_(
-            {{"window_size", strings::Printf("%lld", window_size)},
-             {"window_shift", strings::Printf("%lld", window_shift)},
-             {"window_stride", strings::Printf("%lld", window_stride)}}) {
+            {{"window_size",
+              strings::Printf("%lld", static_cast<long long>(window_size))},
+             {"window_shift",
+              strings::Printf("%lld", static_cast<long long>(window_shift))},
+             {"window_stride", strings::Printf("%lld", static_cast<long long>(
+                                                           window_stride))}}) {
     input_->Ref();
   }

----------------------------------------

@@ -78,7 +78,7 @@ REGISTER_OP_GRADIENT("Reciprocal", InvGrad);
 Status SquareGrad(const AttrSlice& attrs, FunctionDef* g) {
   // clang-format off
   return GradForUnaryCwise(g, {
-      FDH::Const("c", 2LL),
+      FDH::Const("c", int64{2}),
       {{"two"}, "Cast", {"c"}, {{"SrcT", DT_INT64}, {"DstT", "$T"}}},
       {{"x2"}, "Mul", {"x", "two"}, {}, {"dy"}},  // x * 2
       {{"dx"}, "Mul", {"dy", "x2"}},              // dy * (x * 2)
@@ -619,7 +619,7 @@ REGISTER_OP_GRADIENT("Xdivy", XdivyGrad);
 Status SquaredDifferenceGrad(const AttrSlice& attrs, FunctionDef* g) {
   // clang-format off
   return GradForBinaryCwise(g, {
-      FDH::Const("c", 2LL),
+      FDH::Const("c", int64{2}),
       {{"two"}, "Cast", {"c"}, {{"SrcT", DT_INT64}, {"DstT", "$T"}}},
       {{"x_sub_y"}, "Sub", {"x", "y"}},
       {{"two_x_sub_y"}, "Mul", {"two", "x_sub_y"}},  // 2 * (x - y)

----------------------------------------

@@ -141,7 +141,8 @@ CurlHttpRequest::CurlHttpRequest(LibCurl* libcurl, Env* env)
   // TODO(b/74351157): Enable HTTP/2.
   // Set up the progress meter.
-  CHECK_CURL_OK(libcurl_->curl_easy_setopt(curl_, CURLOPT_NOPROGRESS, 0ULL));
+  CHECK_CURL_OK(
+      libcurl_->curl_easy_setopt(curl_, CURLOPT_NOPROGRESS, uint64{0}));
   CHECK_CURL_OK(libcurl_->curl_easy_setopt(curl_, CURLOPT_XFERINFODATA, this));
   CHECK_CURL_OK(libcurl_->curl_easy_setopt(curl_, CURLOPT_XFERINFOFUNCTION,
                                            &CurlHttpRequest::ProgressCallback));

----------------------------------------

@@ -400,7 +400,7 @@ bool Env::CreateUniqueFileName(string* prefix, const string& suffix) {
 #else
   int32 pid = static_cast<int32>(getpid());
 #endif
-  uint64 now_microsec = NowMicros();
+  long long now_microsec = NowMicros();  // NOLINT
   *prefix += strings::Printf("%s-%x-%d-%llx", port::Hostname().c_str(), tid,
                              pid, now_microsec);

----------------------------------------

@@ -439,7 +439,7 @@ string HumanReadableNum(int64 value) {
     value = -value;
   }
   if (value < 1000) {
-    Appendf(&s, "%lld", value);
+    Appendf(&s, "%lld", static_cast<long long>(value));
   } else if (value >= static_cast<int64>(1e15)) {
     // Number bigger than 1E15; use that notation.
     Appendf(&s, "%0.3G", static_cast<double>(value));
@@ -472,7 +472,7 @@ string HumanReadableNumBytes(int64 num_bytes) {
     // No fractions for bytes.
     char buf[8];  // Longest possible string is '-XXXXB'
     snprintf(buf, sizeof(buf), "%s%lldB", neg_str,
-             static_cast<int64>(num_bytes));
+             static_cast<long long>(num_bytes));
     return string(buf);
   }

----------------------------------------

@@ -68,8 +68,9 @@ class DebugEventsWriterTest : public ::testing::Test {
   }
   void SetUp() override {
-    dump_root_ = io::JoinPath(testing::TmpDir(),
-                              strings::Printf("%010lld", env()->NowMicros()));
+    dump_root_ = io::JoinPath(
+        testing::TmpDir(),
+        strings::Printf("%010lld", static_cast<long long>(env()->NowMicros())));
   }
   void TearDown() override {

----------------------------------------

@@ -66,7 +66,7 @@ Status EventsWriter::InitIfNeeded() {
   filename_ =
       strings::Printf("%s.out.tfevents.%010lld.%s%s", file_prefix_.c_str(),
-                      static_cast<int64>(time_in_seconds),
+                      static_cast<long long>(time_in_seconds),
                       port::Hostname().c_str(), file_suffix_.c_str());
   // Reset recordio_writer (which has a reference to recordio_file_) so final

----------------------------------------

@@ -53,7 +53,8 @@ struct RunCounter {
 };
 std::string SessionToHandle(Session* session) {
-  return strings::Printf("%llu", reinterpret_cast<uint64>(session));
+  return strings::Printf("%llu", static_cast<unsigned long long>(
+                                     reinterpret_cast<uintptr_t>(session)));
 }
 // The Session interface has many methods of the form:
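
The old line reinterpret_cast the Session* straight to uint64, which silently assumes pointers and uint64 have the same width. Going through uintptr_t (defined to be wide enough for any object pointer) and then widening to `unsigned long long` keeps both conversions well-defined. A sketch of the same two-step cast:

    #include <cstdint>
    #include <cstdio>

    void PrintHandle(const void* p) {
      // uintptr_t holds any object pointer; the outer cast produces exactly
      // the unsigned long long that "%llu" expects.
      std::printf("%llu\n", static_cast<unsigned long long>(
                                reinterpret_cast<std::uintptr_t>(p)));
    }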

----------------------------------------

@@ -2620,8 +2620,8 @@ port::StatusOr<dnn::AlgorithmDesc> GetCudnnConvolutionForwardAlgorithm(
   bool specify_workspace_limit = scratch_allocator != nullptr;
   auto memory_limit_bytes =
       specify_workspace_limit
-          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), 0ll)
-          : 0ll;
+          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), int64{0})
+          : int64{0};
   SE_ASSIGN_OR_RETURN(cudnnConvolutionFwdAlgo_t algo,
                       GetCudnnConvolutionForwardAlgo(
                           cudnn, input_nd, filter, conv, output_nd,
@@ -2673,8 +2673,8 @@ port::StatusOr<dnn::AlgorithmDesc> GetCudnnConvolutionBackwardDataAlgorithm(
   bool specify_workspace_limit = scratch_allocator != nullptr;
   auto memory_limit_bytes =
       specify_workspace_limit
-          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), 0ll)
-          : 0ll;
+          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), int64{0})
+          : int64{0};
   SE_ASSIGN_OR_RETURN(cudnnConvolutionBwdDataAlgo_t algo,
                       GetCudnnConvolutionBackwardDataAlgo(
                           cudnn, input_nd, filter, conv, output_nd,
@@ -2725,8 +2725,8 @@ port::StatusOr<dnn::AlgorithmDesc> GetCudnnConvolutionBackwardFilterAlgorithm(
   bool specify_workspace_limit = scratch_allocator != nullptr;
   auto memory_limit_bytes =
       specify_workspace_limit
-          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), 0ll)
-          : 0ll;
+          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), int64{0})
+          : int64{0};
   SE_ASSIGN_OR_RETURN(cudnnConvolutionBwdFilterAlgo_t algo,
                       GetCudnnConvolutionBackwardFilterAlgo(
                           cudnn, input_nd, filter, conv, output_nd,
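
These three hunks fix a different symptom of the same mismatch: std::max deduces a single type from both arguments, so std::max(GetMemoryLimitInBytes(), 0ll) only compiles while the method's int64 return type happens to be `long long`. Brace-initializing the fallback as int64{0} keeps both arguments the same type on any platform. A sketch (aliasing int64 to std::int64_t here is an assumption):

    #include <algorithm>
    #include <cstdint>

    using int64 = std::int64_t;  // assumption: stands in for the alias above

    int64 MemoryLimit(int64 reported) {
      // std::max(reported, 0ll) would mix int64 and long long and fail to
      // deduce a common type when they differ; int64{0} always matches.
      return std::max(reported, int64{0});
    }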