Use explicit primitive types with strings::Printf

PiperOrigin-RevId: 299753695
Change-Id: Iecbd08903b22442c210c3d404946077535a6089f
commit 8f597046dc (parent b3307f90aa)
Author: A. Unique TensorFlower
Date: 2020-03-08 20:54:49 -07:00
Committed by: TensorFlower Gardener
25 changed files with 83 additions and 57 deletions
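
Editor's note on the pattern applied throughout: %lld and %llu require arguments of exactly long long and unsigned long long, while TensorFlow's int64/uint64 aliases are only guaranteed to be 64-bit and may be defined as long rather than long long on some platforms, so unadorned calls like strings::Printf("%lld", batch_size) can trigger format-mismatch warnings. Casting at the call site makes the argument type match the specifier everywhere. A minimal sketch of the before/after, using standard printf in place of strings::Printf and an illustrative variable name:

#include <cstdint>
#include <cstdio>

int main() {
  std::int64_t batch_size = 1 << 20;  // stand-in for tensorflow::int64
  // May warn under -Wformat where int64_t is long (e.g. LP64 Linux):
  //   std::printf("%lld\n", batch_size);
  // Portable: the cast guarantees the argument is exactly long long.
  std::printf("%lld\n", static_cast<long long>(batch_size));
  return 0;
}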


@@ -980,10 +980,10 @@ TEST_F(ShapeRefinerTest, ConstantValueAsShape_PackInt64) {
   InputList inputs{
       // clang-format off
-      Input(ops::Const<int64>(root, 10LL)),
-      Input(ops::Const<int64>(root, 20LL)),
+      Input(ops::Const<int64>(root, int64{10})),
+      Input(ops::Const<int64>(root, int64{20})),
       Input(Output(scalar_non_const)),
-      Input(ops::Const<int64>(root, 1LL << 40)),
+      Input(ops::Const<int64>(root, int64{1} << 40)),
   };  // clang-format on
   auto pack = ops::Stack(root, inputs);
   TF_ASSERT_OK(root.status());
@@ -1008,8 +1008,8 @@ TEST_F(ShapeRefinerTest, ConstantValueAsShape_PackUnknownDim) {
   Scope root = Scope::NewRootScope();
   InputList inputs{
-      Input(ops::Const<int64>(root, 10LL)),
-      Input(ops::Const<int64>(root, -1LL)),
+      Input(ops::Const<int64>(root, int64{10})),
+      Input(ops::Const<int64>(root, int64{-1})),
   };
   auto pack = ops::Stack(root, inputs);
   TF_ASSERT_OK(root.status());
@@ -1035,8 +1035,8 @@ TEST_F(ShapeRefinerTest, ConstantValueAsShape_PackInvalidInput) {
   // Inputs are length 2 vectors instead of scalars.
   InputList inputs{
-      Input(ops::Const<int64>(root, {10LL, 20LL})),
-      Input(ops::Const<int64>(root, {10LL, 21LL})),
+      Input(ops::Const<int64>(root, {int64{10}, int64{20}})),
+      Input(ops::Const<int64>(root, {int64{10}, int64{21}})),
   };
   auto pack = ops::Stack(root, inputs);
   TF_ASSERT_OK(root.status());
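
A note on the int64{10} spelling adopted in the hunks above: unlike the suffixed literal 10LL, whose type is always long long, brace-initialization yields a value of exactly the named type on every platform and rejects narrowing at compile time. A short sketch, with int64 aliased locally to mirror TensorFlow's typedef:

#include <cstdint>
using int64 = std::int64_t;  // mirrors tensorflow::int64

int64 a = int64{10};         // exactly int64, whatever its underlying type
int64 big = int64{1} << 40;  // the shift is performed on int64, not on int
// int64 bad = int64{1.5};   // ill-formed: braces reject narrowing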


@@ -395,11 +395,12 @@ Status DebugIO::PublishDebugMetadata(
   } else if (absl::StartsWith(absl::AsciiStrToLower(url), kFileURLScheme)) {
     const string dump_root_dir = url.substr(strlen(kFileURLScheme));
     const string core_metadata_path = AppendTimestampToFilePath(
-        io::JoinPath(
-            dump_root_dir,
-            strings::StrCat(DebugNodeKey::kMetadataFilePrefix,
-                            DebugIO::kCoreMetadataTag, "sessionrun",
-                            strings::Printf("%.14lld", session_run_index))),
+        io::JoinPath(dump_root_dir,
+                     strings::StrCat(
+                         DebugNodeKey::kMetadataFilePrefix,
+                         DebugIO::kCoreMetadataTag, "sessionrun",
+                         strings::Printf("%.14lld", static_cast<long long>(
+                                                        session_run_index)))),
         Env::Default()->NowMicros());
     status.Update(DebugFileIO::DumpEventProtoToFile(
         event, string(io::Dirname(core_metadata_path)),


@@ -557,7 +557,7 @@ tensorflow::Status EagerServiceImpl::GetServerContext(
     return errors::InvalidArgument(strings::Printf(
         "Unable to find a context_id matching the specified one "
        "(%llu). Perhaps the worker was restarted, or the context was GC'd?",
-        context_id));
+        static_cast<unsigned long long>(context_id)));
   }
   *server_context = iter->second;


@@ -303,7 +303,8 @@ Status GraphMgr::Register(
   // Inserts one item into table_.
   {
     mutex_lock l(mu_);
-    *graph_handle = strings::Printf("%016llx", ++next_id_);
+    *graph_handle =
+        strings::Printf("%016llx", static_cast<long long>(++next_id_));
     item->handle = *graph_handle;
     CHECK(table_.insert({*graph_handle, item}).second);
   }


@@ -54,7 +54,8 @@ class BatchDatasetOp::Dataset : public DatasetBase {
         input_(input),
         op_version_(op_version),
         traceme_metadata_(
-            {{"batch_size", strings::Printf("%lld", batch_size)},
+            {{"batch_size",
+              strings::Printf("%lld", static_cast<long long>(batch_size))},
              {"drop_remainder", drop_remainder ? "true" : "false"},
              {"parallel_copy", parallel_copy ? "true" : "false"}}) {
     input_->Ref();


@@ -100,7 +100,8 @@ class MapAndBatchDatasetOp::Dataset : public DatasetBase {
         traceme_metadata_(
             {{"autotune",
               num_parallel_calls == model::kAutotune ? "true" : "false"},
-             {"batch_size", strings::Printf("%lld", batch_size)},
+             {"batch_size",
+              strings::Printf("%lld", static_cast<long long>(batch_size))},
              {"drop_remainder", drop_remainder ? "true" : "false"}}) {
     input_->Ref();
   }
@@ -285,8 +286,8 @@ class MapAndBatchDatasetOp::Dataset : public DatasetBase {
   }
   TraceMeMetadata GetTraceMeMetadata() const override {
-    int64 parallelism = -1;
-    int64 max_batch_results = -1;
+    long long parallelism = -1;        // NOLINT
+    long long max_batch_results = -1;  // NOLINT
     // NOTE: We only set the parallelism value if the lock can be acquired
     // right away to avoid introducing tracing overhead.
     if (mu_->try_lock()) {
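
A third variant of the fix appears in the hunk above: rather than casting at every use, the locals are declared long long outright so that later %lld uses need no cast; the // NOLINT presumably silences the lint check that would otherwise steer code toward the fixed-width aliases. A minimal sketch with illustrative names:

#include <cstdio>

void ReportParallelism() {
  long long parallelism = -1;  // NOLINT: matches %lld with no per-use cast
  std::printf("parallelism=%lld\n", parallelism);
}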


@@ -107,8 +107,10 @@ class ParallelInterleaveDatasetOp::Dataset : public DatasetBase {
         output_types_(output_types),
         output_shapes_(output_shapes),
         traceme_metadata_(
-            {{"block_length", strings::Printf("%lld", block_length)},
-             {"cycle_length", strings::Printf("%lld", cycle_length)},
+            {{"block_length",
+              strings::Printf("%lld", static_cast<long long>(block_length))},
+             {"cycle_length",
+              strings::Printf("%lld", static_cast<long long>(cycle_length))},
              {"deterministic",
               deterministic.IsDeterministic() || deterministic.IsDefault()
                   ? "true"


@@ -62,7 +62,8 @@ class RebatchDatasetOp : public UnaryDatasetOpKernel {
         output_types_(output_types),
         output_shapes_(output_shapes),
         traceme_metadata_(
-            {{"num_replicas", strings::Printf("%lld", num_replicas)}}) {
+            {{"num_replicas", strings::Printf("%lld", static_cast<long long>(
+                                                          num_replicas))}}) {
     input_->Ref();
   }


@@ -1206,7 +1206,9 @@ class SnapshotDatasetOp : public UnaryDatasetOpKernel {
     string GetSnapshotFilename() {
       mutex_lock l(mu_);
       string snapshot_data_filename = io::JoinPath(
-          run_dir_, strings::Printf("%08llu.snapshot", next_file_index_));
+          run_dir_, strings::Printf(
+                        "%08llu.snapshot",
+                        static_cast<unsigned long long>(next_file_index_)));
       next_file_index_++;
       return snapshot_data_filename;
     }


@@ -62,8 +62,10 @@ class InterleaveDatasetOp::Dataset : public DatasetBase {
         output_types_(output_types),
         output_shapes_(output_shapes),
         traceme_metadata_(
-            {{"block_length", strings::Printf("%lld", block_length)},
-             {"cycle_length", strings::Printf("%lld", cycle_length)}}) {
+            {{"block_length",
+              strings::Printf("%lld", static_cast<long long>(block_length))},
+             {"cycle_length",
+              strings::Printf("%lld", static_cast<long long>(cycle_length))}}) {
     input_->Ref();
   }


@@ -61,7 +61,8 @@ class PaddedBatchDatasetOp::Dataset : public DatasetBase {
         input_(input),
         op_version_(op_version),
         traceme_metadata_(
-            {{"batch_size", strings::Printf("%lld", batch_size)},
+            {{"batch_size",
+              strings::Printf("%lld", static_cast<long long>(batch_size))},
              {"drop_remainder", drop_remainder ? "true" : "false"}}) {
     input_->Ref();


@@ -172,8 +172,10 @@ class ParallelInterleaveDatasetOp::Dataset : public DatasetBase {
         traceme_metadata_(
             {{"autotune",
               num_parallel_calls == model::kAutotune ? "true" : "false"},
-             {"block_length", strings::Printf("%lld", block_length)},
-             {"cycle_length", strings::Printf("%lld", cycle_length)},
+             {"block_length",
+              strings::Printf("%lld", static_cast<long long>(block_length))},
+             {"cycle_length",
+              strings::Printf("%lld", static_cast<long long>(cycle_length))},
              {"deterministic",
               deterministic.IsNondeterministic() ? "false" : "true"}}) {
     input_->Ref();
@@ -467,8 +469,9 @@ class ParallelInterleaveDatasetOp::Dataset : public DatasetBase {
       mu_->unlock();
     }
     auto result = dataset()->traceme_metadata_;
-    result.push_back(
-        std::make_pair("parallelism", strings::Printf("%lld", parallelism)));
+    result.push_back(std::make_pair(
+        "parallelism",
+        strings::Printf("%lld", static_cast<long long>(parallelism))));
     return result;
   }


@@ -471,8 +471,9 @@ class ParallelMapIterator : public DatasetBaseIterator {
     result.push_back(std::make_pair("autotune", autotune_ ? "true" : "false"));
     result.push_back(
         std::make_pair("deterministic", deterministic_ ? "true" : "false"));
-    result.push_back(
-        std::make_pair("parallelism", strings::Printf("%lld", parallelism)));
+    result.push_back(std::make_pair(
+        "parallelism",
+        strings::Printf("%lld", static_cast<long long>(parallelism))));
     return result;
   }


@@ -278,11 +278,13 @@ class PrefetchDatasetOp::Dataset : public DatasetBase {
       mu_->unlock();
     }
     data::TraceMeMetadata result;
-    result.push_back(
-        std::make_pair("buffer_limit", strings::Printf("%lld", limit)));
+    result.push_back(std::make_pair(
+        "buffer_limit",
+        strings::Printf("%lld", static_cast<long long>(limit))));
     if (dataset()->slack_period_ > 0) {
-      result.push_back(
-          std::make_pair("slack", strings::Printf("%lld", slack_us_.load())));
+      result.push_back(std::make_pair(
+          "slack",
+          strings::Printf("%lld", static_cast<long long>(slack_us_.load()))));
     }
     return result;
   }


@@ -48,8 +48,9 @@ class ShardDatasetOp::Dataset : public DatasetBase {
         input_(input),
         require_non_empty_(require_non_empty),
         traceme_metadata_(
-            {{"index", strings::Printf("%lld", index)},
-             {"num_shards", strings::Printf("%lld", num_shards)}}) {
+            {{"index", strings::Printf("%lld", static_cast<long long>(index))},
+             {"num_shards",
+              strings::Printf("%lld", static_cast<long long>(num_shards))}}) {
     input_->Ref();
   }


@@ -108,7 +108,8 @@ class ShuffleDatasetOpBase::ShuffleDatasetBase : public DatasetBase {
         buffer_size_(buffer_size),
         count_(count),
         traceme_metadata_(
-            {{"buffer_size", strings::Printf("%lld", buffer_size)}}) {
+            {{"buffer_size",
+              strings::Printf("%lld", static_cast<long long>(buffer_size))}}) {
     input_->Ref();
   }


@@ -54,9 +54,12 @@ class WindowDatasetOp::Dataset : public DatasetBase {
         output_dtypes_(input_->output_dtypes().size(), {DT_VARIANT}),
         output_shapes_(input_->output_shapes().size(), TensorShape({})),
         traceme_metadata_(
-            {{"window_size", strings::Printf("%lld", window_size)},
-             {"window_shift", strings::Printf("%lld", window_shift)},
-             {"window_stride", strings::Printf("%lld", window_stride)}}) {
+            {{"window_size",
+              strings::Printf("%lld", static_cast<long long>(window_size))},
+             {"window_shift",
+              strings::Printf("%lld", static_cast<long long>(window_shift))},
+             {"window_stride", strings::Printf("%lld", static_cast<long long>(
+                                                           window_stride))}}) {
     input_->Ref();
   }


@@ -78,7 +78,7 @@ REGISTER_OP_GRADIENT("Reciprocal", InvGrad);
 Status SquareGrad(const AttrSlice& attrs, FunctionDef* g) {
   // clang-format off
   return GradForUnaryCwise(g, {
-      FDH::Const("c", 2LL),
+      FDH::Const("c", int64{2}),
       {{"two"}, "Cast", {"c"}, {{"SrcT", DT_INT64}, {"DstT", "$T"}}},
       {{"x2"}, "Mul", {"x", "two"}, {}, {"dy"}},  // x * 2
       {{"dx"}, "Mul", {"dy", "x2"}},              // dy * (x * 2)
@@ -619,7 +619,7 @@ REGISTER_OP_GRADIENT("Xdivy", XdivyGrad);
 Status SquaredDifferenceGrad(const AttrSlice& attrs, FunctionDef* g) {
   // clang-format off
   return GradForBinaryCwise(g, {
-      FDH::Const("c", 2LL),
+      FDH::Const("c", int64{2}),
       {{"two"}, "Cast", {"c"}, {{"SrcT", DT_INT64}, {"DstT", "$T"}}},
       {{"x_sub_y"}, "Sub", {"x", "y"}},
       {{"two_x_sub_y"}, "Mul", {"two", "x_sub_y"}},  // 2 * (x - y)


@@ -141,7 +141,8 @@ CurlHttpRequest::CurlHttpRequest(LibCurl* libcurl, Env* env)
   // TODO(b/74351157): Enable HTTP/2.
   // Set up the progress meter.
-  CHECK_CURL_OK(libcurl_->curl_easy_setopt(curl_, CURLOPT_NOPROGRESS, 0ULL));
+  CHECK_CURL_OK(
+      libcurl_->curl_easy_setopt(curl_, CURLOPT_NOPROGRESS, uint64{0}));
   CHECK_CURL_OK(libcurl_->curl_easy_setopt(curl_, CURLOPT_XFERINFODATA, this));
   CHECK_CURL_OK(libcurl_->curl_easy_setopt(curl_, CURLOPT_XFERINFOFUNCTION,
                                            &CurlHttpRequest::ProgressCallback));


@@ -400,7 +400,7 @@ bool Env::CreateUniqueFileName(string* prefix, const string& suffix) {
 #else
   int32 pid = static_cast<int32>(getpid());
 #endif
-  uint64 now_microsec = NowMicros();
+  long long now_microsec = NowMicros();  // NOLINT
   *prefix += strings::Printf("%s-%x-%d-%llx", port::Hostname().c_str(), tid,
                              pid, now_microsec);


@@ -439,7 +439,7 @@ string HumanReadableNum(int64 value) {
     value = -value;
   }
   if (value < 1000) {
-    Appendf(&s, "%lld", value);
+    Appendf(&s, "%lld", static_cast<long long>(value));
   } else if (value >= static_cast<int64>(1e15)) {
     // Number bigger than 1E15; use that notation.
     Appendf(&s, "%0.3G", static_cast<double>(value));
@@ -472,7 +472,7 @@ string HumanReadableNumBytes(int64 num_bytes) {
     // No fractions for bytes.
     char buf[8];  // Longest possible string is '-XXXXB'
     snprintf(buf, sizeof(buf), "%s%lldB", neg_str,
-             static_cast<int64>(num_bytes));
+             static_cast<long long>(num_bytes));
     return string(buf);
   }


@@ -68,8 +68,9 @@ class DebugEventsWriterTest : public ::testing::Test {
   }
   void SetUp() override {
-    dump_root_ = io::JoinPath(testing::TmpDir(),
-                              strings::Printf("%010lld", env()->NowMicros()));
+    dump_root_ = io::JoinPath(
+        testing::TmpDir(),
+        strings::Printf("%010lld", static_cast<long long>(env()->NowMicros())));
   }
   void TearDown() override {


@@ -66,7 +66,7 @@ Status EventsWriter::InitIfNeeded() {
   filename_ =
       strings::Printf("%s.out.tfevents.%010lld.%s%s", file_prefix_.c_str(),
-                      static_cast<int64>(time_in_seconds),
+                      static_cast<long long>(time_in_seconds),
                       port::Hostname().c_str(), file_suffix_.c_str());
   // Reset recordio_writer (which has a reference to recordio_file_) so final


@@ -53,7 +53,8 @@ struct RunCounter {
 };
 
 std::string SessionToHandle(Session* session) {
-  return strings::Printf("%llu", reinterpret_cast<uint64>(session));
+  return strings::Printf("%llu", static_cast<unsigned long long>(
+                                     reinterpret_cast<uintptr_t>(session)));
 }
 
 // The Session interface has many methods of the form:
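
Besides the unsigned-long-long cast, the hunk above swaps reinterpret_cast<uint64> for reinterpret_cast<uintptr_t>: uintptr_t is the integer type guaranteed to round-trip a pointer value, and the outer static_cast then widens it to match %llu. A standalone sketch of the same idiom, with an illustrative function name:

#include <cstdint>
#include <cstdio>

void PrintHandle(const void* session) {
  // uintptr_t safely holds the pointer; static_cast makes the argument
  // exactly unsigned long long, matching %llu on every platform.
  std::printf("%llu\n", static_cast<unsigned long long>(
                            reinterpret_cast<std::uintptr_t>(session)));
}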


@@ -2620,8 +2620,8 @@ port::StatusOr<dnn::AlgorithmDesc> GetCudnnConvolutionForwardAlgorithm(
   bool specify_workspace_limit = scratch_allocator != nullptr;
   auto memory_limit_bytes =
       specify_workspace_limit
-          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), 0ll)
-          : 0ll;
+          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), int64{0})
+          : int64{0};
   SE_ASSIGN_OR_RETURN(cudnnConvolutionFwdAlgo_t algo,
                       GetCudnnConvolutionForwardAlgo(
                           cudnn, input_nd, filter, conv, output_nd,
@@ -2673,8 +2673,8 @@ port::StatusOr<dnn::AlgorithmDesc> GetCudnnConvolutionBackwardDataAlgorithm(
   bool specify_workspace_limit = scratch_allocator != nullptr;
   auto memory_limit_bytes =
       specify_workspace_limit
-          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), 0ll)
-          : 0ll;
+          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), int64{0})
+          : int64{0};
   SE_ASSIGN_OR_RETURN(cudnnConvolutionBwdDataAlgo_t algo,
                       GetCudnnConvolutionBackwardDataAlgo(
                           cudnn, input_nd, filter, conv, output_nd,
@@ -2725,8 +2725,8 @@ port::StatusOr<dnn::AlgorithmDesc> GetCudnnConvolutionBackwardFilterAlgorithm(
   bool specify_workspace_limit = scratch_allocator != nullptr;
   auto memory_limit_bytes =
       specify_workspace_limit
-          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), 0ll)
-          : 0ll;
+          ? std::max(scratch_allocator->GetMemoryLimitInBytes(), int64{0})
+          : int64{0};
   SE_ASSIGN_OR_RETURN(cudnnConvolutionBwdFilterAlgo_t algo,
                       GetCudnnConvolutionBackwardFilterAlgo(
                           cudnn, input_nd, filter, conv, output_nd,
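
The int64{0} spelling in these last three hunks fixes more than formatting: std::max deduces a single template parameter from both arguments, so std::max(scratch_allocator->GetMemoryLimitInBytes(), 0ll) fails to compile wherever that return type is an int64 that is not long long. Brace-initializing the literal gives both arguments the same type. A minimal sketch, assuming an illustrative GetLimit returning int64_t:

#include <algorithm>
#include <cstdint>

std::int64_t GetLimit() { return 1 << 20; }  // illustrative stand-in

std::int64_t ClampedLimit() {
  // std::max(GetLimit(), 0ll) is ill-formed where int64_t is long:
  // template argument deduction sees {long, long long} and fails.
  return std::max(GetLimit(), std::int64_t{0});  // both sides are int64_t
}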