Remove redundant get() calls and string conversions
PiperOrigin-RevId: 157497932
commit dcc3cdce8d
parent af2b9d8757
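Most of the edits below follow one pattern: std::unique_ptr already defines operator== and operator!= against nullptr, a contextual conversion to bool, and operator*, so wrapping the comparison or dereference in .get() adds nothing. A minimal standalone sketch of the equivalences (plain standard C++, not TensorFlow code; the variable names are made up for illustration):

#include <cassert>
#include <memory>

int main() {
  std::unique_ptr<int> p;   // empty
  assert(p == nullptr);     // equivalent to p.get() == nullptr
  assert(!p);               // equivalent to !p.get(), via the bool conversion

  p.reset(new int(42));
  assert(p != nullptr);     // equivalent to p.get() != nullptr
  if (p) {                  // contextual conversion to bool, no .get() needed
    assert(*p == 42);       // equivalent to *p.get()
  }
  return 0;
}

Note that .get() stays where a raw pointer really is required: for example, inflateEnd(z_stream_.get()) and new io::RecordWriter(recordio_file_.get()) in the hunks below are left untouched.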
@@ -77,14 +77,14 @@ EventMgr::~EventMgr() {
 }

 void EventMgr::StartPollingLoop() {
-  CHECK(polling_stopped_.get() == nullptr);
+  CHECK(polling_stopped_ == nullptr);
   stop_polling_.reset(new Notification);
   polling_stopped_.reset(new Notification);
   threadpool_.Schedule([this]() { PollLoop(); });
 }

 void EventMgr::StopPollingLoop() {
-  if (stop_polling_.get()) {
+  if (stop_polling_) {
     stop_polling_->Notify();
     polling_stopped_->WaitForNotification();
     stop_polling_.reset(nullptr);
@@ -74,8 +74,8 @@ SimpleGraphExecutionState::~SimpleGraphExecutionState() {
   std::unique_ptr<SimpleGraphExecutionState> ret(
       new SimpleGraphExecutionState(graph_def, options));

-  TF_RETURN_IF_ERROR(AddDefaultAttrsToGraphDef(&ret->original_graph_def_,
-                                               *ret->flib_def_.get(), 0));
+  TF_RETURN_IF_ERROR(
+      AddDefaultAttrsToGraphDef(&ret->original_graph_def_, *ret->flib_def_, 0));
   // TODO(mrry): Refactor InitBaseGraph() so that we don't have to
   // pass an empty BuildGraphOptions (that isn't going to be used when
   // place_pruned_graph is false).
@@ -103,8 +103,8 @@ SimpleGraphExecutionState::~SimpleGraphExecutionState() {
   GraphDef temp(graph_def);
   std::unique_ptr<SimpleGraphExecutionState> ret(
       new SimpleGraphExecutionState(&temp, options));
-  TF_RETURN_IF_ERROR(AddDefaultAttrsToGraphDef(&ret->original_graph_def_,
-                                               *ret->flib_def_.get(), 0));
+  TF_RETURN_IF_ERROR(
+      AddDefaultAttrsToGraphDef(&ret->original_graph_def_, *ret->flib_def_, 0));
   TF_RETURN_IF_ERROR(ret->InitBaseGraph(subgraph_options));
   TF_RETURN_IF_ERROR(ret->BuildGraph(subgraph_options, out_client_graph));
   *out_state = std::move(ret);
@@ -139,7 +139,7 @@ Status SimpleGraphExecutionState::Extend(
   int old_node_size = gdef.node_size();
   gdef.mutable_node()->MergeFrom(extension_def.node());
   TF_RETURN_IF_ERROR(
-      AddDefaultAttrsToGraphDef(&gdef, *flib_def_.get(), old_node_size));
+      AddDefaultAttrsToGraphDef(&gdef, *flib_def_, old_node_size));
   // Merge versions
   if (gdef.has_versions()) {
     if (gdef.versions().producer() != extension_def.versions().producer()) {
@@ -181,7 +181,7 @@ Status SimpleGraphExecutionState::Extend(
   if (gdef.versions().producer() >= 5) {
     // Validate the graph: we assume that merging two valid graphs
     // should maintain graph validity.
-    TF_RETURN_IF_ERROR(graph::ValidateGraphDef(gdef, *flib_def_.get()));
+    TF_RETURN_IF_ERROR(graph::ValidateGraphDef(gdef, *flib_def_));
   }

   // 6. Add the extension.
@@ -196,7 +196,7 @@ Status SimpleGraphExecutionState::Extend(
       new SimpleGraphExecutionState(&gdef, combined_options));

   TF_RETURN_IF_ERROR(AddDefaultAttrsToGraphDef(
-      &new_execution_state->original_graph_def_, *flib_def_.get(), 0));
+      &new_execution_state->original_graph_def_, *flib_def_, 0));
   if (!session_options_->config.graph_options().place_pruned_graph()) {
     // TODO(mrry): Refactor InitBaseGraph() so that we don't have to
     // pass an empty BuildGraphOptions (that isn't going to be used
@@ -313,7 +313,7 @@ Status SimpleGraphExecutionState::InitBaseGraph(
   CostModel costs(true /*is_global*/);
   {
     mutex_lock l(mu_);
-    costs_.InitFromGraph(*new_graph.get());
+    costs_.InitFromGraph(*new_graph);
     costs.MergeFromGlobal(costs_);
   }

@@ -194,7 +194,7 @@ TEST_F(ResizeBilinearOpTest, TestBilinearRandom2x2To1x1) {
   ResizeBilinearBaseline(input->tensor<float, 4>(),
                          expected->tensor<float, 4>());
   EXPECT_EQ(input->flat<float>()(0), output->flat<float>()(0));
-  test::ExpectTensorEqual<float>(*expected.get(), *output);
+  test::ExpectTensorEqual<float>(*expected, *output);
 }

 TEST_F(ResizeBilinearOpAlignCornersTest, TestBilinearAlignCorners2x2To1x1) {
@@ -17,7 +17,7 @@ limitations under the License.
 // libxsmm is not available.

 #ifndef TENSORFLOW_USE_LIBXSMM
-void dummy_xsmm_conv2d_ensure_file_is_not_empty(void);
+void dummy_xsmm_conv2d_ensure_file_is_not_empty();
 #else

 #define USE_EIGEN_TENSOR
@@ -37,7 +37,7 @@ ZlibInputStream::ZlibInputStream(
 }

 ZlibInputStream::~ZlibInputStream() {
-  if (z_stream_.get()) {
+  if (z_stream_) {
     inflateEnd(z_stream_.get());
   }
 }
@@ -36,7 +36,7 @@ ZlibOutputBuffer::ZlibOutputBuffer(
       z_stream_(new z_stream) {}

 ZlibOutputBuffer::~ZlibOutputBuffer() {
-  if (z_stream_.get()) {
+  if (z_stream_) {
     LOG(WARNING) << "ZlibOutputBuffer::Close() not called. Possible data loss";
   }
 }
@@ -71,17 +71,17 @@ void TestJPEG(Env* env, const string& jpegfile) {
   // Set min_acceptable_fraction to something insufficient
   flags.min_acceptable_fraction = 0.8;
   imgdata.reset(Uncompress(temp, fsize / 2, flags, &w, &h, &c, nullptr));
-  CHECK(imgdata.get() == nullptr);
+  CHECK(imgdata == nullptr);

   // Now, use a value that makes fsize/2 be enough for a black-filling
   flags.min_acceptable_fraction = 0.01;
   imgdata.reset(Uncompress(temp, fsize / 2, flags, &w, &h, &c, nullptr));
-  CHECK(imgdata.get() != nullptr);
+  CHECK(imgdata != nullptr);

   // Finally, uncompress the whole data
   flags.min_acceptable_fraction = 1.0;
   imgdata.reset(Uncompress(temp, fsize, flags, &w, &h, &c, nullptr));
-  CHECK(imgdata.get() != nullptr);
+  CHECK(imgdata != nullptr);
 }

 TEST(JpegMemTest, Jpeg) {
@@ -267,7 +267,7 @@ TEST(JpegMemTest, ChromaDownsampling) {
   int64 num_warnings;
   std::unique_ptr<uint8[]> uncompressed(Uncompress(
       jpeg.c_str(), jpeg.size(), unflags, &w, &h, &c, &num_warnings));
-  CHECK(uncompressed.get() != nullptr);
+  CHECK(uncompressed != nullptr);
   CHECK_EQ(num_warnings, 0);

   // Recompress the JPEG with and without chroma downsampling
@@ -32,15 +32,15 @@ static void TestGradAndIndicesErrorHandling(const ShapeInferenceTestOp& op,

   // mismatch between grad[1] and var[1].
   INFER_ERROR("Dimension 1 in both shapes must be equal", op,
-              shape_spec("[?,1]", "[?,2];[?]").c_str());
+              shape_spec("[?,1]", "[?,2];[?]"));
   // grad[0] and indices[0] must match.
   INFER_ERROR("Dimensions must be equal, but are 1 and 2", op,
-              shape_spec("?", "[2,?];[1]").c_str());
+              shape_spec("?", "[2,?];[1]"));
   // grad is wrong rank.
-  INFER_ERROR("must be equal rank", op, shape_spec("[1]", "[?,2];[?]").c_str());
+  INFER_ERROR("must be equal rank", op, shape_spec("[1]", "[?,2];[?]"));
   // indices is wrong rank.
   INFER_ERROR("Shape must be rank 1 but is rank 2", op,
-              shape_spec("[?]", "[?];[1,2]").c_str());
+              shape_spec("[?]", "[?];[1,2]"));
 }

 TEST(TrainingOpsTest, ApplyGradientDescent_ShapeFn) {
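In the shape-inference test above, dropping .c_str() makes sense because the expanded check ends up handling the spec as a string anyway, so converting to const char* only forces an extra temporary on the way back. The sketch below uses hypothetical stand-ins (CheckInferenceError and BuildSpec are not the real INFER_ERROR macro or shape_spec helper) to show the idea:

#include <cassert>
#include <string>

// Hypothetical stand-in for the test's shape_spec() helper.
std::string BuildSpec(const std::string& grad_and_indices,
                      const std::string& rest) {
  return grad_and_indices + ";" + rest;
}

// Hypothetical stand-in for the error check: the spec is consumed as a
// std::string either way, so passing spec.c_str() would only create an
// extra temporary string inside the callee.
void CheckInferenceError(const std::string& expected_substr,
                         const std::string& spec) {
  (void)expected_substr;
  assert(!spec.empty());
}

int main() {
  // Old style: CheckInferenceError("...", BuildSpec("[?,1]", "[?,2];[?]").c_str());
  // New style: hand the string straight through.
  CheckInferenceError("Dimension 1 in both shapes must be equal",
                      BuildSpec("[?,1]", "[?,2];[?]"));
  return 0;
}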
@@ -36,7 +36,7 @@ EventsWriter::EventsWriter(const string& file_prefix)
       num_outstanding_events_(0) {}

 bool EventsWriter::InitIfNeeded() {
-  if (recordio_writer_.get() != nullptr) {
+  if (recordio_writer_ != nullptr) {
     CHECK(!filename_.empty());
     if (FileHasDisappeared()) {
       // Warn user of data loss and let .reset() below do basic cleanup.
@@ -63,7 +63,7 @@ bool EventsWriter::InitIfNeeded() {
     return false;
   }
   recordio_writer_.reset(new io::RecordWriter(recordio_file_.get()));
-  if (recordio_writer_.get() == nullptr) {
+  if (recordio_writer_ == nullptr) {
     LOG(ERROR) << "Could not create record writer";
     return false;
   }
@@ -90,7 +90,7 @@ string EventsWriter::FileName() {
 }

 void EventsWriter::WriteSerializedEvent(StringPiece event_str) {
-  if (recordio_writer_.get() == nullptr) {
+  if (recordio_writer_ == nullptr) {
     if (!InitIfNeeded()) {
       LOG(ERROR) << "Write failed because file could not be opened.";
       return;
@@ -110,7 +110,7 @@ void EventsWriter::WriteEvent(const Event& event) {

 bool EventsWriter::Flush() {
   if (num_outstanding_events_ == 0) return true;
-  CHECK(recordio_file_.get() != nullptr) << "Unexpected NULL file";
+  CHECK(recordio_file_ != nullptr) << "Unexpected NULL file";

   if (!recordio_writer_->Flush().ok()) {
     LOG(ERROR) << "Failed to flush " << num_outstanding_events_ << " events to "
@@ -139,7 +139,7 @@ bool EventsWriter::Flush() {

 bool EventsWriter::Close() {
   bool return_value = Flush();
-  if (recordio_file_.get() != nullptr) {
+  if (recordio_file_ != nullptr) {
     Status s = recordio_file_->Close();
     if (!s.ok()) {
       LOG(ERROR) << "Error when closing previous event file: " << filename_
@@ -369,7 +369,7 @@ port::StatusOr<DriverVersion> Diagnostician::FindKernelDriverVersion() {
     LOG(INFO) << "driver version file contents: \"\"\"" << contents.begin()
               << "\"\"\"";
     fclose(driver_version_file);
-    return FindKernelModuleVersion(string{contents.begin()});
+    return FindKernelModuleVersion(contents.begin());
   }

   auto status =
@@ -177,10 +177,11 @@ string TFOp::FormatNode(OpNode* node, OpNode* root, const Options& opts) {
                  root->proto().total_exec_micros();
     }

-    attrs.push_back(strings::Printf("%30s", strings::Printf(
-        "%s (%.2f%%, %.2f%%)",
-        FormatTime(node->proto().exec_micros()).c_str(),
-        accu_pct, pct).c_str()).c_str());
+    attrs.push_back(strings::Printf(
+        "%30s", strings::Printf("%s (%.2f%%, %.2f%%)",
+                                FormatTime(node->proto().exec_micros()).c_str(),
+                                accu_pct, pct)
+                    .c_str()));
   }

   if (opts.select.find(kShown[2]) != opts.select.end()) {
@@ -192,10 +193,12 @@ string TFOp::FormatNode(OpNode* node, OpNode* root, const Options& opts) {
       pct = 100.0 * node->proto().parameters() /
             root->proto().total_parameters();
     }
-    attrs.push_back(strings::Printf("%30s", strings::Printf(
-        "%s params (%.2f%%, %.2f%%)",
-        FormatNumber(node->proto().parameters()).c_str(),
-        accu_pct, pct).c_str()).c_str());
+    attrs.push_back(strings::Printf(
+        "%30s",
+        strings::Printf("%s params (%.2f%%, %.2f%%)",
+                        FormatNumber(node->proto().parameters()).c_str(),
+                        accu_pct, pct)
+            .c_str()));
   }

   if (opts.select.find(kShown[3]) != opts.select.end()) {
@@ -41,7 +41,7 @@ limitations under the License.
 using tensorflow::str_util::Split;

 void completion(const char* buf, linenoiseCompletions* lc) {
-  tensorflow::string buf_str = tensorflow::string(buf);
+  tensorflow::string buf_str = buf;
   if (buf_str.find(" ") == buf_str.npos) {
     for (const char* opt : tensorflow::tfprof::kCmds) {
       if (tensorflow::string(opt).find(buf_str) == 0) {
@@ -246,7 +246,7 @@ int main(int argc, char** argv) {
   linenoiseHistoryLoad(".tfprof_history.txt");

   for (char* line = nullptr; (line = linenoise("tfprof> ")) != nullptr;) {
-    tensorflow::string line_s = tensorflow::string(line);
+    tensorflow::string line_s = line;
     free(line);

     if (line_s.empty()) {
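The diagnostician and tfprof changes above rely on the same implicit conversion: std::string (and tensorflow::string, which is typically just an alias for it) has a converting constructor from const char*, so wrapping a C string in string{...} or tensorflow::string(...) before initializing a variable or passing an argument is redundant. A small standard-C++ sketch; Describe() is a made-up function standing in for a callee that takes a string, such as FindKernelModuleVersion(...) in the diff above:

#include <cassert>
#include <string>

// Made-up callee taking a string parameter.
std::string Describe(const std::string& version) { return "driver " + version; }

int main() {
  const char* line = "show --max_depth 3";

  // Old style: std::string line_s = std::string(line);
  // New style: the converting constructor from const char* runs implicitly.
  std::string line_s = line;
  assert(line_s == "show --max_depth 3");

  // Same at a call site: no explicit string{...} wrapper needed.
  assert(Describe("375.26") == "driver 375.26");
  return 0;
}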