Remove some unused C++ variables in core/.

PiperOrigin-RevId: 235617985
This commit is contained in:
A. Unique TensorFlower 2019-02-25 15:52:40 -08:00 committed by TensorFlower Gardener
parent b56a703f6b
commit 36724b63f6
27 changed files with 0 additions and 39 deletions

View File

@@ -95,7 +95,6 @@ string DeviceMgr::DeviceMappingString() const {
}
Status DeviceMgr::LookupDevice(StringPiece name, Device** device) const {
Status s;
auto iter = device_map_.find(name);
if (iter == device_map_.end()) {
std::vector<StringPiece> device_names;

View File

@@ -31,7 +31,6 @@ namespace {
class DeviceResolverLocalTest : public ::testing::Test {
protected:
DeviceResolverLocalTest() {
ConfigProto cp;
SessionOptions options;
string task_name = "/job:localhost/replica:0/task:0";
auto* device_count = options.config.mutable_device_count();

View File

@@ -684,7 +684,6 @@ Status FunctionLibraryRuntimeImpl::Instantiate(
}
}
Status s;
const FunctionLibraryDefinition* lib_def =
options.overlay_lib ? options.overlay_lib : base_lib_def_;
FunctionBody* fbody = nullptr;

View File

@@ -107,7 +107,6 @@ class FakeCache : public TestWorkerCache {
WorkerInterface* wi = it->second;
GetStatusRequest req;
GetStatusResponse resp;
Notification note;
Status status = wi->GetStatus(&req, &resp);
if (!status.ok()) {
done(status);

View File

@@ -147,7 +147,6 @@ class FakeCache : public TestWorkerCache {
WorkerInterface* wi = it->second;
GetStatusRequest req;
GetStatusResponse resp;
Notification note;
Status status = wi->GetStatus(&req, &resp);
if (!status.ok()) {
done(status);
@@ -271,7 +270,6 @@ TEST_F(CollRMADistTest, ProdFirstOK) {
producer_status.Update(s);
producer_note.Notify();
});
Status status;
Device* dst_device = nullptr;
string dev_name = "CPU:0";
TF_EXPECT_OK(device_mgrs_[0]->LookupDevice(dev_name, &dst_device));
@@ -300,7 +298,6 @@ TEST_F(CollRMADistTest, ConsFirstOK) {
Status producer_status;
FakeWorker* wi = workers_[1];
const string kBufKey = "fake_buf_key";
Status status;
Device* dst_device = nullptr;
string dev_name = "CPU:0";
TF_EXPECT_OK(device_mgrs_[0]->LookupDevice(dev_name, &dst_device));
@@ -333,7 +330,6 @@ TEST_F(CollRMADistTest, ConsFirstAbort) {
Notification consumer_note;
Status consumer_status;
const string kBufKey = "fake_buf_key";
Status status;
Device* dst_device = nullptr;
string dev_name = "CPU:0";
TF_EXPECT_OK(device_mgrs_[0]->LookupDevice(dev_name, &dst_device));

View File

@@ -109,7 +109,6 @@ class FakeCache : public TestWorkerCache {
WorkerInterface* wi = it->second;
GetStatusRequest req;
GetStatusResponse resp;
Notification note;
Status status = wi->GetStatus(&req, &resp);
if (!status.ok()) {
done(status);

View File

@@ -140,7 +140,6 @@ TEST_F(SessionMgrTest, CreateSessionIsolateSessionState) {
}
TEST_F(SessionMgrTest, LegacySession) {
ServerDef server_def;
string session_handle = "";
std::shared_ptr<WorkerSession> session;
TF_EXPECT_OK(mgr_.WorkerSessionForSession(session_handle, &session));
@@ -150,7 +149,6 @@ TEST_F(SessionMgrTest, LegacySession) {
}
TEST_F(SessionMgrTest, UnknownSessionHandle) {
ServerDef server_def;
string session_handle = "unknown_session_handle";
std::shared_ptr<WorkerSession> session;
Status s = mgr_.WorkerSessionForSession(session_handle, &session);

View File

@@ -228,7 +228,6 @@ string PBTxtFromMultiline(StringPiece multiline_pbtxt) {
// Add every line to unescaped until we see the "END" string.
string unescaped;
bool first = true;
string suffix;
while (!multiline_pbtxt.empty()) {
SplitAt('\n', &multiline_pbtxt, &line);
if (str_util::ConsumePrefix(&line, end)) break;

View File

@@ -318,7 +318,6 @@ void BM_SendRecv(int iters) {
Tensor val(DT_STRING, TensorShape({}));
bool is_dead = false;
Rendezvous::Args args;
Status s;
if (iters > 0) {
while (iters--) {
TF_CHECK_OK(rendez->Send(KeyFoo(), args, orig, is_dead));
@@ -343,7 +342,6 @@ void BM_PingPong(int iters) {
Tensor foo(DT_STRING, TensorShape({}));
bool is_dead = false;
Rendezvous::Args args;
Status s;
for (int i = 0; i < iters; ++i) {
TF_CHECK_OK(rendez->Recv(KeyFoo(), args, &foo, &is_dead));
TF_CHECK_OK(rendez->Send(KeyBar(), args, bar, is_dead));
@@ -354,7 +352,6 @@ void BM_PingPong(int iters) {
Tensor bar(DT_STRING, TensorShape({}));
bool is_dead = false;
Rendezvous::Args args;
Status s;
for (int i = 0; i < iters; ++i) {
TF_CHECK_OK(rendez->Send(KeyFoo(), args, foo, is_dead));
TF_CHECK_OK(rendez->Recv(KeyBar(), args, &bar, &is_dead));

View File

@@ -128,7 +128,6 @@ string TensorSlice::DebugString() const {
if (!first) {
buffer.append(":");
}
string s;
if (IsFullAt(d)) {
buffer.append("-");
} else {

View File

@@ -156,7 +156,6 @@ TEST(AlgorithmTest, ReversePostOrderStable) {
TEST(AlgorithmTest, PostOrderWithEdgeFilter) {
GraphDefBuilder b(GraphDefBuilder::kFailImmediately);
string error;
Node* n0 = ops::SourceOp("TestParams", b.opts().WithName("n0"));
Node* n1 = ops::UnaryOp("TestUnary", n0, b.opts().WithName("n1"));
Node* n2 = ops::UnaryOp("TestUnary", n1, b.opts().WithName("n2"));

View File

@@ -2366,7 +2366,6 @@ void MklLayoutRewritePass::CopyAttrsQuantizedPooling(const Node* orig_node,
NodeBuilder* nb,
bool change_format) {
DataType T;
string data_format;
string padding;
std::vector<int32> ksize, strides;

View File

@@ -455,7 +455,6 @@ Status SingleMachine::ClearAllocatorStats() const {
std::vector<Device*> devices = device_mgr->ListDevices();
for (Device* device : devices) {
AllocatorStats stats;
auto* allocator = device->GetAllocator(AllocatorAttributes());
if (!allocator->TracksAllocationSizes()) {
return Status(error::INVALID_ARGUMENT,

View File

@@ -204,8 +204,6 @@ class DeserializeSparseOp : public OpKernel {
target_shape.vec<int64>()(i + ndims - 1) = output.shape().data()[i + 1];
}
Tensor output_indices;
Tensor output_shape;
Reshape(context, output.indices(), input_shape, target_shape,
0 /* output indices index */, 2 /* output shape index */);
context->set_output(1, output.values());

View File

@@ -338,7 +338,6 @@ Status GraphTransferer::TransformGraphToAddAggregatedInputNode(
shapes.emplace_back(input_node_info_list.at(i).second.shape());
}
NodeDef input_node_def;
auto builder =
NodeBuilder(AGGREGATED_INPUT_NODE_NAME, "RemoteFusedGraphExecute")
.Input(std::vector<NodeBuilder::NodeOut>{})

View File

@@ -582,7 +582,6 @@ class MapUnstageOp : public OpKernel {
const Tensor* key_tensor;
const Tensor* indices_tensor;
OpInputList values_tensor;
OP_REQUIRES_OK(ctx, ctx->input("key", &key_tensor));
OP_REQUIRES_OK(ctx, ctx->input("indices", &indices_tensor));
@@ -644,7 +643,6 @@ class MapPeekOp : public OpKernel {
const Tensor* key_tensor;
const Tensor* indices_tensor;
OpInputList values_tensor;
OP_REQUIRES_OK(ctx, ctx->input("key", &key_tensor));
OP_REQUIRES_OK(ctx, ctx->input("indices", &indices_tensor));

View File

@@ -44,11 +44,8 @@ TEST(MfccDctTest, AgreesWithMatlab) {
TEST(MfccDctTest, InitializeFailsOnInvalidInput) {
MfccDct dct1;
EXPECT_FALSE(dct1.Initialize(-50, 1));
MfccDct dct2;
EXPECT_FALSE(dct1.Initialize(10, -4));
MfccDct dct3;
EXPECT_FALSE(dct1.Initialize(-1, -1));
MfccDct dct4;
EXPECT_FALSE(dct1.Initialize(20, 21));
}

View File

@@ -34,8 +34,6 @@ class SparseReshapeOp : public OpKernel {
explicit SparseReshapeOp(OpKernelConstruction* context) : OpKernel(context) {}
void Compute(OpKernelContext* context) override {
Tensor output_indices;
Tensor output_shape;
Reshape(context, context->input(0), context->input(1), context->input(2),
0 /* output indices index */, 1 /* output shape index */);
}

View File

@@ -34,7 +34,6 @@ TEST(MapUtil, Find) {
m["foo"] = "bar";
EXPECT_EQ("bar", gtl::FindWithDefault(m, "foo", ""));
EXPECT_EQ("bar", *gtl::FindOrNull(m, "foo"));
string str;
EXPECT_TRUE(m.count("foo") > 0);
EXPECT_EQ(m["foo"], "bar");
}

View File

@@ -337,7 +337,6 @@ TEST(BufferedInputStream, ReadAll_Empty) {
for (auto buf_size : BufferSizes()) {
RandomAccessInputStream input_stream(file.get());
string read;
BufferedInputStream in(&input_stream, buf_size);
string contents;
TF_ASSERT_OK(in.ReadAll(&contents));
@@ -355,7 +354,6 @@ TEST(BufferedInputStream, ReadAll_Text) {
for (auto buf_size : BufferSizes()) {
RandomAccessInputStream input_stream(file.get());
string read;
BufferedInputStream in(&input_stream, buf_size);
string contents;
TF_ASSERT_OK(in.ReadAll(&contents));

View File

@@ -133,7 +133,6 @@ Status Table::InternalGet(const StringPiece& k, void* arg,
Iterator* iiter = rep_->index_block->NewIterator();
iiter->Seek(k);
if (iiter->Valid()) {
BlockHandle handle;
Iterator* block_iter = BlockReader(this, iiter->value());
block_iter->Seek(k);
if (block_iter->Valid()) {

View File

@@ -396,7 +396,6 @@ void BM_WriteNum(int n, T multiplier) {
template <typename T>
void BM_ReadNum(int n, T multiplier) {
string x;
random::PhiloxRandom philox(301, 17);
random::SimplePhilox rnd(&philox);
// Use enough distinct values to confuse the branch predictor

View File

@@ -400,7 +400,6 @@ REGISTER_OP("BoostedTreesMakeQuantileSummaries")
c->WithRank(c->input(num_features), 1, &example_weights_shape));
for (int i = 0; i < num_features; ++i) {
ShapeHandle feature_shape;
DimensionHandle unused_dim;
TF_RETURN_IF_ERROR(c->WithRank(c->input(i), 1, &feature_shape));
// the columns are value, weight, min_rank, max_rank.
c->set_output(i, c->MakeShape({c->UnknownDim(), 4}));

View File

@@ -205,7 +205,6 @@ TEST(MathOpsTest, Select_ShapeFn) {
typedef std::vector<std::pair<TensorShapeProto, DataType>> ShapeDtypeV;
std::vector<std::unique_ptr<ShapeDtypeV>> handle_data;
std::unique_ptr<shape_inference::InferenceContext> c;
Status run_status;
auto run_inference_for_handles = [&]() -> Status {
CHECK(op_reg_data->shape_inference_fn != nullptr);
c.reset(new shape_inference::InferenceContext(

View File

@@ -97,7 +97,6 @@ REGISTER_OP("SparseTensorDenseMatMul")
ShapeHandle unused;
ShapeHandle b;
ShapeHandle a_shape;
ShapeHandle a_shape_shape;
TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 2, &unused)); // a_indices
TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &unused)); // a_values
TF_RETURN_IF_ERROR(c->MakeShapeFromShapeTensor(2, &a_shape));

View File

@@ -63,7 +63,6 @@ TEST_F(TFProfTensorTest, Basics) {
"", {});
const GraphNodeProto& root = tf_stats_->ShowGraphNode("scope", opts);
GraphNodeProto expected;
EXPECT_EQ(root.children(0).name(), "DW");
EXPECT_GT(root.children(0).tensor_value().value_double_size(), 10);
EXPECT_EQ(root.children(1).name(), "DW2");

View File

@@ -70,7 +70,6 @@ versions {
TF_ASSERT_OK(session->Run(run_options, {}, {"myconstant:0"}, {}, &outputs,
&run_metadata));
StatSummarizerOptions opts;
StatSummarizer stats(graph_def);
stats.ProcessStepStats(run_metadata.step_stats());