Remove unused variables.
PiperOrigin-RevId: 286517146 Change-Id: Ic9566fe02ef33f3156d744fdd6fe8f9036a3a5fb
Parent: 276f22523a
Commit: 61f1a53cbf
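The cleanup itself is mechanical: each touched file loses a local variable that was declared but never read. As a minimal sketch of the pattern (the file, function, and variable names below are hypothetical and not part of this commit), a trivially-typed unused local is exactly what `-Wunused-variable` (enabled by `-Wall`) reports; class-type locals such as the `std::vector` and `string` declarations removed in the hunks below usually need review or static analysis to spot, since compilers tend to stay quiet when a constructor could have side effects.

// unused_variable_demo.cc -- illustrative sketch only; not part of this commit.
// Build: clang++ -std=c++14 -Wall -c unused_variable_demo.cc
#include <string>
#include <vector>

int TotalLength(const std::vector<std::string>& items) {
  int scratch = 0;  // never read again: -Wunused-variable fires on this line
  int total = 0;
  for (const std::string& s : items) {
    total += static_cast<int>(s.size());
  }
  return total;  // the fix is simply to delete the 'scratch' declaration above
}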
@@ -127,7 +127,6 @@ Status MklEagerOpRewrite::SetupNewOp(
  }

  // Copy all attributes to the new op.
  string name;
  const NodeDef& orig_ndef = orig_op->MutableAttrs()->BuildNodeDef();

  AttrSlice attr_list(orig_ndef);
@@ -275,7 +275,6 @@ class FunctionLibraryRuntimeTest : public ::testing::Test {
      opts.runner = nullptr;
    }
    Notification done;
    std::vector<Tensor> out;
    Status status;
    flr->Run(opts, handle, frame, [&status, &done](const Status& s) {
      status = s;
@@ -171,7 +171,6 @@ class FunctionLibraryRuntimeTest : public ::testing::Test {
      opts.runner = nullptr;
    }
    Notification done;
    std::vector<Tensor> out;
    Status status;
    flr->Run(opts, handle, frame, [&status, &done](const Status& s) {
      status = s;
@@ -80,7 +80,6 @@ Status TestCluster::MakeTestCluster(const string& binary_path,

  std::unique_ptr<GrpcSession> session;
  TF_RETURN_IF_ERROR(GrpcSession::Create(options_copy, &session));
  std::vector<DeviceAttributes> device_attributes;
  TF_RETURN_IF_ERROR(session->ListDevices(&ret->devices_));

  *out_cluster = std::move(ret);
@@ -199,7 +199,6 @@ Status SessionMgr::UpdateSession(
  }
  protobuf::RepeatedPtrField<DeviceAttributes> added_cluster_device_attrs_pb(
      added_cluster_device_attrs.begin(), added_cluster_device_attrs.end());
  std::unique_ptr<DeviceMgr> remote_devices;
  AsRemoteDevices(worker_env_->env, added_cluster_device_attrs_pb, nullptr,
                  &added_remote_devices);

@@ -38,7 +38,6 @@ ResourceHandle MakeResourceHandle(
    const std::vector<DtypeAndPartialTensorShape>& dtypes_and_shapes) {
  ResourceHandle result;
  result.set_device(device.name());
  string actual_container;
  result.set_container(container);
  if (name == ResourceHandle::ANONYMOUS_NAME) {
    result.set_name(strings::StrCat("_AnonymousVar", current_id_.fetch_add(1)));
@@ -1486,7 +1486,6 @@ rinfo_.push_back({csinfo_.tanh_grad,
  // false otherwise.
  static bool FusedMatMulRewrite(const Node* n) {
    bool trans_a;
    std::vector<string> fused_ops;

    // Do not rewrite with transpose attribute because reorder has performance
    // impact.
@@ -3594,7 +3594,6 @@ bool ConstantFolding::MergeConcat(bool use_shape_info,

  protobuf::RepeatedPtrField<string> parent_inputs;
  parent_inputs.Swap(parent->mutable_input());
  std::vector<string> ctrl_output;
  // TODO(rmlarsen): IF the child occurs more than once, is it beneficial to
  // collapse it into the parent multiple times? Probably not.
  for (const auto& input : parent_inputs) {
@@ -396,7 +396,6 @@ Status OptimizeGraph(const GrapplerItem& item, int64 num_workers, int64 index,
  MutableGraphView graph(output);
  FunctionLibraryDefinition flib(OpRegistry::Global(), item.graph.library());

  NodeDef target_node;
  absl::flat_hash_set<string> nodes_to_delete;

  NodeDef* sink_node;
@@ -426,7 +426,6 @@ Status DatasetOpsTestBase::CreateOpKernelContext(
  params->op_kernel = kernel;
  params->resource_manager = resource_mgr_.get();
  params->runner = &runner_;
  checkpoint::TensorSliceReaderCacheWrapper slice_reader_cache_wrapper;
  slice_reader_cache_ =
      absl::make_unique<checkpoint::TensorSliceReaderCacheWrapper>();
  params->slice_reader_cache = slice_reader_cache_.get();
@@ -298,7 +298,6 @@ Status ReadMetadataFile(const string& hash_dir,

Status DumpDatasetGraph(const std::string& path, uint64 hash,
                        const GraphDef& graph) {
  std::unique_ptr<WritableFile> file;
  std::string hash_hex =
      strings::StrCat(strings::Hex(hash, strings::kZeroPad16));
  std::string graph_file =
@@ -40,7 +40,6 @@ se::DeviceMemoryBase WrapRedzoneBestEffort(se::RedzoneAllocator* rz_allocator,
  if (RedzoneCheckDisabled()) {
    return buffer;
  }
  se::DeviceMemoryBase output_tensor;
  auto output_rz_or = rz_allocator->AllocateBytes(buffer.size());
  if (!output_rz_or.ok()) {
    static std::once_flag rz_allocation_failure_logged;
@@ -548,7 +548,6 @@ TEST(GraphTransferer, DISABLED_CheckShapeInferencePerformance) {
  inputs.emplace_back("Mul", Tensor(DT_FLOAT, {1, WIDTH, HEIGHT, DEPTH}));
  std::vector<string> output_node_names = {"softmax"};

  RemoteFusedGraphExecuteUtils::TensorShapeMap output_tensor_info0;
  GraphTransferer gt0;
  gt0.EnableStrictCheckMode(false);
  ClockCycleProfiler prof0;
@@ -568,7 +567,6 @@ TEST(GraphTransferer, DISABLED_CheckShapeInferencePerformance) {
  LOG(INFO) << "(0) node count: " << gfi0.node_info_size() << ", "
            << gfi0.const_node_info_size();

  RemoteFusedGraphExecuteUtils::TensorShapeMap output_tensor_info1;
  GraphTransferer gt1;
  gt1.EnableStrictCheckMode(true);
  ClockCycleProfiler prof1;
@@ -1591,8 +1591,6 @@ class MklQuantizedConv2DOp
    const float* min_filter = min_filter_vector.flat<float>().data();
    const float* max_filter = max_filter_vector.flat<float>().data();

    std::vector<mkldnn::primitive> net;

    const float int_const_scale_limit =
        (std::is_same<Tinput, quint8>::value) ? 255.0 * 127.0 : 127.0 * 127.0;
    // Re-scale bias if either of following 2 conditions are met:
@@ -368,7 +368,6 @@ void BatchedNonMaxSuppressionOp(
  }

  std::vector<int> selected;
  std::vector<float> selected_boxes;
  Candidate next_candidate;

  std::sort(candidate_vector.begin(), candidate_vector.end(), cmp);
@@ -87,7 +87,6 @@ class FuseRemoteGraphMultipleAddOpsTest : public ::testing::Test {

  Status FuseByInOut() {
    // Feed output shapes and types
    RemoteFusedGraphExecuteUtils::TensorShapeMap tensor_shape_map;
    GraphDef graph_def_with_shapetype = graph_def_;
    TF_RETURN_IF_ERROR(RemoteFusedGraphExecuteUtils::BuildAndAddTensorShapes(
        input_tensors_, /*dry_run_inference*/ true, &graph_def_with_shapetype));
@@ -369,8 +369,6 @@ class CSRSparseMatMulGPUOp : public OpKernel {

    CSRSparseMatrix c;
    Tensor c_row_ptrs;
    Tensor c_col_inds;
    Tensor c_values;

    // TODO(ebrevdo): Re-enable transposing within the GEMM kernel when cuSparse
    // stops spitting out CUSPARSE_STATUS_INTERNAL_ERROR values for transposes.
@@ -395,7 +395,6 @@ REGISTER_OP("BoostedTreesPredict")
      int num_bucketized_features;
      TF_RETURN_IF_ERROR(
          c->GetAttr("num_bucketized_features", &num_bucketized_features));
      shape_inference::ShapeHandle unused_input;
      shape_inference::DimensionHandle batch_size = c->Dim(c->input(1), 0);
      for (int i = 0; i < num_bucketized_features; ++i) {
        TF_RETURN_IF_ERROR(
@@ -425,7 +424,6 @@ REGISTER_OP("BoostedTreesExampleDebugOutputs")
      int num_bucketized_features;
      TF_RETURN_IF_ERROR(
          c->GetAttr("num_bucketized_features", &num_bucketized_features));
      shape_inference::ShapeHandle unused_input;
      shape_inference::DimensionHandle batch_dim = c->Dim(c->input(1), 0);
      for (int i = 0; i < num_bucketized_features; ++i) {
        TF_RETURN_IF_ERROR(
@@ -525,7 +525,6 @@ TEST(RetryingFileSystemTest, DeleteFile_SuccessWith2ndTry) {
  RetryingFileSystem<MockFileSystem> fs(
      std::move(base_fs), RetryConfig(0 /* init_delay_time_us */));

  std::vector<string> result;
  TF_EXPECT_OK(fs.DeleteFile("gs://path/file.txt"));
}

@@ -536,7 +535,6 @@ TEST(RetryingFileSystemTest, DeleteFile_AllRetriesFailed) {
  RetryingFileSystem<MockFileSystem> fs(
      std::move(base_fs), RetryConfig(0 /* init_delay_time_us */));

  std::vector<string> result;
  const auto& status = fs.DeleteFile("gs://path/file.txt");
  EXPECT_TRUE(absl::StrContains(status.error_message(), "Retriable error #10"))
      << status;
@@ -551,7 +549,6 @@ TEST(RetryingFileSystemTest, CreateDir_SuccessWith2ndTry) {
  RetryingFileSystem<MockFileSystem> fs(
      std::move(base_fs), RetryConfig(0 /* init_delay_time_us */));

  std::vector<string> result;
  TF_EXPECT_OK(fs.CreateDir("gs://path/newdir"));
}

@@ -562,7 +559,6 @@ TEST(RetryingFileSystemTest, CreateDir_AllRetriesFailed) {
  RetryingFileSystem<MockFileSystem> fs(
      std::move(base_fs), RetryConfig(0 /* init_delay_time_us */));

  std::vector<string> result;
  const auto& status = fs.CreateDir("gs://path/newdir");
  EXPECT_TRUE(absl::StrContains(status.error_message(), "Retriable error #10"))
      << status;
@@ -577,7 +573,6 @@ TEST(RetryingFileSystemTest, DeleteDir_SuccessWith2ndTry) {
  RetryingFileSystem<MockFileSystem> fs(
      std::move(base_fs), RetryConfig(0 /* init_delay_time_us */));

  std::vector<string> result;
  TF_EXPECT_OK(fs.DeleteDir("gs://path/dir"));
}

@@ -588,7 +583,6 @@ TEST(RetryingFileSystemTest, DeleteDir_AllRetriesFailed) {
  RetryingFileSystem<MockFileSystem> fs(
      std::move(base_fs), RetryConfig(0 /* init_delay_time_us */));

  std::vector<string> result;
  const auto& status = fs.DeleteDir("gs://path/dir");
  EXPECT_TRUE(absl::StrContains(status.error_message(), "Retriable error #10"))
      << status;
@@ -170,7 +170,6 @@ string TFShow::FormatNode(ShowNode* node, const Options& opts) const {
    }
    info.push_back(fops);
  }
  std::vector<string> attrs;
  if (opts.select.find(kShown[0]) != opts.select.end()) {
    info.push_back(FormatNodeMemory(node, node->proto().requested_bytes(),
                                    node->proto().total_requested_bytes()));
@@ -426,8 +426,6 @@ static void MergeOverrideHelper(const string& target, const string& name,
}

TEST(DeviceNameUtilsTest, MergeDevNames) {
  DeviceNameUtils::ParsedName target;

  // Idempotence tests.
  MergeDevNamesHelper("", "", "");
  MergeDevNamesHelper("/job:foo/replica:1/task:2/cpu:1",