Deprecated and removed uses of the TPUEmbeddingOutputLayout proto and the output_layout field in TPUEmbeddingConfiguration.

PiperOrigin-RevId: 334680354
Change-Id: I14f1907cbef71e8b5f5629d837cbf184948e39a7
A. Unique TensorFlower 2020-09-30 14:31:49 -07:00 committed by TensorFlower Gardener
parent 44d72d9303
commit 84df34f818
6 changed files with 13 additions and 76 deletions
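In practical terms, the activation shapes for RecvTPUEmbeddingActivations are now derived directly from the table descriptors instead of from an explicit output layout. The following hypothetical helper is not part of this commit (the generated-proto include paths and the name ActivationShapeForTable are assumptions); it just mirrors the per-table shape that the simplified computation below produces.

#include "tensorflow/core/framework/tensor_shape.pb.h"
#include "tensorflow/core/protobuf/tpu/tpu_embedding_configuration.pb.h"

// Hypothetical illustration only: the shape of the activation tensor that
// table `table_id` produces under the simplified computation in this commit.
tensorflow::TensorShapeProto ActivationShapeForTable(
    const tensorflow::tpu::TPUEmbeddingConfiguration& config, int table_id) {
  const auto& table = config.table_descriptor(table_id);
  tensorflow::TensorShapeProto shape;
  // One row per (feature, sample) pair handled by a TensorCore.
  shape.add_dim()->set_size(table.num_features() *
                            config.batch_size_per_tensor_core());
  // One column per embedding dimension of the table.
  shape.add_dim()->set_size(table.dimension());
  return shape;
}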


@@ -70,7 +70,6 @@ REGISTER_OP("RecvTPUEmbeddingActivations")
       if (!config.ParseFromString(config_string)) {
         return errors::InvalidArgument("Malformed tpu_embedding_config.");
       }
-      tpu::AddDefaultEmbeddingOutputLayoutIfNeeded(&config);
       std::vector<TensorShapeProto> output_shapes;
       TF_RETURN_IF_ERROR(ComputeOutputTensorShapes(config, &output_shapes));
       if (c->num_outputs() != output_shapes.size()) {


@@ -87,9 +87,6 @@ message TPUEmbeddingConfiguration {
   // problem.
   bool pipeline_execution_with_tensor_core = 7;
-  // Extended output layout information; if not provided, a compatibility mode
-  // will use defaults that match the old layout. Providing a value for this
-  // field is EXPERIMENTAL and most ways of filling it will probably break. Do
-  // not set it unless you know what you are doing.
-  TPUEmbeddingOutputLayout output_layout = 8;
+  // Extended output layout information; deprecated and now ignored.
+  TPUEmbeddingOutputLayout output_layout = 8 [deprecated = true];
 }
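Because the field keeps its tag number and is only marked deprecated, previously serialized configurations that still carry an output_layout remain parseable; their contents are simply no longer consulted. A minimal sketch follows; ParseLegacyConfig, the include path of the generated header, and the textproto values are assumptions for illustration.

#include "google/protobuf/text_format.h"
#include "tensorflow/core/protobuf/tpu/tpu_embedding_configuration.pb.h"

// Sketch only: an older config that still fills in output_layout keeps
// parsing, because the field is retained (just marked deprecated); its
// contents are ignored when computing activation shapes.
bool ParseLegacyConfig(tensorflow::tpu::TPUEmbeddingConfiguration* config) {
  return google::protobuf::TextFormat::ParseFromString(
      R"pb(
        table_descriptor { name: "table0" dimension: 64 num_features: 1 }
        batch_size_per_tensor_core: 128
        # Deprecated: still accepted on the wire, but ignored downstream.
        output_layout { output { two_d { dim0_size_per_sample: 1 dim1_size: 64 } } }
      )pb",
      config);
}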


@@ -5,6 +5,8 @@ package tensorflow.tpu;
 // In the comments here, "layout" refers to the top-level EmbeddingOutputLayout
 // proto contained in the TPUEmbeddingConfiguration.
+// This proto is deprecated and its contents are no longer used.
 // The embedding output consists of a list of tensors, each specified by an
 // EmbeddingOutputTensor proto within the EmbeddingOutputLayout (the "output"
 // field). Each table and feature lookup is then placed into some number of
@@ -15,6 +17,8 @@ package tensorflow.tpu;
 // EmbeddingOutputLayout.
 message TPUEmbeddingOutputLayout {
+  option deprecated = true;
   // Location of one copy of the feature's data.
   message OutputLocation {
     // Which output tensor this copy of the feature will go into. Must be


@@ -45,7 +45,6 @@ cc_library(
hdrs = ["tpu_embedding_output_layout_utils.h"],
visibility = ["//visibility:public"],
deps = [
"//tensorflow/core:framework_headers_lib",
"//tensorflow/core:lib_proto_parsing",
"//tensorflow/core:protos_all_cc",
"//tensorflow/core/protobuf/tpu:tpu_embedding_configuration_proto_cc",


@@ -20,75 +20,17 @@ limitations under the License.
 namespace tensorflow {
 namespace tpu {
-void AddDefaultEmbeddingOutputLayoutIfNeeded(
-    TPUEmbeddingConfiguration* config) {
-  if (config->has_output_layout()) {
-    // Model or previous step has already filled this in.
-    return;
-  }
-  TPUEmbeddingOutputLayout* layout = config->mutable_output_layout();
-  // Create output tensors.
-  for (const auto& table : config->table_descriptor()) {
-    TPUEmbeddingOutputLayout::EmbeddingOutputTensor* output =
-        layout->add_output();
-    TPUEmbeddingOutputLayout::TwoDOutputTensor* two_d = output->mutable_two_d();
-    two_d->set_dim1_size(table.dimension());
-    two_d->set_dim0_size_per_sample(table.num_features());
-  }
-  // Create table output locations.
-  for (int table_id = 0; table_id < config->table_descriptor_size();
-       ++table_id) {
-    TPUEmbeddingOutputLayout::TableDescriptor* output_table =
-        layout->add_table();
-    const auto& table = config->table_descriptor(table_id);
-    for (int feature_index = 0; feature_index < table.num_features();
-         ++feature_index) {
-      TPUEmbeddingOutputLayout::FeatureDescriptor* output_feature =
-          output_table->add_feature();
-      TPUEmbeddingOutputLayout::OutputLocation* output_location =
-          output_feature->add_output_location();
-      output_location->set_tensor_index(table_id);
-      output_location->set_dim0_offset(feature_index);
-      output_location->set_dim1_offset(0);
-    }
-  }
-}
 Status ComputeOutputTensorShapes(const TPUEmbeddingConfiguration& config,
                                  std::vector<TensorShapeProto>* shapes) {
-  if (!config.has_output_layout()) {
-    return errors::InvalidArgument(
-        "TPUEmbeddingConfiguration is missing output layout.");
-  }
-  const TPUEmbeddingOutputLayout& layout = config.output_layout();
   int batch_size = config.batch_size_per_tensor_core();
-  for (int i = 0; i < layout.output_size(); ++i) {
-    const auto& output = layout.output(i);
+  for (const TPUEmbeddingConfiguration::TableDescriptor& table :
+       config.table_descriptor()) {
     TensorShapeProto shape;
-    switch (output.output_format_case()) {
-      case TPUEmbeddingOutputLayout::EmbeddingOutputTensor::OutputFormatCase::
-          kTwoD: {
-        auto* dim0 = shape.add_dim();
-        dim0->set_size(output.two_d().dim0_size_per_sample() * batch_size);
-        auto* dim1 = shape.add_dim();
-        dim1->set_size(output.two_d().dim1_size());
-        break;
-      }
-      case TPUEmbeddingOutputLayout::EmbeddingOutputTensor::OutputFormatCase::
-          OUTPUT_FORMAT_NOT_SET: {
-        return errors::InvalidArgument(
-            "Output layout in TPUEmbeddingConfiguration has unset embedding "
-            "output tensor format.");
-      }
-      default: {
-        return errors::InvalidArgument(
-            "Output layout in TPUEmbeddingConfiguration has invalid or "
-            "unhandled embedding output tensor format.");
-      }
-    }
+    auto* dim0 = shape.add_dim();
+    dim0->set_size(table.num_features() * batch_size);
+    auto* dim1 = shape.add_dim();
+    dim1->set_size(table.dimension());
     shapes->push_back(shape);
   }
   return Status::OK();
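For reference, a small usage sketch of the simplified function as declared in the header below; ExampleShapes and the utils header path are assumptions, and error handling is elided.

#include <vector>

#include "tensorflow/core/framework/tensor_shape.pb.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/protobuf/tpu/tpu_embedding_configuration.pb.h"
#include "tensorflow/core/tpu/tpu_embedding_output_layout_utils.h"  // path assumed

// Sketch: after this change every table contributes one activation tensor of
// shape [num_features * batch_size_per_tensor_core, dimension].
void ExampleShapes() {
  tensorflow::tpu::TPUEmbeddingConfiguration config;
  config.set_batch_size_per_tensor_core(128);
  auto* table = config.add_table_descriptor();
  table->set_dimension(64);
  table->set_num_features(2);

  std::vector<tensorflow::TensorShapeProto> shapes;
  tensorflow::Status status =
      tensorflow::tpu::ComputeOutputTensorShapes(config, &shapes);
  if (status.ok()) {
    // shapes[0] has dims {2 * 128, 64}, i.e. {256, 64}.
  }
}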


@@ -23,11 +23,7 @@ limitations under the License.
 namespace tensorflow {
 namespace tpu {
-// Creates a default output layout for compatibility if none was provided by the
-// model.
-void AddDefaultEmbeddingOutputLayoutIfNeeded(TPUEmbeddingConfiguration* config);
-// Computes the shape of the output tensors from an output layout.
+// Computes the shape of the output tensors from an embedding configuration.
 Status ComputeOutputTensorShapes(
     const TPUEmbeddingConfiguration& config,
     std::vector<tensorflow::TensorShapeProto>* shapes);