Move function definitions into .cc file

- Clarified some documentation and method names
- Replaced char* buffer usage with std::string where possible (see the sketch below)
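
A minimal sketch of the std::string pattern mentioned above, using a simplified stand-in class (ExampleBuilder and its members are illustrative only, not part of the delegate): each builder now caches its name in a std::string member that is built lazily on first use, instead of formatting into a fixed-size char buffer.

    #include <string>

    class ExampleBuilder {
     public:
      // Returns a reference to the cached name, building it on first call.
      const std::string& DebugName() {
        if (debug_name_.empty()) SetDebugName("ExampleBuilder", node_id_);
        return debug_name_;
      }

     private:
      // Replaces the old fixed-size char buffer + snprintf-style formatting.
      void SetDebugName(const char* name, int id) {
        debug_name_ = std::string(name) + "_" + std::to_string(id);
      }

      int node_id_ = 0;
      std::string debug_name_;
    };

Returning a const std::string& keeps callers that need a C string working through .c_str(), as GraphBuilder::BuildModel does below.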

PiperOrigin-RevId: 344083133
Change-Id: I29f3acc4967d71e540d33de14f96d770ad1a8c95
Authored by Taehee Jeong on 2020-11-24 10:12:18 -08:00; committed by TensorFlower Gardener
parent 603d88d72c
commit 6a30edd2de
29 changed files with 155 additions and 116 deletions

@@ -24,10 +24,9 @@ namespace tflite {
 namespace delegates {
 namespace coreml {
 
-const char* ActivationLayerBuilder::DebugName() {
-  if (!str_debug_name_[0])
-    GetDebugName("ActivationLayerBuilder", node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& ActivationLayerBuilder::DebugName() {
+  if (debug_name_.empty()) SetDebugName("ActivationLayerBuilder", node_id_);
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* ActivationLayerBuilder::Build() {

@@ -31,7 +31,7 @@ class ActivationLayerBuilder : public OpBuilder {
                           TfLiteFusedActivation activation)
       : OpBuilder(graph_builder), activation_(activation) {}
 
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -24,10 +24,9 @@ limitations under the License.
 namespace tflite {
 namespace delegates {
 namespace coreml {
-const char* AddOpBuilder::DebugName() {
-  if (!str_debug_name_[0])
-    GetDebugName("AddOpBuilder", node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& AddOpBuilder::DebugName() {
+  if (debug_name_.empty()) SetDebugName("AddOpBuilder", node_id_);
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* AddOpBuilder::Build() {

@@ -25,7 +25,7 @@ class AddOpBuilder : public OpBuilder {
  public:
   explicit AddOpBuilder(GraphBuilder* graph_builder)
       : OpBuilder(graph_builder) {}
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -26,10 +26,9 @@ class ConcatenationOpBuilder : public OpBuilder {
   explicit ConcatenationOpBuilder(GraphBuilder* graph_builder)
       : OpBuilder(graph_builder) {}
 
-  const char* DebugName() override {
-    if (!str_debug_name_[0])
-      GetDebugName("ConcatOpBuilder", node_id_, str_debug_name_);
-    return str_debug_name_;
+  const std::string& DebugName() override {
+    if (debug_name_.empty()) SetDebugName("ConcatOpBuilder", node_id_);
+    return debug_name_;
   }
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -25,10 +25,9 @@ limitations under the License.
 namespace tflite {
 namespace delegates {
 namespace coreml {
-const char* ConvolutionOpBuilder::DebugName() {
-  if (!str_debug_name_[0])
-    GetDebugName("ConvolutionOpBuilder", node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& ConvolutionOpBuilder::DebugName() {
+  if (debug_name_.empty()) SetDebugName("ConvolutionOpBuilder", node_id_);
+  return debug_name_;
 }
 
 void ConvolutionOpBuilder::SetWeights(TfLiteTensor* weights) {

@@ -33,7 +33,7 @@ class ConvolutionOpBuilder : public OpBuilder {
                         ConvolutionType conv_type)
       : OpBuilder(graph_builder), conv_type_(conv_type) {}
 
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -24,7 +24,10 @@ CoreML::Specification::NeuralNetworkLayer* DummyOpBuilder::Build() {
   return nullptr;
 }
 
-const char* DummyOpBuilder::DebugName() { return "Dummy OpBuilder"; }
+const std::string& DummyOpBuilder::DebugName() {
+  SetDebugName("DummyOpBuilder", node_id_);
+  return debug_name_;
+}
 
 TfLiteStatus DummyOpBuilder::PopulateSubgraph(TfLiteContext* context) {
   return kTfLiteOk;
@@ -34,6 +37,16 @@ OpBuilder* CreateDummyOpBuilder(GraphBuilder* graph_builder) {
   return new DummyOpBuilder(graph_builder);
 }
 
+TfLiteStatus DummyOpBuilder::RegisterInputs(const TfLiteIntArray* inputs,
+                                            TfLiteContext* context) {
+  return kTfLiteOk;
+}
+
+TfLiteStatus DummyOpBuilder::RegisterOutputs(const TfLiteIntArray* outputs,
+                                             TfLiteContext* context) {
+  return kTfLiteOk;
+}
+
 }  // namespace coreml
 }  // namespace delegates
 }  // namespace tflite

@@ -31,7 +31,13 @@ class DummyOpBuilder : public OpBuilder {
       : OpBuilder(graph_builder) {}
   CoreML::Specification::NeuralNetworkLayer* Build() override;
   TfLiteStatus PopulateSubgraph(TfLiteContext* context) override;
-  const char* DebugName() override;
+  const std::string& DebugName() override;
+
+  TfLiteStatus RegisterInputs(const TfLiteIntArray* inputs,
+                              TfLiteContext* context) override;
+  TfLiteStatus RegisterOutputs(const TfLiteIntArray* outputs,
+                               TfLiteContext* context) override;
 };
 
 }  // namespace coreml

@@ -24,10 +24,9 @@ limitations under the License.
 namespace tflite {
 namespace delegates {
 namespace coreml {
-const char* FullyConnectedOpBuilder::DebugName() {
-  if (!str_debug_name_[0])
-    GetDebugName("FullyConnectedOpBuilder", node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& FullyConnectedOpBuilder::DebugName() {
+  if (debug_name_.empty()) SetDebugName("FullyConnectedOpBuilder", node_id_);
+  return debug_name_;
 }
 
 void FullyConnectedOpBuilder::SetWeights(TfLiteTensor* weights) {

@@ -25,7 +25,7 @@ class FullyConnectedOpBuilder : public OpBuilder {
  public:
   explicit FullyConnectedOpBuilder(GraphBuilder* graph_builder)
       : OpBuilder(graph_builder) {}
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -23,10 +23,9 @@ limitations under the License.
 namespace tflite {
 namespace delegates {
 namespace coreml {
-const char* HardSwishOpBuilder::DebugName() {
-  if (!str_debug_name_[0])
-    GetDebugName("HardSwishOpBuilder", node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& HardSwishOpBuilder::DebugName() {
+  if (debug_name_.empty()) SetDebugName("HardSwishOpBuilder", node_id_);
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* HardSwishOpBuilder::Build() {

@@ -25,7 +25,7 @@ class HardSwishOpBuilder : public OpBuilder {
  public:
   explicit HardSwishOpBuilder(GraphBuilder* graph_builder)
       : OpBuilder(graph_builder) {}
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -27,10 +27,9 @@ limitations under the License.
 namespace tflite {
 namespace delegates {
 namespace coreml {
-const char* MulOpBuilder::DebugName() {
-  if (!str_debug_name_[0])
-    GetDebugName("MulOpBuilder", node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& MulOpBuilder::DebugName() {
+  if (debug_name_.empty()) SetDebugName("MulOpBuilder", node_id_);
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* MulOpBuilder::Build() {

@@ -25,7 +25,7 @@ class MulOpBuilder : public OpBuilder {
  public:
   explicit MulOpBuilder(GraphBuilder* graph_builder)
       : OpBuilder(graph_builder) {}
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -14,6 +14,8 @@ limitations under the License.
 ==============================================================================*/
 #include "tensorflow/lite/experimental/delegates/coreml/builders/op_builder.h"
 
+#include <string>
+
 #include "tensorflow/lite/builtin_ops.h"
 #include "tensorflow/lite/c/builtin_op_data.h"
 #include "tensorflow/lite/experimental/delegates/coreml/builders/op_factory.h"
@@ -22,6 +24,15 @@ limitations under the License.
 namespace tflite {
 namespace delegates {
 namespace coreml {
+
+std::string TensorID::ToString() const {
+  return std::to_string(node_) + "_" + std::to_string(output_id_);
+}
+
+int TensorID::NodeID() const { return node_; }
+
+int TensorID::OutputID() const { return output_id_; }
+
 OpBuilder* GraphBuilder::AddBuilder(int builtin_code, const TfLiteNode* node) {
   switch (builtin_code) {
     case kTfLiteBuiltinAdd:
@@ -113,7 +124,7 @@ CoreML::Specification::Model* GraphBuilder::BuildModel() {
     CoreML::Specification::NeuralNetworkLayer* layer = builder->Build();
     if (layer == nullptr) {
       fprintf(stderr, "Null layer returned from builder: %s\n",
-              builder->DebugName());
+              builder->DebugName().c_str());
       continue;
     }
     neural_network->mutable_layers()->AddAllocated(layer);
@@ -159,6 +170,35 @@ bool GraphBuilder::IsTensorUsed(int tflite_tensor_index) {
   return used_tensor_[tflite_tensor_index];
 }
 
+CoreML::Specification::NeuralNetworkLayer* OpBuilder::Build() {
+  layer_->set_name(DebugName());
+  return layer_.release();
+}
+
+TfLiteStatus OpBuilder::PopulateSubgraph(TfLiteContext* context) {
+  builder_output_ = AddOutput();
+  return kTfLiteOk;
+}
+
+void OpBuilder::SetBuiltinData(void* builtin_data) {
+  builtin_data_ = builtin_data;
+}
+
+void OpBuilder::SetNodeID(int id) { node_id_ = id; }
+
+void OpBuilder::SetTfLiteNode(const TfLiteNode* node) { tflite_node_ = node; }
+
+int OpBuilder::GetID() const { return node_id_; }
+
+TensorID OpBuilder::GetOutput(TfLiteContext* context) {
+  if (builder_output_.NodeID() != -1) {
+    return builder_output_;
+  }
+  // builder_output_ is not set when PopulateSubgraph is not called.
+  builder_output_ = AddOutput();
+  return builder_output_;
+}
+
 void OpBuilder::AddInput(const std::string& input_name) {
   if (layer_ == nullptr) {
     layer_.reset(new CoreML::Specification::NeuralNetworkLayer);
@@ -180,6 +220,10 @@ TensorID OpBuilder::AddOutput() {
   return tensor_id;
 }
 
+void OpBuilder::SetDebugName(const char* name, int id) {
+  debug_name_ = std::string(name) + "_" + std::to_string(id);
+}
+
 }  // namespace coreml
 }  // namespace delegates
 }  // namespace tflite

@@ -15,8 +15,8 @@ limitations under the License.
 #ifndef TENSORFLOW_LITE_EXPERIMENTAL_DELEGATES_COREML_BUILDERS_OP_BUILDER_H_
 #define TENSORFLOW_LITE_EXPERIMENTAL_DELEGATES_COREML_BUILDERS_OP_BUILDER_H_
 
-#include "absl/strings/str_cat.h"
-#include "absl/strings/str_format.h"
+#include <string>
+
 #include "mlmodel/format/Model.pb.h"
 #include "mlmodel/format/NeuralNetwork.pb.h"
 #include "tensorflow/lite/c/common.h"
@@ -34,11 +34,11 @@ class TensorID {
   TensorID() {}
   TensorID(int node, int output_id) : node_(node), output_id_(output_id) {}
 
-  std::string ToString() const { return absl::StrCat(node_, "__", output_id_); }
+  std::string ToString() const;
 
-  int NodeID() const { return node_; }
-  int OutputID() const { return output_id_; }
+  int NodeID() const;
+  int OutputID() const;
 
  private:
   int node_ = -1;
@@ -101,20 +101,18 @@ class OpBuilder {
   // Returns the Layer this builder responsible for.
   // Ownership is transferred to caller.
-  virtual CoreML::Specification::NeuralNetworkLayer* Build() {
-    layer_->set_name(DebugName());
-    return layer_.release();
-  }
+  virtual CoreML::Specification::NeuralNetworkLayer* Build();
 
+  // Associates TfLite input tensors to Core ML layer's inputs and properties.
+  // Verification for input constraints should happen here.
   virtual TfLiteStatus RegisterInputs(const TfLiteIntArray* inputs,
-                                      TfLiteContext* context) {
-    return kTfLiteOk;
-  }
+                                      TfLiteContext* context) = 0;
 
+  // Associates TFLite output tensor with the node's output. If the OpBuilder
+  // has subgraphs, The final output of that subgraph should be associated with
+  // the output tensor.
   virtual TfLiteStatus RegisterOutputs(const TfLiteIntArray* outputs,
-                                       TfLiteContext* context) {
-    return kTfLiteOk;
-  }
+                                       TfLiteContext* context) = 0;
 
   // Adds additional required OpBuilders, and populate builder_output_ with
   // Actual output that corresponds to output tensor of TFL Node.
@@ -122,32 +120,17 @@ class OpBuilder {
   // composing other ops. For example, Relu6 in TfLite can be converted to
   // Relu -> Threshold -> Neg.
   // TODO(b/147211734): have this called automatically when necessary.
-  virtual TfLiteStatus PopulateSubgraph(TfLiteContext* context) {
-    builder_output_ = AddOutput();
-    return kTfLiteOk;
-  }
+  virtual TfLiteStatus PopulateSubgraph(TfLiteContext* context);
 
-  virtual const char* DebugName() = 0;
+  virtual const std::string& DebugName() = 0;
 
-  void SetBuiltinData(void* builtin_data) { builtin_data_ = builtin_data; }
+  void SetBuiltinData(void* builtin_data);
 
-  void SetNodeID(int id) { node_id_ = id; }
+  void SetNodeID(int id);
 
-  void SetTfLiteNode(const TfLiteNode* node) { tflite_node_ = node; }
+  void SetTfLiteNode(const TfLiteNode* node);
 
-  int GetID() const { return node_id_; }
+  int GetID() const;
 
-  TensorID AddOutput();
-
-  // To be used by clients that needs the output of the node.
-  virtual TensorID GetOutput(TfLiteContext* context) {
-    if (builder_output_.NodeID() != -1) {
-      return builder_output_;
-    }
-    // builder_output_ is not set when PopulateSubgraph is not called.
-    builder_output_ = AddOutput();
-    return builder_output_;
-  }
-
   // Adds input with tensor name.
   void AddInput(const std::string& input_name);
@@ -159,13 +142,16 @@ class OpBuilder {
   // TODO(taeheej): cleanup AddInput use cases and used tensor tracking.
   void AddInput(int tf_input_id);
 
+  // Simply adds new output to the underlying layer.
+  TensorID AddOutput();
+
+  // Should set builder_output_ (if unset) and return it as the output of
+  // this node. To be used by clients that needs the output of the node.
+  virtual TensorID GetOutput(TfLiteContext* context);
+
  protected:
-  // Helper to print op instance name.
-  void GetDebugName(const char* name, int id, char* debug_name) {
-    // TODO(karimnosseir): Move away from absl, probably adding overhead
-    // on binary size ?.
-    absl::SNPrintF(debug_name, 100 * sizeof(char), "%s_%d", name, id);
-  }
+  // Sets layer's name.
+  void SetDebugName(const char* layer_name, int id);
 
   GraphBuilder* graph_builder_ = nullptr;
   // Data needed by this node.
@@ -174,7 +160,7 @@ class OpBuilder {
   int num_outputs_ = 0;
   const TfLiteNode* tflite_node_ = nullptr;
   TensorID builder_output_;
-  char str_debug_name_[100] = {0};
+  std::string debug_name_;
   std::unique_ptr<CoreML::Specification::NeuralNetworkLayer> layer_;
 };
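
Taken together, the updated op_builder.h contract is: DebugName() returns a cached const std::string&, RegisterInputs()/RegisterOutputs() are pure virtual, and the protected SetDebugName() helper replaces GetDebugName(). A hypothetical builder written against this interface might look roughly like the sketch below (IdentityOpBuilder is made up for illustration and is not part of this change):

    // Hypothetical example only, sketched against the changed OpBuilder interface.
    class IdentityOpBuilder : public OpBuilder {
     public:
      explicit IdentityOpBuilder(GraphBuilder* graph_builder)
          : OpBuilder(graph_builder) {}

      const std::string& DebugName() override {
        if (debug_name_.empty()) SetDebugName("IdentityOpBuilder", node_id_);
        return debug_name_;
      }

      CoreML::Specification::NeuralNetworkLayer* Build() override;

      // Now pure virtual in OpBuilder, so every concrete builder must override
      // these; the base-class defaults that returned kTfLiteOk no longer exist.
      TfLiteStatus RegisterInputs(const TfLiteIntArray* inputs,
                                  TfLiteContext* context) override;
      TfLiteStatus RegisterOutputs(const TfLiteIntArray* outputs,
                                   TfLiteContext* context) override;
    };

This is also why DummyOpBuilder above gained trivial RegisterInputs/RegisterOutputs overrides in this commit.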

@@ -25,12 +25,12 @@ namespace tflite {
 namespace delegates {
 namespace coreml {
 
-const char* PadOpBuilder::DebugName() {
-  if (str_debug_name_[0]) return str_debug_name_;
-  GetDebugName(padding_type_ == PadType::kPad ? "PadOpBuilder (PAD)"
-                                              : "PadOpBuilder (MIRROR_PAD)",
-               node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& PadOpBuilder::DebugName() {
+  if (!debug_name_.empty()) return debug_name_;
+  SetDebugName(padding_type_ == PadType::kPad ? "PadOpBuilder (PAD)"
                                               : "PadOpBuilder (MIRROR_PAD)",
+               node_id_);
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* PadOpBuilder::Build() {

@@ -30,7 +30,7 @@ class PadOpBuilder : public OpBuilder {
   explicit PadOpBuilder(GraphBuilder* graph_builder, PadType padding_type)
       : OpBuilder(graph_builder), padding_type_(padding_type) {}
 
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -25,26 +25,25 @@ namespace tflite {
 namespace delegates {
 namespace coreml {
 
-const char* PoolingLayerBuilder::DebugName() {
-  if (str_debug_name_[0]) return str_debug_name_;
+const std::string& PoolingLayerBuilder::DebugName() {
+  if (!debug_name_.empty()) return debug_name_;
   switch (pooling_type_) {
     case kTfLiteBuiltinAveragePool2d:
-      GetDebugName("PoolingLayerBuilder (AVERAGE)", node_id_, str_debug_name_);
+      SetDebugName("PoolingLayerBuilder (AVERAGE)", node_id_);
       break;
     case kTfLiteBuiltinMaxPool2d:
-      GetDebugName("PoolingLayerBuilder (MAX)", node_id_, str_debug_name_);
+      SetDebugName("PoolingLayerBuilder (MAX)", node_id_);
       break;
     case kTfLiteBuiltinL2Pool2d:
-      GetDebugName("PoolingLayerBuilder (L2, unsupported)", node_id_,
-                   str_debug_name_);
+      SetDebugName("PoolingLayerBuilder (L2, unsupported)", node_id_);
       break;
     case kTfLiteBuiltinMean:
-      GetDebugName("PoolingLayerBuilder (MEAN)", node_id_, str_debug_name_);
+      SetDebugName("PoolingLayerBuilder (MEAN)", node_id_);
       break;
     default:
-      GetDebugName("PoolingLayerBuilder (ERROR)", node_id_, str_debug_name_);
+      SetDebugName("PoolingLayerBuilder (ERROR)", node_id_);
   }
-  return str_debug_name_;
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* PoolingLayerBuilder::Build() {

@@ -28,7 +28,7 @@ class PoolingLayerBuilder : public OpBuilder {
                       TfLiteBuiltinOperator pooling_type)
       : OpBuilder(graph_builder), pooling_type_(pooling_type) {}
 
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -26,11 +26,11 @@ namespace tflite {
 namespace delegates {
 namespace coreml {
 
-const char* ReshapeOpBuilder::DebugName() {
-  if (!str_debug_name_[0]) {
-    GetDebugName("ReshapeOpBuilder", node_id_, str_debug_name_);
+const std::string& ReshapeOpBuilder::DebugName() {
+  if (debug_name_.empty()) {
+    SetDebugName("ReshapeOpBuilder", node_id_);
   }
-  return str_debug_name_;
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* ReshapeOpBuilder::Build() {

@@ -26,7 +26,7 @@ class ReshapeOpBuilder : public OpBuilder {
  public:
   explicit ReshapeOpBuilder(GraphBuilder* graph_builder)
       : OpBuilder(graph_builder) {}
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -28,10 +28,10 @@ namespace tflite {
 namespace delegates {
 namespace coreml {
 
-const char* ResizeBilinearOpBuilder::DebugName() {
-  if (str_debug_name_[0]) return str_debug_name_;
-  GetDebugName("ResizeBilinearOpBuilder", node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& ResizeBilinearOpBuilder::DebugName() {
+  if (!debug_name_.empty()) return debug_name_;
+  SetDebugName("ResizeBilinearOpBuilder", node_id_);
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* ResizeBilinearOpBuilder::Build() {

@@ -27,7 +27,7 @@ class ResizeBilinearOpBuilder : public OpBuilder {
   explicit ResizeBilinearOpBuilder(GraphBuilder* graph_builder)
       : OpBuilder(graph_builder) {}
 
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -19,10 +19,9 @@ limitations under the License.
 namespace tflite {
 namespace delegates {
 namespace coreml {
-const char* SoftmaxOpBuilder::DebugName() {
-  if (!str_debug_name_[0])
-    GetDebugName("SoftmaxOpBuilder", node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& SoftmaxOpBuilder::DebugName() {
+  if (debug_name_.empty()) SetDebugName("SoftmaxOpBuilder", node_id_);
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* SoftmaxOpBuilder::Build() {

@@ -25,7 +25,7 @@ class SoftmaxOpBuilder : public OpBuilder {
  public:
   explicit SoftmaxOpBuilder(GraphBuilder* graph_builder)
       : OpBuilder(graph_builder) {}
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;

@@ -20,10 +20,9 @@ namespace tflite {
 namespace delegates {
 namespace coreml {
 
-const char* ThresholdLayerBuilder::DebugName() {
-  if (!str_debug_name_[0])
-    GetDebugName("ThresholdLayerBuilder", node_id_, str_debug_name_);
-  return str_debug_name_;
+const std::string& ThresholdLayerBuilder::DebugName() {
+  if (debug_name_.empty()) SetDebugName("ThresholdLayerBuilder", node_id_);
+  return debug_name_;
 }
 
 CoreML::Specification::NeuralNetworkLayer* ThresholdLayerBuilder::Build() {

@@ -30,7 +30,7 @@ class ThresholdLayerBuilder : public OpBuilder {
   explicit ThresholdLayerBuilder(GraphBuilder* graph_builder)
       : OpBuilder(graph_builder) {}
 
-  const char* DebugName() override;
+  const std::string& DebugName() override;
 
   CoreML::Specification::NeuralNetworkLayer* Build() override;