Replace calls to deprecated googletest macros *TEST_CASE() with *TEST_SUITE()

PiperOrigin-RevId: 229586776
This commit is contained in:
A. Unique TensorFlower 2019-01-16 10:54:45 -08:00 committed by TensorFlower Gardener
parent aba19e4143
commit 2eaf93a349
16 changed files with 54 additions and 53 deletions

View File

@@ -49,7 +49,7 @@ using Types =
std::pair<int16, qint16>, std::pair<uint16, quint16>, std::pair<int16, qint16>, std::pair<uint16, quint16>,
std::pair<int32, qint32>>; std::pair<int32, qint32>>;
TYPED_TEST_CASE(LiteralUtilTest, Types); TYPED_TEST_SUITE(LiteralUtilTest, Types);
TYPED_TEST(LiteralUtilTest, LiteralToQuantizedHostTensor) { TYPED_TEST(LiteralUtilTest, LiteralToQuantizedHostTensor) {
using int_type = typename TypeParam::first_type; using int_type = typename TypeParam::first_type;

View File

@@ -139,7 +139,7 @@ TEST_P(StatusPropagationTest, PartialRunDoneFirst) {
// ExecutorDone and PartialRunDone. // ExecutorDone and PartialRunDone.
Status ExecutorError() { return errors::Internal("executor error"); } Status ExecutorError() { return errors::Internal("executor error"); }
Status PartialRunError() { return errors::Internal("partial run error"); } Status PartialRunError() { return errors::Internal("partial run error"); }
INSTANTIATE_TEST_CASE_P( INSTANTIATE_TEST_SUITE_P(
PartialRunMgr, StatusPropagationTest, PartialRunMgr, StatusPropagationTest,
::testing::Values( ::testing::Values(
StatusTestParam{Status::OK(), Status::OK(), Status::OK()}, StatusTestParam{Status::OK(), Status::OK(), Status::OK()},

View File

@@ -75,7 +75,7 @@ TEST_P(Bfloat16Test, TruncateTest) {
EXPECT_EQ(GetParam().expected_rounding, float(rounded)); EXPECT_EQ(GetParam().expected_rounding, float(rounded));
} }
INSTANTIATE_TEST_CASE_P( INSTANTIATE_TEST_SUITE_P(
Bfloat16Test_Instantiation, Bfloat16Test, Bfloat16Test_Instantiation, Bfloat16Test,
::testing::Values( ::testing::Values(
Bfloat16TestParam{ Bfloat16TestParam{

View File

@@ -83,9 +83,10 @@ TEST_P(AsyncInterleaveManyTest, Model) {
EXPECT_GE(async_interleave_many->OutputTime(&input_times), 0); EXPECT_GE(async_interleave_many->OutputTime(&input_times), 0);
} }
INSTANTIATE_TEST_CASE_P(Test, AsyncInterleaveManyTest, INSTANTIATE_TEST_SUITE_P(Test, AsyncInterleaveManyTest,
::testing::Combine(::testing::Values(1, 2), ::testing::Combine(::testing::Values(1, 2),
::testing::Values(0, 50, 100, 200))); ::testing::Values(0, 50, 100,
200)));
class AsyncKnownRatioTest class AsyncKnownRatioTest
: public ::testing::TestWithParam<std::tuple<int64, int64, int64>> {}; : public ::testing::TestWithParam<std::tuple<int64, int64, int64>> {};
@@ -156,10 +157,10 @@ TEST_P(AsyncKnownRatioTest, Model) {
EXPECT_GE(async_known_many->OutputTime(&input_times), 0); EXPECT_GE(async_known_many->OutputTime(&input_times), 0);
} }
INSTANTIATE_TEST_CASE_P(Test, AsyncKnownRatioTest, INSTANTIATE_TEST_SUITE_P(Test, AsyncKnownRatioTest,
::testing::Combine(::testing::Values(1, 2, 4, 8), ::testing::Combine(::testing::Values(1, 2, 4, 8),
::testing::Values(0, 50, 100, 200), ::testing::Values(0, 50, 100, 200),
::testing::Values(0, 1, 2, 4))); ::testing::Values(0, 1, 2, 4)));
TEST(InterleaveManyTest, Model) { TEST(InterleaveManyTest, Model) {
std::shared_ptr<Node> interleave_many = std::shared_ptr<Node> interleave_many =
@@ -245,7 +246,7 @@ TEST_P(KnownRatioTest, Model) {
num_inputs_per_output * (50 + 100) + 64); num_inputs_per_output * (50 + 100) + 64);
} }
INSTANTIATE_TEST_CASE_P(Test, KnownRatioTest, ::testing::Values(0, 1, 2, 4)); INSTANTIATE_TEST_SUITE_P(Test, KnownRatioTest, ::testing::Values(0, 1, 2, 4));
TEST(SourceTest, Model) { TEST(SourceTest, Model) {
std::shared_ptr<Node> source = model::MakeSourceNode({0, "source", nullptr}); std::shared_ptr<Node> source = model::MakeSourceNode({0, "source", nullptr});

View File

@@ -952,8 +952,8 @@ class FusedConv2DWithBiasOpTest : public FusedConv2DOpTest<T> {};
template <typename T> template <typename T>
class FusedConv2DWithBatchNormOpTest : public FusedConv2DOpTest<T> {}; class FusedConv2DWithBatchNormOpTest : public FusedConv2DOpTest<T> {};
TYPED_TEST_CASE_P(FusedConv2DWithBiasOpTest); TYPED_TEST_SUITE_P(FusedConv2DWithBiasOpTest);
TYPED_TEST_CASE_P(FusedConv2DWithBatchNormOpTest); TYPED_TEST_SUITE_P(FusedConv2DWithBatchNormOpTest);
// -------------------------------------------------------------------------- // // -------------------------------------------------------------------------- //
// Conv2D + BiasAdd + {Relu} // // Conv2D + BiasAdd + {Relu} //
@@ -1035,29 +1035,29 @@ TYPED_TEST_P(FusedConv2DWithBatchNormOpTest, SpatialConvolutionAndRelu) {
this->VerifyConv2DWithBatchNormAndRelu(filter_size, filter_count); this->VerifyConv2DWithBatchNormAndRelu(filter_size, filter_count);
} }
REGISTER_TYPED_TEST_CASE_P(FusedConv2DWithBiasOpTest, // REGISTER_TYPED_TEST_SUITE_P(FusedConv2DWithBiasOpTest, //
OneByOneConvolution, // OneByOneConvolution, //
ImageSizeConvolution, // ImageSizeConvolution, //
SpatialConvolution, // SpatialConvolution, //
OneByOneConvolutionAndRelu, // OneByOneConvolutionAndRelu, //
ImageSizeConvolutionAndRelu, // ImageSizeConvolutionAndRelu, //
SpatialConvolutionAndRelu); SpatialConvolutionAndRelu);
REGISTER_TYPED_TEST_CASE_P(FusedConv2DWithBatchNormOpTest, // REGISTER_TYPED_TEST_SUITE_P(FusedConv2DWithBatchNormOpTest, //
OneByOneConvolution, // OneByOneConvolution, //
ImageSizeConvolution, // ImageSizeConvolution, //
SpatialConvolution, // SpatialConvolution, //
OneByOneConvolutionAndRelu, // OneByOneConvolutionAndRelu, //
ImageSizeConvolutionAndRelu, // ImageSizeConvolutionAndRelu, //
SpatialConvolutionAndRelu); SpatialConvolutionAndRelu);
using FusedBiasAddDataTypes = ::testing::Types<float, double>; using FusedBiasAddDataTypes = ::testing::Types<float, double>;
INSTANTIATE_TYPED_TEST_CASE_P(Test, FusedConv2DWithBiasOpTest, INSTANTIATE_TYPED_TEST_SUITE_P(Test, FusedConv2DWithBiasOpTest,
FusedBiasAddDataTypes); FusedBiasAddDataTypes);
using FusedBatchNormDataTypes = ::testing::Types<float>; using FusedBatchNormDataTypes = ::testing::Types<float>;
INSTANTIATE_TYPED_TEST_CASE_P(Test, FusedConv2DWithBatchNormOpTest, INSTANTIATE_TYPED_TEST_SUITE_P(Test, FusedConv2DWithBatchNormOpTest,
FusedBatchNormDataTypes); FusedBatchNormDataTypes);
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
// Performance benchmarks for the FusedConv2DWithBiasOp. // // Performance benchmarks for the FusedConv2DWithBiasOp. //

View File

@@ -45,7 +45,7 @@ typedef ::testing::Types<Int8_IT, UInt8_IT, Int16_IT, UInt16_IT, Int32_IT,
Int64_IT, UInt64_IT, Long_IT> Int64_IT, UInt64_IT, Long_IT>
SupportedIntTypes; SupportedIntTypes;
TYPED_TEST_CASE(IntTypeTest, SupportedIntTypes); TYPED_TEST_SUITE(IntTypeTest, SupportedIntTypes);
TYPED_TEST(IntTypeTest, TestInitialization) { TYPED_TEST(IntTypeTest, TestInitialization) {
constexpr typename TestFixture::T a; constexpr typename TestFixture::T a;

View File

@@ -209,7 +209,7 @@ const Scalar NcclManagerTest<Scalar>::max_ =
// Instantiate tests for float and double. // Instantiate tests for float and double.
using TypeList = ::testing::Types<float, double>; using TypeList = ::testing::Types<float, double>;
TYPED_TEST_CASE(NcclManagerTest, TypeList); TYPED_TEST_SUITE(NcclManagerTest, TypeList);
// Test basic sum reduction. // Test basic sum reduction.
TYPED_TEST(NcclManagerTest, BasicSumReduction) { TYPED_TEST(NcclManagerTest, BasicSumReduction) {

View File

@@ -400,7 +400,7 @@ class BidirectionalLSTMOpModel : public SingleOpModel {
// indicating whether to use quantization or not. // indicating whether to use quantization or not.
class LSTMOpTest : public ::testing::TestWithParam<bool> {}; class LSTMOpTest : public ::testing::TestWithParam<bool> {};
INSTANTIATE_TEST_CASE_P(QuantizationOrNot, LSTMOpTest, ::testing::Bool()); INSTANTIATE_TEST_SUITE_P(QuantizationOrNot, LSTMOpTest, ::testing::Bool());
TEST_P(LSTMOpTest, BlackBoxTestNoCifgNoPeepholeNoProjectionNoClipping) { TEST_P(LSTMOpTest, BlackBoxTestNoCifgNoPeepholeNoProjectionNoClipping) {
const int n_batch = 1; const int n_batch = 1;

View File

@@ -1069,7 +1069,7 @@ TEST_P(ConvolutionOpTest, DISABLED_PointwiseMultifilterHybrid) {
0.0474))); 0.0474)));
} }
INSTANTIATE_TEST_CASE_P( INSTANTIATE_TEST_SUITE_P(
ConvolutionOpTest, ConvolutionOpTest, ConvolutionOpTest, ConvolutionOpTest,
::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap))); ::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap)));

View File

@@ -437,11 +437,11 @@ TEST_P(QuantizedDepthwiseConvolutionOpTest, SimpleDilatedTestPaddingSame) {
ElementsAreArray({4, 7, 3, 6, 10, 4, 2, 3, 1})); ElementsAreArray({4, 7, 3, 6, 10, 4, 2, 3, 1}));
} }
INSTANTIATE_TEST_CASE_P( INSTANTIATE_TEST_SUITE_P(
DepthwiseConvolutionOpTest, DepthwiseConvolutionOpTest, DepthwiseConvolutionOpTest, DepthwiseConvolutionOpTest,
::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap))); ::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap)));
INSTANTIATE_TEST_CASE_P( INSTANTIATE_TEST_SUITE_P(
QuantizedDepthwiseConvolutionOpTest, QuantizedDepthwiseConvolutionOpTest, QuantizedDepthwiseConvolutionOpTest, QuantizedDepthwiseConvolutionOpTest,
::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap))); ::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap)));

View File

@@ -725,11 +725,11 @@ TEST_P(QuantizedFullyConnectedOpTest,
ElementsAre(175, 177, 179, 243, 245, 247)); ElementsAre(175, 177, 179, 243, 245, 247));
} }
INSTANTIATE_TEST_CASE_P( INSTANTIATE_TEST_SUITE_P(
FloatFullyConnectedOpTest, FloatFullyConnectedOpTest, FloatFullyConnectedOpTest, FloatFullyConnectedOpTest,
::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap))); ::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap)));
INSTANTIATE_TEST_CASE_P( INSTANTIATE_TEST_SUITE_P(
QuantizedFullyConnectedOpTest, QuantizedFullyConnectedOpTest, QuantizedFullyConnectedOpTest, QuantizedFullyConnectedOpTest,
::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMapNoPie))); ::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMapNoPie)));

View File

@@ -236,9 +236,9 @@ TEST_P(ReshapeOpTest, Strings) {
ElementsAreArray({"1", "2", "3", "4", "5", "6", "7", "8"})); ElementsAreArray({"1", "2", "3", "4", "5", "6", "7", "8"}));
} }
INSTANTIATE_TEST_CASE_P(VariedShapeSpec, ReshapeOpTest, INSTANTIATE_TEST_SUITE_P(VariedShapeSpec, ReshapeOpTest,
::testing::Values(kAsReshapeOption, kAsConstantTensor, ::testing::Values(kAsReshapeOption, kAsConstantTensor,
kAsTensor)); kAsTensor));
} // namespace } // namespace
} // namespace tflite } // namespace tflite

View File

@@ -252,7 +252,7 @@ TEST_P(TransposeConvOpTest, AccuracyTest) {
EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({1, 3, 4, 1})); EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({1, 3, 4, 1}));
} }
INSTANTIATE_TEST_CASE_P( INSTANTIATE_TEST_SUITE_P(
TransposeConvOpTest, TransposeConvOpTest, TransposeConvOpTest, TransposeConvOpTest,
::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap))); ::testing::ValuesIn(SingleOpTest::GetKernelTags(*kKernelMap)));

View File

@@ -196,10 +196,10 @@ TEST_P(SpeechTest, DISABLED_TtsTest) {
// 200s just to bring up the Android emulator.) // 200s just to bring up the Android emulator.)
static const int kAllInvocations = -1; static const int kAllInvocations = -1;
static const int kFirstFewInvocations = 10; static const int kFirstFewInvocations = 10;
INSTANTIATE_TEST_CASE_P(LongTests, SpeechTest, INSTANTIATE_TEST_SUITE_P(LongTests, SpeechTest,
::testing::Values(kAllInvocations)); ::testing::Values(kAllInvocations));
INSTANTIATE_TEST_CASE_P(ShortTests, SpeechTest, INSTANTIATE_TEST_SUITE_P(ShortTests, SpeechTest,
::testing::Values(kFirstFewInvocations)); ::testing::Values(kFirstFewInvocations));
} // namespace } // namespace
} // namespace tflite } // namespace tflite

View File

@@ -257,8 +257,8 @@ std::vector<tensorflow::DataType> TestTypes() {
return {DT_FLOAT, DT_INT32, DT_INT64, DT_BOOL, DT_QUINT8, DT_COMPLEX64}; return {DT_FLOAT, DT_INT32, DT_INT64, DT_BOOL, DT_QUINT8, DT_COMPLEX64};
} }
INSTANTIATE_TEST_CASE_P(ShapeImportTest, ShapeImportTest, INSTANTIATE_TEST_SUITE_P(ShapeImportTest, ShapeImportTest,
::testing::ValuesIn(TestTypes())); ::testing::ValuesIn(TestTypes()));
class ContentImportTest : public ::testing::Test { class ContentImportTest : public ::testing::Test {
public: public:
@@ -418,8 +418,8 @@ TEST_P(TypeImportTest, BasicTypeInference) {
model.operators[0].get()); model.operators[0].get());
ASSERT_THAT(op->output_data_types, ::testing::ElementsAre(GetParam().second)); ASSERT_THAT(op->output_data_types, ::testing::ElementsAre(GetParam().second));
} }
INSTANTIATE_TEST_CASE_P(BasicTypeInference, TypeImportTest, INSTANTIATE_TEST_SUITE_P(BasicTypeInference, TypeImportTest,
::testing::ValuesIn(UnaryTestTypes())); ::testing::ValuesIn(UnaryTestTypes()));
TEST(ImportTest, TypeInferenceWithFixedOutputType) { TEST(ImportTest, TypeInferenceWithFixedOutputType) {
// Create an op that has a fixed output type (bool). // Create an op that has a fixed output type (bool).

View File

@@ -96,8 +96,8 @@ TEST_P(ShapeTest, Agrees) {
} }
} }
INSTANTIATE_TEST_CASE_P(AgreeBroadcast, ShapeTest, INSTANTIATE_TEST_SUITE_P(AgreeBroadcast, ShapeTest,
::testing::ValuesIn(CreateShapePairs())); ::testing::ValuesIn(CreateShapePairs()));
static const char kNegativeValuesMessage[] = static const char kNegativeValuesMessage[] =
"Tensor shape should not include negative values"; "Tensor shape should not include negative values";