Revert "Split saved model benchmark tests into individual test to avoid timeout."

PiperOrigin-RevId: 321144378
Change-Id: Icbdad7cdb1daa873c65ce4000e0ce68ed0f7a829
Mihai Maruseac, 2020-07-14 06:09:01 -07:00 (committed by TensorFlower Gardener)
parent b6980e0529
commit 20982b2052
13 changed files with 110 additions and 532 deletions


@@ -23,15 +23,6 @@ exports_files(["LICENSE"])
# to the regular expression is executed.
# e.g. --benchmarks=".*lstm*." will run all lstm layer related benchmarks.
# Add all benchmark-related utils here for pip testing dependencies.
py_library(
    name = "keras_benchmark_lib_pip",
    deps = [
        ":benchmark_util",
        "//tensorflow/python/keras/benchmarks/saved_model_benchmarks:saved_model_benchmark_util",
    ],
)
py_test(
    name = "keras_cpu_benchmark_test",
    size = "large",
@@ -58,6 +49,22 @@ cuda_py_test(
    ],
)

cuda_py_test(
    name = "applications_saved_model_test",
    size = "medium",
    srcs = ["applications_saved_model_test.py"],
    shard_count = 8,
    tags = [
        "no_oss_py38",  # b/160170347
        "no_windows",  # b/160269052
    ],
    deps = [
        "//tensorflow/python:client_testlib",
        "//tensorflow/python/keras/applications",
        "@absl_py//absl/testing:parameterized",
    ],
)

cuda_py_test(
    name = "model_components_benchmarks_test",
    srcs = ["model_components_benchmarks_test.py"],


@@ -0,0 +1,93 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for Keras applications."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
import time
import six
from tensorflow.python.keras.applications import densenet
from tensorflow.python.keras.applications import efficientnet
from tensorflow.python.keras.applications import inception_resnet_v2
from tensorflow.python.keras.applications import mobilenet_v2
from tensorflow.python.keras.applications import nasnet
from tensorflow.python.keras.applications import resnet_v2
from tensorflow.python.keras.applications import vgg19
from tensorflow.python.keras.applications import xception
from tensorflow.python.keras.saving.saved_model import load as keras_load
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import gfile
from tensorflow.python.platform import googletest
from tensorflow.python.platform import test
class BenchmarkSaveApplications(
    six.with_metaclass(benchmark.ParameterizedBenchmark, test.Benchmark)):

  _benchmark_parameters = [
      ('ResNet152V2', resnet_v2.ResNet152V2, 2048),
      ('VGG19', vgg19.VGG19, 512),
      ('Xception', xception.Xception, 2048),
      ('InceptionResNetV2', inception_resnet_v2.InceptionResNetV2, 1536),
      ('MobileNetV2', mobilenet_v2.MobileNetV2, 1280),
      ('DenseNet201', densenet.DenseNet201, 1920),
      ('EfficientNetB7', efficientnet.EfficientNetB7, 2560),
      ('NASNetLarge', nasnet.NASNetLarge, 4032),
  ]

  def benchmark_save_and_load_applications(self, app, _):
    trials = 3

    model = app(weights=None)
    model_name = app.__name__

    tmp_dir = googletest.GetTempDir()
    gfile.MakeDirs(tmp_dir)
    save_dir = tempfile.mkdtemp(dir=tmp_dir)

    total_save_time = 0
    total_load_time = 0
    # Run one untimed iteration of saving/loading.
    model.save(save_dir, save_format='tf')
    keras_load.load(save_dir)

    for _ in range(trials):
      start_time = time.time()
      model.save(save_dir, save_format='tf')
      total_save_time += time.time() - start_time

      start_time = time.time()
      keras_load.load(save_dir)
      total_load_time += time.time() - start_time

    self.report_benchmark(
        iters=trials,
        wall_time=total_save_time / trials,
        name='{}.save'.format(model_name))
    self.report_benchmark(
        iters=1,
        wall_time=total_load_time / trials,
        name='{}.load'.format(model_name))
    gfile.DeleteRecursively(save_dir)


if __name__ == '__main__':
  test.main()
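
Usage note (editorial sketch, not part of the diff): with the combined applications_saved_model_test target restored above, an individual model's save/load benchmark can still be selected with the --benchmarks regex flag described in the benchmarks BUILD comments. The generated benchmark names are assumed here to contain the parameter name from _benchmark_parameters, so a filtered run might look like:

    bazel run --config=cuda -c opt \
        //tensorflow/python/keras/benchmarks:applications_saved_model_test -- \
        --benchmarks=".*MobileNetV2.*"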


@@ -1,104 +0,0 @@
# Description:
# Implementation of Keras benchmarks.
load("//tensorflow:tensorflow.bzl", "cuda_py_test")
package(
    default_visibility = ["//visibility:public"],
    licenses = ["notice"],  # Apache 2.0
)
exports_files(["LICENSE"])
# To run CPU benchmarks:
# bazel run -c opt benchmarks_test -- --benchmarks=.
# To run GPU benchmarks:
# bazel run --config=cuda -c opt --copt="-mavx" benchmarks_test -- \
# --benchmarks=.
# To run a subset of benchmarks using --benchmarks flag.
# --benchmarks: the list of benchmarks to run. The specified value is interpreted
# as a regular expression and any benchmark whose name contains a partial match
# to the regular expression is executed.
# e.g. --benchmarks=".*lstm*." will run all lstm layer related benchmarks.
py_library(
    name = "saved_model_benchmark_util",
    srcs = ["saved_model_benchmark_util.py"],
    deps = [
        "//tensorflow:tensorflow_py",
    ],
)

cuda_py_test(
    name = "densenet_benchmark_test",
    srcs = ["densenet_benchmark_test.py"],
    deps = [
        ":saved_model_benchmark_util",
        "//tensorflow:tensorflow_py",
    ],
)

cuda_py_test(
    name = "efficientnet_benchmark_test",
    srcs = ["efficientnet_benchmark_test.py"],
    deps = [
        ":saved_model_benchmark_util",
        "//tensorflow:tensorflow_py",
    ],
)

cuda_py_test(
    name = "inception_resnet_v2_benchmark_test",
    srcs = ["inception_resnet_v2_benchmark_test.py"],
    deps = [
        ":saved_model_benchmark_util",
        "//tensorflow:tensorflow_py",
    ],
)

cuda_py_test(
    name = "mobilenet_benchmark_test",
    srcs = ["mobilenet_benchmark_test.py"],
    deps = [
        ":saved_model_benchmark_util",
        "//tensorflow:tensorflow_py",
    ],
)

cuda_py_test(
    name = "nasnet_large_benchmark_test",
    srcs = ["nasnet_large_benchmark_test.py"],
    deps = [
        ":saved_model_benchmark_util",
        "//tensorflow:tensorflow_py",
    ],
)

cuda_py_test(
    name = "resnet152_v2_benchmark_test",
    srcs = ["resnet152_v2_benchmark_test.py"],
    deps = [
        ":saved_model_benchmark_util",
        "//tensorflow:tensorflow_py",
    ],
)

cuda_py_test(
    name = "vgg_benchmark_test",
    srcs = ["vgg_benchmark_test.py"],
    deps = [
        ":saved_model_benchmark_util",
        "//tensorflow:tensorflow_py",
    ],
)

cuda_py_test(
    name = "xception_benchmark_test",
    srcs = ["xception_benchmark_test.py"],
    deps = [
        ":saved_model_benchmark_util",
        "//tensorflow:tensorflow_py",
    ],
)


@@ -1,43 +0,0 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for saved model on DenseNet201."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util
class BenchmarkSaveApplications(tf.test.Benchmark):

  def benchmark_save_and_load_densenet_201(self):
    app = tf.keras.applications.DenseNet201
    save_result, load_result = (
        saved_model_benchmark_util.save_and_load_benchmark(app))
    self.report_benchmark(
        iters=save_result['iters'],
        wall_time=save_result['wall_time'],
        name=save_result['name'])
    self.report_benchmark(
        iters=load_result['iters'],
        wall_time=load_result['wall_time'],
        name=load_result['name'])


if __name__ == '__main__':
  tf.test.main()


@@ -1,43 +0,0 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for saved model on EfficientNetB7."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util
class BenchmarkSaveApplications(tf.test.Benchmark):

  def benchmark_save_and_load_efficient_net_b7(self):
    app = tf.keras.applications.EfficientNetB7
    save_result, load_result = (
        saved_model_benchmark_util.save_and_load_benchmark(app))
    self.report_benchmark(
        iters=save_result['iters'],
        wall_time=save_result['wall_time'],
        name=save_result['name'])
    self.report_benchmark(
        iters=load_result['iters'],
        wall_time=load_result['wall_time'],
        name=load_result['name'])


if __name__ == '__main__':
  tf.test.main()


@@ -1,44 +0,0 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for saved model on InceptionResNetV2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util
class BenchmarkSaveApplications(tf.test.Benchmark):

  def benchmark_save_and_load_inception_resnet_v2(self):
    app = tf.keras.applications.InceptionResNetV2
    save_result, load_result = (
        saved_model_benchmark_util.save_and_load_benchmark(app))
    self.report_benchmark(
        iters=save_result['iters'],
        wall_time=save_result['wall_time'],
        name=save_result['name'])
    self.report_benchmark(
        iters=load_result['iters'],
        wall_time=load_result['wall_time'],
        name=load_result['name'])


if __name__ == '__main__':
  tf.test.main()


@@ -1,43 +0,0 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for saved model on MobileNetV2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util
class BenchmarkSaveApplications(tf.test.Benchmark):

  def benchmark_save_and_load_mobilenet_v2(self):
    app = tf.keras.applications.MobileNetV2
    save_result, load_result = (
        saved_model_benchmark_util.save_and_load_benchmark(app))
    self.report_benchmark(
        iters=save_result['iters'],
        wall_time=save_result['wall_time'],
        name=save_result['name'])
    self.report_benchmark(
        iters=load_result['iters'],
        wall_time=load_result['wall_time'],
        name=load_result['name'])


if __name__ == '__main__':
  tf.test.main()


@@ -1,43 +0,0 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for saved model on NASNetLarge."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util
class BenchmarkSaveApplications(tf.test.Benchmark):

  def benchmark_save_and_load_nasnet_large(self):
    app = tf.keras.applications.NASNetLarge
    save_result, load_result = (
        saved_model_benchmark_util.save_and_load_benchmark(app))
    self.report_benchmark(
        iters=save_result['iters'],
        wall_time=save_result['wall_time'],
        name=save_result['name'])
    self.report_benchmark(
        iters=load_result['iters'],
        wall_time=load_result['wall_time'],
        name=load_result['name'])


if __name__ == '__main__':
  tf.test.main()


@@ -1,44 +0,0 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for saved model on ResNet152V2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util
class BenchmarkSaveApplications(tf.test.Benchmark):

  def benchmark_save_and_load_resnet152_v2(self):
    app = tf.keras.applications.ResNet152V2
    save_result, load_result = (
        saved_model_benchmark_util.save_and_load_benchmark(app))
    self.report_benchmark(
        iters=save_result['iters'],
        wall_time=save_result['wall_time'],
        name=save_result['name'])
    self.report_benchmark(
        iters=load_result['iters'],
        wall_time=load_result['wall_time'],
        name=load_result['name'])


if __name__ == '__main__':
  tf.test.main()


@@ -1,70 +0,0 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utils for saved model benchmarks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
import time
import tensorflow as tf
from tensorflow.python.platform import gfile
from tensorflow.python.platform import googletest
def save_and_load_benchmark(app):
  """Util for saved model benchmarks."""
  trials = 3

  model = app(weights=None)
  model_name = app.__name__

  tmp_dir = googletest.GetTempDir()
  gfile.MakeDirs(tmp_dir)
  save_dir = tempfile.mkdtemp(dir=tmp_dir)

  total_save_time = 0
  total_load_time = 0
  # Run one untimed iteration of saving/loading.
  model.save(save_dir, save_format='tf')
  tf.keras.models.load_model(save_dir)

  for _ in range(trials):
    start_time = time.time()
    model.save(save_dir, save_format='tf')
    total_save_time += time.time() - start_time

    start_time = time.time()
    tf.keras.models.load_model(save_dir)
    total_load_time += time.time() - start_time

  save_result = {
      'iters': trials,
      'wall_time': total_save_time / trials,
      'name': '{}.save'.format(model_name)
  }
  load_result = {
      'iters': trials,
      'wall_time': total_load_time / trials,
      'name': '{}.load'.format(model_name)
  }
  gfile.DeleteRecursively(save_dir)
  return save_result, load_result


@@ -1,44 +0,0 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for saved model on VGG19."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util
class BenchmarkSaveApplications(tf.test.Benchmark):

  def benchmark_save_and_load_vgg19(self):
    app = tf.keras.applications.VGG19
    save_result, load_result = (
        saved_model_benchmark_util.save_and_load_benchmark(app))
    self.report_benchmark(
        iters=save_result['iters'],
        wall_time=save_result['wall_time'],
        name=save_result['name'])
    self.report_benchmark(
        iters=load_result['iters'],
        wall_time=load_result['wall_time'],
        name=load_result['name'])


if __name__ == '__main__':
  tf.test.main()


@@ -1,44 +0,0 @@
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Benchmarks for saved model on Xception."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util
class BenchmarkSaveApplications(tf.test.Benchmark):

  def benchmark_save_and_load_xception(self):
    app = tf.keras.applications.Xception
    save_result, load_result = (
        saved_model_benchmark_util.save_and_load_benchmark(app))
    self.report_benchmark(
        iters=save_result['iters'],
        wall_time=save_result['wall_time'],
        name=save_result['name'])
    self.report_benchmark(
        iters=load_result['iters'],
        wall_time=load_result['wall_time'],
        name=load_result['name'])


if __name__ == '__main__':
  tf.test.main()


@@ -132,7 +132,7 @@ COMMON_PIP_DEPS = [
    "//tensorflow/python/keras/mixed_precision/experimental:test_util",
    "//tensorflow/python/keras/tests:model_subclassing_test_util",
    "//tensorflow/python/keras/tests:model_architectures",
    "//tensorflow/python/keras/benchmarks:keras_benchmark_lib_pip",
    "//tensorflow/python/keras/benchmarks:benchmark_util",
    "//tensorflow/python/kernel_tests:cudnn_deterministic_base",
    "//tensorflow/python/kernel_tests:bias_op_base",
    "//tensorflow/python/kernel_tests/random:util",