Linter fixes

Martin Wicke 2018-02-15 15:42:02 -08:00 committed by GitHub
parent ad8d437b32
commit f0a9686511

tensorflow/examples/image_retraining/retrain.py

@@ -41,7 +41,6 @@ The subfolder names are important, since they define what label is applied to
 each image, but the filenames themselves don't matter. Once your images are
 prepared, you can run the training with a command like this:
 ```bash
 bazel build tensorflow/examples/image_retraining:retrain && \
 bazel-bin/tensorflow/examples/image_retraining/retrain \
@@ -70,12 +69,14 @@ on resource-limited platforms, you can try the `--architecture` flag with a
 Mobilenet model. For example:
 Run floating-point version of mobilenet:
 ```bash
 python tensorflow/examples/image_retraining/retrain.py \
     --image_dir ~/flower_photos --architecture mobilenet_1.0_224
 ```
 Run quantized version of mobilenet:
 ```bash
 python tensorflow/examples/image_retraining/retrain.py \
     --image_dir ~/flower_photos/ --architecture mobilenet_1.0_224_quantized
@@ -98,8 +99,10 @@ tensorboard --logdir /tmp/retrain_logs
 To use with Tensorflow Serving:
-tensorflow_model_server --port=9000 --model_name=inception --model_base_path=/tmp/saved_models/
+```bash
+tensorflow_model_server --port=9000 --model_name=inception \
+    --model_base_path=/tmp/saved_models/
+```
 """
 from __future__ import absolute_import
 from __future__ import division
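
For context on the serving command above: once `tensorflow_model_server` is running, the retrained model can be queried over gRPC. A minimal client sketch, assuming the `tensorflow-serving-api` and `grpcio` packages and a placeholder `image.jpg`; whether the `image` input expects raw JPEG bytes or a decoded float array depends on which `--architecture` was retrained:

```python
import grpc
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2_grpc

# Connect to the server started with --port=9000 above.
channel = grpc.insecure_channel('localhost:9000')
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)

request = predict_pb2.PredictRequest()
request.model_spec.name = 'inception'  # must match --model_name
request.model_spec.signature_name = (
    tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY)

# The exported signature names its single input 'image'; raw JPEG bytes
# are assumed here (see the note above about the architecture).
with open('image.jpg', 'rb') as f:
  request.inputs['image'].CopyFrom(tf.contrib.util.make_tensor_proto(f.read()))

response = stub.Predict(request, 10.0)  # 10 second timeout
print(response.outputs['prediction'])   # class scores as a TensorProto
```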
@@ -1026,24 +1029,25 @@ def export_model(sess, architecture, saved_model_dir):
   inputs = {'image': tf.saved_model.utils.build_tensor_info(in_image)}
   out_classes = sess.graph.get_tensor_by_name('final_result:0')
-  outputs = {'prediction': tf.saved_model.utils.build_tensor_info(out_classes)}
+  outputs = {'prediction':
+                 tf.saved_model.utils.build_tensor_info(out_classes)}
   signature = tf.saved_model.signature_def_utils.build_signature_def(
       inputs=inputs,
       outputs=outputs,
-      method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME
-  )
+      method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)
   legacy_init_op = tf.group(tf.tables_initializer(), name='legacy_init_op')
   # Save out the SavedModel.
   builder = tf.saved_model.builder.SavedModelBuilder(saved_model_dir)
   builder.add_meta_graph_and_variables(
       sess, [tf.saved_model.tag_constants.SERVING],
       signature_def_map={
-          tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: signature
+          tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
+              signature
      },
       legacy_init_op=legacy_init_op)
   builder.save()
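
The exported SavedModel can also be sanity-checked directly in Python, without the model server. A minimal sketch, assuming TensorFlow 1.x, a hypothetical `image.jpg`, and a hypothetical export path under `/tmp/saved_models/`:

```python
import tensorflow as tf

export_dir = '/tmp/saved_models/'  # wherever export_model() wrote the SavedModel

with tf.Session(graph=tf.Graph()) as sess:
  # Load the graph and variables saved by SavedModelBuilder above.
  meta_graph = tf.saved_model.loader.load(
      sess, [tf.saved_model.tag_constants.SERVING], export_dir)

  # Resolve tensor names through the 'serving_default' signature rather
  # than hard-coding graph node names.
  signature = meta_graph.signature_def[
      tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
  image_input = signature.inputs['image'].name
  prediction_output = signature.outputs['prediction'].name

  # The feed value must match what the 'image' tensor expects for the
  # chosen architecture; raw JPEG bytes are assumed here.
  with open('image.jpg', 'rb') as f:
    print(sess.run(prediction_output, feed_dict={image_input: f.read()}))
```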