Export initial batch of contrib summary symbols under tf.compat.v2

PiperOrigin-RevId: 222122302
This commit is contained in:
Nick Felt 2018-11-19 12:48:46 -08:00 committed by TensorFlower Gardener
parent 03bbe80587
commit 9a83d2111f
5 changed files with 107 additions and 77 deletions

View File

@ -79,6 +79,7 @@ from tensorflow.python.ops.summary_ops_v2 import image
from tensorflow.python.ops.summary_ops_v2 import import_event
from tensorflow.python.ops.summary_ops_v2 import initialize
from tensorflow.python.ops.summary_ops_v2 import never_record_summaries
from tensorflow.python.ops.summary_ops_v2 import record_summaries
from tensorflow.python.ops.summary_ops_v2 import record_summaries_every_n_global_steps
from tensorflow.python.ops.summary_ops_v2 import scalar
from tensorflow.python.ops.summary_ops_v2 import should_record_summaries

View File

@ -40,7 +40,9 @@ from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import summary_op_util
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import training_util
from tensorflow.python.util import deprecation
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# Dictionary mapping graph keys to a boolean Tensor (or callable returning
@ -56,6 +58,7 @@ _RUN_NAME_PATTERNS = re.compile(r"^[^\x00-\x1F<>]{0,512}$")
_USER_NAME_PATTERNS = re.compile(r"^[a-z]([-a-z0-9]{0,29}[a-z0-9])?$", re.I)
@tf_export("summary.should_record_summaries", v1=[])
def should_record_summaries():
"""Returns boolean Tensor which is true if summaries should be recorded."""
global _SHOULD_RECORD_SUMMARIES
@ -64,58 +67,64 @@ def should_record_summaries():
return should() if callable(should) else should
# TODO(apassos) consider how to handle local step here.
@tf_export("summary.record_summaries", v1=[])
@tf_contextlib.contextmanager
def record_summaries(boolean=True):
  """Sets summary recording on or off per the provided boolean value.

  The provided value can be a python boolean, a scalar boolean Tensor, or
  a callable providing such a value; if a callable is passed it will be
  invoked each time should_record_summaries() is called to determine whether
  summary writing should be enabled.

  Args:
    boolean: can be True, False, a bool Tensor, or a callable providing such.
      Defaults to True.

  Yields:
    Nothing; acts as a context manager that sets this value on enter and
    restores the previous value on exit.
  """
  # TODO(nickfelt): make this threadlocal
  global _SHOULD_RECORD_SUMMARIES
  # Recording state is tracked per-graph; keyed by the graph's internal key.
  key = ops.get_default_graph()._graph_key  # pylint: disable=protected-access
  # setdefault ensures a prior value exists to restore (False if unset).
  old = _SHOULD_RECORD_SUMMARIES.setdefault(key, False)
  try:
    _SHOULD_RECORD_SUMMARIES[key] = boolean
    yield
  finally:
    # Restore the previous recording state even if the body raised.
    _SHOULD_RECORD_SUMMARIES[key] = old
# TODO(apassos) consider how to handle local step here.
def record_summaries_every_n_global_steps(n, global_step=None):
  """Sets the should_record_summaries Tensor to true if global_step % n == 0.

  Args:
    n: integer; summaries are recorded once every `n` global steps.
    global_step: the step Tensor to test against; if None, the default
      global step is fetched (and created if necessary).

  Returns:
    A context manager (from `record_summaries`) that enables recording only
    on steps where `global_step % n == 0`.
  """
  if global_step is None:
    global_step = training_util.get_or_create_global_step()
  with ops.device("cpu:0"):
    # Defer the modulo check via a callable so it is re-evaluated on each
    # should_record_summaries() call in eager mode; in graph mode a single
    # Tensor suffices since it is re-evaluated at session.run time.
    should = lambda: math_ops.equal(global_step % n, 0)
    if not context.executing_eagerly():
      should = should()
  return record_summaries(should)
def always_record_summaries():
  """Sets the should_record_summaries Tensor to always true.

  Returns:
    A context manager (from `record_summaries`) that enables recording
    unconditionally while active.
  """
  # Delegates to record_summaries, which is already a context manager, so
  # no @tf_contextlib.contextmanager decorator is needed here.
  return record_summaries(True)
def never_record_summaries():
  """Sets the should_record_summaries Tensor to always false.

  Returns:
    A context manager (from `record_summaries`) that disables recording
    while active.
  """
  # Delegates to record_summaries, which is already a context manager, so
  # no @tf_contextlib.contextmanager decorator is needed here.
  return record_summaries(False)
@tf_export("summary.SummaryWriter", v1=[])
class SummaryWriter(object):
"""Encapsulates a stateful summary writer resource.
See also:
- `tf.contrib.summary.create_file_writer`
- `tf.contrib.summary.create_db_writer`
- `tf.summary.create_file_writer`
- `tf.summary.create_db_writer`
"""
def __init__(self, resource, init_op_fn):
@ -210,6 +219,7 @@ def initialize(
session.run(_graph(x, 0), feed_dict={x: data})
@tf_export("summary.create_file_writer", v1=[])
def create_file_writer(logdir,
max_queue=None,
flush_millis=None,
@ -285,7 +295,7 @@ def create_db_writer(db_uri,
`tf.Graph`.
Returns:
A `tf.contrib.summary.SummaryWriter` instance.
A `tf.summary.SummaryWriter` instance.
"""
with ops.device("cpu:0"):
if experiment_name is None:
@ -334,7 +344,7 @@ def _nothing():
def all_summary_ops():
"""Graph-mode only. Returns all summary ops.
Please note this excludes `tf.contrib.summary.graph` ops.
Please note this excludes `tf.summary.graph` ops.
Returns:
The summary ops.
@ -502,7 +512,7 @@ def graph(param, step=None, name=None):
"""Writes a TensorFlow graph to the summary interface.
The graph summary is, strictly speaking, not a summary. Conditions
like `tf.contrib.summary.never_record_summaries` do not apply. Only
like `tf.summary.should_record_summaries` do not apply. Only
a single graph can be associated with a particular run. If multiple
graphs are written, then only the last one will be considered by
TensorBoard.
@ -546,14 +556,13 @@ def graph(param, step=None, name=None):
_graph = graph # for functions with a graph parameter
@tf_export("summary.import_event", v1=[])
def import_event(tensor, name=None):
"""Writes a `tf.Event` binary proto.
When using create_db_writer(), this can be used alongside
`tf.TFRecordReader` to load event logs into the database. Please
note that this is lower level than the other summary functions and
will ignore any conditions set by methods like
`tf.contrib.summary.should_record_summaries`.
This can be used to import existing event logs into a new summary writer sink.
Please note that this is lower level than the other summary functions and
will ignore the `tf.summary.should_record_summaries` setting.
Args:
tensor: A `tf.Tensor` of type `string` containing a serialized
@ -567,13 +576,14 @@ def import_event(tensor, name=None):
context.context().summary_writer_resource, tensor, name=name)
@tf_export("summary.flush", v1=[])
def flush(writer=None, name=None):
"""Forces summary writer to send any buffered data to storage.
This operation blocks until that finishes.
Args:
writer: The `tf.contrib.summary.SummaryWriter` resource to flush.
writer: The `tf.summary.SummaryWriter` resource to flush.
The thread default will be used if this parameter is None.
Otherwise a `tf.no_op` is returned.
name: A name for the operation (optional).
@ -600,6 +610,8 @@ def eval_dir(model_dir, name=None):
return os.path.join(model_dir, "eval" if not name else "eval_" + name)
@deprecation.deprecated(date=None,
instructions="Renamed to create_file_writer().")
def create_summary_file_writer(*args, **kwargs):
"""Please use `tf.contrib.summary.create_file_writer`."""
logging.warning("Deprecation Warning: create_summary_file_writer was renamed "

View File

@ -52,7 +52,7 @@ from tensorflow.python.util import compat as _compat
from tensorflow.python.util.tf_export import tf_export
@tf_export('summary.scalar')
@tf_export(v1=['summary.scalar'])
def scalar(name, tensor, collections=None, family=None):
"""Outputs a `Summary` protocol buffer containing a single scalar value.
@ -82,7 +82,7 @@ def scalar(name, tensor, collections=None, family=None):
return val
@tf_export('summary.image')
@tf_export(v1=['summary.image'])
def image(name, tensor, max_outputs=3, collections=None, family=None):
"""Outputs a `Summary` protocol buffer with images.
@ -138,7 +138,7 @@ def image(name, tensor, max_outputs=3, collections=None, family=None):
return val
@tf_export('summary.histogram')
@tf_export(v1=['summary.histogram'])
def histogram(name, values, collections=None, family=None):
# pylint: disable=line-too-long
"""Outputs a `Summary` protocol buffer with a histogram.
@ -179,7 +179,7 @@ def histogram(name, values, collections=None, family=None):
return val
@tf_export('summary.audio')
@tf_export(v1=['summary.audio'])
def audio(name, tensor, sample_rate, max_outputs=3, collections=None,
family=None):
# pylint: disable=line-too-long
@ -228,7 +228,7 @@ def audio(name, tensor, sample_rate, max_outputs=3, collections=None,
return val
@tf_export('summary.text')
@tf_export(v1=['summary.text'])
def text(name, tensor, collections=None):
"""Summarizes textual data.
@ -269,7 +269,7 @@ def text(name, tensor, collections=None):
return t_summary
@tf_export('summary.tensor_summary')
@tf_export(v1=['summary.tensor_summary'])
def tensor_summary(name,
tensor,
summary_description=None,
@ -325,7 +325,7 @@ def tensor_summary(name,
return val
@tf_export('summary.merge')
@tf_export(v1=['summary.merge'])
def merge(inputs, collections=None, name=None):
# pylint: disable=line-too-long
"""Merges summaries.
@ -371,7 +371,7 @@ def merge(inputs, collections=None, name=None):
return val
@tf_export('summary.merge_all')
@tf_export(v1=['summary.merge_all'])
def merge_all(key=_ops.GraphKeys.SUMMARIES, scope=None, name=None):
"""Merges all summaries collected in the default graph.
@ -404,7 +404,7 @@ def merge_all(key=_ops.GraphKeys.SUMMARIES, scope=None, name=None):
return merge(summary_ops, name=name)
@tf_export('summary.get_summary_description')
@tf_export(v1=['summary.get_summary_description'])
def get_summary_description(node_def):
"""Given a TensorSummary node_def, retrieve its SummaryDescription.

View File

@ -0,0 +1,29 @@
path: "tensorflow.summary.SummaryWriter"
tf_class {
is_instance: "<class \'tensorflow.python.ops.summary_ops_v2.SummaryWriter\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'resource\', \'init_op_fn\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "as_default"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "close"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "flush"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "init"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_as_default"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}

View File

@ -24,44 +24,32 @@ tf_module {
name: "SummaryDescription"
mtype: "<class \'google.protobuf.pyext.cpp_message.GeneratedProtocolMessageType\'>"
}
member {
name: "SummaryWriter"
mtype: "<type \'type\'>"
}
member {
name: "TaggedRunMetadata"
mtype: "<class \'google.protobuf.pyext.cpp_message.GeneratedProtocolMessageType\'>"
}
member_method {
name: "audio"
argspec: "args=[\'name\', \'tensor\', \'sample_rate\', \'max_outputs\', \'collections\', \'family\'], varargs=None, keywords=None, defaults=[\'3\', \'None\', \'None\'], "
name: "create_file_writer"
argspec: "args=[\'logdir\', \'max_queue\', \'flush_millis\', \'filename_suffix\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "get_summary_description"
argspec: "args=[\'node_def\'], varargs=None, keywords=None, defaults=None"
name: "flush"
argspec: "args=[\'writer\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "histogram"
argspec: "args=[\'name\', \'values\', \'collections\', \'family\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
name: "import_event"
argspec: "args=[\'tensor\', \'name\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "image"
argspec: "args=[\'name\', \'tensor\', \'max_outputs\', \'collections\', \'family\'], varargs=None, keywords=None, defaults=[\'3\', \'None\', \'None\'], "
name: "record_summaries"
argspec: "args=[\'boolean\'], varargs=None, keywords=None, defaults=[\'True\'], "
}
member_method {
name: "merge"
argspec: "args=[\'inputs\', \'collections\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "merge_all"
argspec: "args=[\'key\', \'scope\', \'name\'], varargs=None, keywords=None, defaults=[\'summaries\', \'None\', \'None\'], "
}
member_method {
name: "scalar"
argspec: "args=[\'name\', \'tensor\', \'collections\', \'family\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "tensor_summary"
argspec: "args=[\'name\', \'tensor\', \'summary_description\', \'collections\', \'summary_metadata\', \'family\', \'display_name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "text"
argspec: "args=[\'name\', \'tensor\', \'collections\'], varargs=None, keywords=None, defaults=[\'None\'], "
name: "should_record_summaries"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
}