Add error if calling jacobian or batch_jacobian on an exhausted tape.

Currently, this fails silently: the call simply returns None.

PiperOrigin-RevId: 324700276
Change-Id: If5b4fc76bc3bfd2280ca67395015aca5bcf62f91
This commit is contained in:
Mark Daoust 2020-08-03 15:50:36 -07:00 committed by TensorFlower Gardener
parent 0e5562be23
commit 52821ea935
2 changed files with 48 additions and 6 deletions

View File

@ -997,6 +997,9 @@ class GradientTape(object):
unconnected_gradients=UnconnectedGradients.NONE):
"""Computes the gradient using operations recorded in context of this tape.
Note: Unless you set `persistent=True` a GradientTape can only be used to
compute one set of gradients (or jacobians).
Args:
target: a list or nested structure of Tensors or Variables to be
differentiated.
@ -1015,14 +1018,14 @@ class GradientTape(object):
the structure of `sources`.
Raises:
RuntimeError: if called inside the context of the tape, or if called more
than once on a non-persistent tape.
ValueError: if the target is a variable or if unconnected gradients is
RuntimeError: If called on a used, non-persistent tape.
RuntimeError: If called inside the context of the tape.
ValueError: If the target is a variable or if unconnected gradients is
called with an unknown value.
"""
if self._tape is None:
raise RuntimeError("GradientTape.gradient can only be called once on "
"non-persistent tapes.")
raise RuntimeError("A non-persistent GradientTape can only be used to "
"compute one set of gradients (or jacobians)")
if self._recording:
if not self._persistent:
self._pop_tape()
@ -1101,6 +1104,9 @@ class GradientTape(object):
experimental_use_pfor=True):
"""Computes the jacobian using operations recorded in context of this tape.
Note: Unless you set `persistent=True` a GradientTape can only be used to
compute one set of gradients (or jacobians).
See [wikipedia article](http://en.wikipedia.org/wiki/jacobian_matrix_and_determinant)
for the definition of a Jacobian.
@ -1139,10 +1145,15 @@ class GradientTape(object):
Raises:
RuntimeError: If called on a used, non-persistent tape.
RuntimeError: If called on a non-persistent tape with eager execution
enabled and without enabling experimental_use_pfor.
ValueError: If vectorization of jacobian computation fails.
"""
if self._tape is None:
raise RuntimeError("A non-persistent GradientTape can only be used to "
"compute one set of gradients (or jacobians)")
flat_sources = nest.flatten(sources)
rewrap_as_ndarray = False
if isinstance(target, np_arrays.ndarray):
@ -1225,6 +1236,9 @@ class GradientTape(object):
are lower dimensional and avoid a bunch of redundant zeros which would
result in the jacobian computation given the independence assumption.
Note: Unless you set `persistent=True` a GradientTape can only be used to
compute one set of gradients (or jacobians).
Example usage:
```python
@ -1255,11 +1269,15 @@ class GradientTape(object):
per-example jacobians.
Raises:
RuntimeError: If called on a used, non-persistent tape.
RuntimeError: If called on a non-persistent tape with eager execution
enabled and without enabling experimental_use_pfor.
ValueError: If vectorization of jacobian computation fails or if first
dimension of `target` and `source` do not match.
"""
if self._tape is None:
raise RuntimeError("A non-persistent GradientTape can only be used to "
"compute one set of gradients (or jacobians)")
rewrap_as_ndarray = False
if isinstance(target, np_arrays.ndarray):
target = target.data

View File

@ -837,9 +837,33 @@ class BackpropTest(test.TestCase, parameterized.TestCase):
z = y * y
g.gradient(z, [x])
with self.assertRaisesRegex(
RuntimeError, 'GradientTape.gradient can only be called once'):
RuntimeError, 'A non-persistent GradientTape can only'):
g.gradient(y, [x])
@test_util.assert_no_new_tensors
def testGradientTapeJacobianCalledMultipleTimes(self):
with backprop.GradientTape() as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
z = y * y
g.jacobian(z, [x])
with self.assertRaisesRegex(
RuntimeError, 'A non-persistent GradientTape can only'):
g.jacobian(y, [x])
@test_util.assert_no_new_tensors
def testGradientTapeBatchJacobianCalledMultipleTimes(self):
with backprop.GradientTape() as g:
x = constant_op.constant([[3.0]])
g.watch(x)
y = x * x
z = y * y
g.batch_jacobian(z, x)
with self.assertRaisesRegex(
RuntimeError, 'A non-persistent GradientTape can only'):
g.batch_jacobian(y, [x])
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes
@test_util.run_v1_only('b/120545219')