Relax the check for state_size
`hasattr` works by actually evaluating the `state_size` attribute. In the case of `tfa.seq2seq.AttentionWrapper`, `state_size` is a `@property` that is only built at graph runtime, after `setup_memory` has been called; evaluating it earlier raises an exception, so the `hasattr` check fails when an AttentionWrapper with dynamic memories is used. Checking `'state_size' in dir(cell)` inspects the class without triggering the property. More details: https://github.com/tensorflow/addons/issues/680
This commit is contained in:
parent
c5eafbfebd
commit
4ab6a520c9
@ -82,7 +82,7 @@ class StackedRNNCells(Layer):
|
||||
if not hasattr(cell, 'call'):
|
||||
raise ValueError('All cells must have a `call` method. '
|
||||
'received cells:', cells)
|
||||
if not hasattr(cell, 'state_size'):
|
||||
if not ('state_size' in dir(cell) or hasattr(cell, 'state_size')):
|
||||
raise ValueError('All cells must have a '
|
||||
'`state_size` attribute. '
|
||||
'received cells:', cells)
|
||||
@ -391,7 +391,7 @@ class RNN(Layer):
|
||||
if not hasattr(cell, 'call'):
|
||||
raise ValueError('`cell` should have a `call` method. '
|
||||
'The RNN was passed:', cell)
|
||||
if not hasattr(cell, 'state_size'):
|
||||
if not ('state_size' in dir(cell) or hasattr(cell, 'state_size')):
|
||||
raise ValueError('The RNN cell should have '
|
||||
'an attribute `state_size` '
|
||||
'(tuple of integers, '
|
||||
|
Loading…
x
Reference in New Issue
Block a user