Merge pull request #40619 from jpodivin:docfix37293
PiperOrigin-RevId: 320625911
Change-Id: I02ad15e3b1c1ac97918db4b059939f8cc416da00
commit 7e279d6b0f
@@ -39,19 +39,19 @@ class Module(tracking.AutoTrackable):
   functions which apply to user input. For example a dense layer in a neural
   network might be implemented as a `tf.Module`:
 
   >>> class Dense(tf.Module):
-  ...   def __init__(self, in_features, out_features, name=None):
+  ...   def __init__(self, input_dim, output_size, name=None):
   ...     super(Dense, self).__init__(name=name)
   ...     self.w = tf.Variable(
-  ...       tf.random.normal([in_features, out_features]), name='w')
-  ...     self.b = tf.Variable(tf.zeros([out_features]), name='b')
+  ...       tf.random.normal([input_dim, output_size]), name='w')
+  ...     self.b = tf.Variable(tf.zeros([output_size]), name='b')
   ...   def __call__(self, x):
   ...     y = tf.matmul(x, self.w) + self.b
   ...     return tf.nn.relu(y)
 
   You can use the Dense layer as you would expect:
 
-  >>> d = Dense(in_features=3, out_features=2)
+  >>> d = Dense(input_dim=3, output_size=2)
   >>> d(tf.ones([1, 3]))
   <tf.Tensor: shape=(1, 2), dtype=float32, numpy=..., dtype=float32)>
 
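For reference, the renamed `Dense` example from this hunk as a standalone, runnable script (a minimal sketch assuming TensorFlow 2.x is installed; the printed values depend on the random initialization):

```python
import tensorflow as tf


class Dense(tf.Module):
  """Dense (fully connected) layer mirroring the updated docstring example."""

  def __init__(self, input_dim, output_size, name=None):
    super(Dense, self).__init__(name=name)
    # Variables created here are tracked automatically by tf.Module.
    self.w = tf.Variable(
        tf.random.normal([input_dim, output_size]), name='w')
    self.b = tf.Variable(tf.zeros([output_size]), name='b')

  def __call__(self, x):
    y = tf.matmul(x, self.w) + self.b
    return tf.nn.relu(y)


d = Dense(input_dim=3, output_size=2)
print(d(tf.ones([1, 3])))  # Shape (1, 2); exact values depend on the random init.
```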
@@ -77,22 +77,28 @@ class Module(tracking.AutoTrackable):
   `with self.name_scope:` or you can annotate methods (apart from `__init__`)
   with `@tf.Module.with_name_scope`.
 
-  ```python
-  class MLP(tf.Module):
-    def __init__(self, input_size, sizes, name=None):
-      super(MLP, self).__init__(name=name)
-      self.layers = []
-      with self.name_scope:
-        for size in sizes:
-          self.layers.append(Dense(input_size=input_size, output_size=size))
-          input_size = size
-
-    @tf.Module.with_name_scope
-    def __call__(self, x):
-      for layer in self.layers:
-        x = layer(x)
-      return x
-  ```
+  >>> class MLP(tf.Module):
+  ...   def __init__(self, input_size, sizes, name=None):
+  ...     super(MLP, self).__init__(name=name)
+  ...     self.layers = []
+  ...     with self.name_scope:
+  ...       for size in sizes:
+  ...         self.layers.append(Dense(input_dim=input_size, output_size=size))
+  ...         input_size = size
+  ...   @tf.Module.with_name_scope
+  ...   def __call__(self, x):
+  ...     for layer in self.layers:
+  ...       x = layer(x)
+  ...     return x
+
+  >>> module = MLP(input_size=5, sizes=[5, 5])
+  >>> module.variables
+  (<tf.Variable 'mlp/b:0' shape=(5,) dtype=float32, numpy=..., dtype=float32)>,
+  <tf.Variable 'mlp/w:0' shape=(5, 5) dtype=float32, numpy=...,
+  dtype=float32)>,
+  <tf.Variable 'mlp/b:0' shape=(5,) dtype=float32, numpy=..., dtype=float32)>,
+  <tf.Variable 'mlp/w:0' shape=(5, 5) dtype=float32, numpy=...,
+  dtype=float32)>)
   """
 
   # AutoTrackable adds object attributes that users will not expect us to
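Similarly, the `MLP` doctest added in this hunk can be exercised as a plain script (a sketch that reuses the `Dense` class from the previous snippet; it illustrates how `with self.name_scope:` and `@tf.Module.with_name_scope` prefix child variable names with `mlp/`):

```python
class MLP(tf.Module):
  """Multi-layer perceptron built from Dense modules."""

  def __init__(self, input_size, sizes, name=None):
    super(MLP, self).__init__(name=name)
    self.layers = []
    # Entering the module's name scope here means variables created by the
    # child Dense layers get names like 'mlp/w:0' and 'mlp/b:0'.
    with self.name_scope:
      for size in sizes:
        self.layers.append(Dense(input_dim=input_size, output_size=size))
        input_size = size

  @tf.Module.with_name_scope
  def __call__(self, x):
    for layer in self.layers:
      x = layer(x)
    return x


module = MLP(input_size=5, sizes=[5, 5])
print([v.name for v in module.variables])
# Expect one 'mlp/b:0' and one 'mlp/w:0' per Dense layer, matching the doctest output.
```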