commit
8187059712
@@ -278,7 +278,7 @@ class LabeledTensor(object):
   @tc.accepts(object, ops.Tensor,
               tc.Union(Axes, tc.Collection(tc.Union(string_types, AxisLike))))
   def __init__(self, tensor, axes):
-    """Construct a LabeledTenor.
+    """Construct a LabeledTensor.
 
     Args:
       tensor: The underlying tensor containing the data.
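For context, a minimal construction sketch (not part of this commit); it assumes a TF 1.x build with the contrib labeled_tensor module importable as lt, and relies on the @tc.accepts decorator above accepting plain strings as axis names:

# Hypothetical usage sketch; assumes TF 1.x with tf.contrib.labeled_tensor available.
import tensorflow as tf
import tensorflow.contrib.labeled_tensor as lt

data = tf.constant([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
# Axes given as plain strings, one name per tensor dimension.
example = lt.LabeledTensor(data, ['row', 'column'])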
|
@@ -451,7 +451,7 @@ Buffer<T>::~Buffer() {
 // default value for T.
 //
 // This routine is using the typed fields (float_val, etc.) in the
-// tenor proto as opposed to the untyped binary representation
+// tensor proto as opposed to the untyped binary representation
 // (tensor_content). This is used when we expect the TensorProto is
 // used by a client program which may not know how to encode a tensor
 // in the compact binary representation.
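To illustrate the distinction the comment draws, here is a small Python sketch (not from this change) that builds the same values once with the typed float_val field and once with the packed tensor_content bytes; it assumes direct access to the generated TensorProto protobuf classes:

# Sketch only: contrasts the two TensorProto encodings described above.
import numpy as np
from tensorflow.core.framework import tensor_pb2, types_pb2

# Typed-field encoding: one repeated field per dtype, easy for any client to build.
typed = tensor_pb2.TensorProto(dtype=types_pb2.DT_FLOAT)
typed.tensor_shape.dim.add(size=3)
typed.float_val.extend([1.0, 2.0, 3.0])

# Compact binary encoding: the same values packed into tensor_content as raw bytes.
packed = tensor_pb2.TensorProto(dtype=types_pb2.DT_FLOAT)
packed.tensor_shape.dim.add(size=3)
packed.tensor_content = np.asarray([1.0, 2.0, 3.0], dtype=np.float32).tobytes()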
|
@@ -282,7 +282,7 @@ Status SingleMachine::ResetSession() {
   // Make sure the session is properly closed
   TF_RETURN_IF_ERROR(Shutdown());
 
-  // Destroying the object deletes all its varibles as well. This is only true
+  // Destroying the object deletes all its variables as well. This is only true
   // for DirectSession.
   session_.reset();
 }
|
@@ -213,7 +213,7 @@ struct LaunchPoolingOp<SYCLDevice, T, MAX> {
   }
 };
 // MaxPool3DGrad SYCL kernel. Expects the number of threads to be equal to the
-// number of elements in the output backprop tenor (i.e. the number of elements
+// number of elements in the output backprop tensor (i.e. the number of elements
 // in the input data tensor).
 //
 // For each output backprop element we compute the possible window of values in
|
@@ -2889,7 +2889,7 @@ def elu(x, alpha=1.):
   """Exponential linear unit.
 
   Arguments:
-      x: A tenor or variable to compute the activation function for.
+      x: A tensor or variable to compute the activation function for.
       alpha: A scalar, slope of positive section.
 
   Returns:
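As a quick illustration of the backend wrapper documented above (not part of this commit), a usage sketch assuming the Keras backend is importable as tf.keras.backend:

# Sketch; assumes a TensorFlow build where tf.keras is available.
import tensorflow as tf
from tensorflow.keras import backend as K

x = K.constant([-2.0, 0.0, 3.0])
y = K.elu(x, alpha=1.0)
# ELU keeps positive inputs unchanged and maps negative inputs to
# alpha * (exp(x) - 1); K.eval materializes the result as a numpy array.
print(K.eval(y))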
|
@@ -1256,7 +1256,7 @@
     " \n",
     "But, here, we'll want to keep the session open so we can poke at values as we work out the details of training. The TensorFlow API includes a function for this, `InteractiveSession`.\n",
     "\n",
-    "We'll start by creating a session and initializing the varibles we defined above."
+    "We'll start by creating a session and initializing the variables we defined above."
    ]
   },
   {
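The pattern the notebook text describes looks roughly like this (a sketch, assuming the TF 1.x session API and that the model variables were already defined earlier in the notebook):

import tensorflow as tf

# Keep the session open so intermediate values can be inspected interactively.
sess = tf.InteractiveSession()
# Initialize whatever variables were defined above; InteractiveSession installs
# itself as the default session, so .run() needs no explicit session argument.
tf.global_variables_initializer().run()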
|