# STT-tensorflow/tensorflow/tools/api/golden/v1/tensorflow.losses.pbtxt
path: "tensorflow.losses"
tf_module {
  member {
    name: "Reduction"
    mtype: "<type \'type\'>"
  }
  member_method {
    name: "absolute_difference"
    argspec: "args=[\'labels\', \'predictions\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
  }
  member_method {
    name: "add_loss"
    argspec: "args=[\'loss\', \'loss_collection\'], varargs=None, keywords=None, defaults=[\'losses\'], "
  }
  member_method {
    name: "compute_weighted_loss"
    argspec: "args=[\'losses\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
  }
  member_method {
    name: "cosine_distance"
    argspec: "args=[\'labels\', \'predictions\', \'axis\', \'weights\', \'scope\', \'loss_collection\', \'reduction\', \'dim\'], varargs=None, keywords=None, defaults=[\'None\', \'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\', \'None\'], "
  }
  member_method {
    name: "get_losses"
    argspec: "args=[\'scope\', \'loss_collection\'], varargs=None, keywords=None, defaults=[\'None\', \'losses\'], "
  }
  member_method {
    name: "get_regularization_loss"
    argspec: "args=[\'scope\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'total_regularization_loss\'], "
  }
  member_method {
    name: "get_regularization_losses"
    argspec: "args=[\'scope\'], varargs=None, keywords=None, defaults=[\'None\'], "
  }
  member_method {
    name: "get_total_loss"
    argspec: "args=[\'add_regularization_losses\', \'name\', \'scope\'], varargs=None, keywords=None, defaults=[\'True\', \'total_loss\', \'None\'], "
  }
  member_method {
    name: "hinge_loss"
    argspec: "args=[\'labels\', \'logits\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
  }
  member_method {
    name: "huber_loss"
    argspec: "args=[\'labels\', \'predictions\', \'weights\', \'delta\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
  }
  member_method {
    name: "log_loss"
    argspec: "args=[\'labels\', \'predictions\', \'weights\', \'epsilon\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'1e-07\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
  }
  member_method {
    name: "mean_pairwise_squared_error"
    argspec: "args=[\'labels\', \'predictions\', \'weights\', \'scope\', \'loss_collection\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\'], "
  }
  member_method {
    name: "mean_squared_error"
    argspec: "args=[\'labels\', \'predictions\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
  }
  member_method {
    name: "sigmoid_cross_entropy"
    argspec: "args=[\'multi_class_labels\', \'logits\', \'weights\', \'label_smoothing\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
  }
  member_method {
    name: "softmax_cross_entropy"
    argspec: "args=[\'onehot_labels\', \'logits\', \'weights\', \'label_smoothing\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
  }
  member_method {
    name: "sparse_softmax_cross_entropy"
    argspec: "args=[\'labels\', \'logits\', \'weights\', \'scope\', \'loss_collection\', \'reduction\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \'losses\', \'weighted_sum_by_nonzero_weights\'], "
  }
}
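# Usage sketch (pbtxt comment, outside the generated data): one possible way
# the argspecs above map onto calls, assuming the v1 surface is reached
# through tf.compat.v1. The tensor values and the "mse_loss" scope name are
# illustrative assumptions, not part of this golden file.
#
#   import tensorflow.compat.v1 as tf
#   tf.disable_eager_execution()
#
#   labels = tf.constant([[1.0], [0.0], [3.0]])
#   predictions = tf.constant([[0.8], [0.1], [2.5]])
#
#   # Defaults mirror the argspec above: weights=1.0, scope=None,
#   # loss_collection='losses', reduction='weighted_sum_by_nonzero_weights'.
#   mse = tf.losses.mean_squared_error(labels, predictions, scope="mse_loss")
#
#   # get_total_loss sums the 'losses' collection (plus regularization losses
#   # when add_regularization_losses=True); 'scope' filters entries by name.
#   total = tf.losses.get_total_loss(add_regularization_losses=True,
#                                    name="total_loss", scope=None)
#
#   with tf.Session() as sess:
#     print(sess.run([mse, total]))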