Skip to content
Snippets Groups Projects
Commit a3095686 authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

Added summaries

parent 3af74bd5
No related branches found
No related tags found
1 merge request!21Resolve "Adopt to the Estimators API"
......@@ -51,6 +51,8 @@ def mean_cross_entropy_center_loss(logits, prelogits, labels, n_classes, alpha=0
with tf.variable_scope('cross_entropy_loss'):
loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=logits, labels=labels), name=tf.GraphKeys.LOSSES)
tf.summary.scalar('cross_entropy_loss', loss)
# Appending center loss
with tf.variable_scope('center_loss'):
......@@ -65,11 +67,13 @@ def mean_cross_entropy_center_loss(logits, prelogits, labels, n_classes, alpha=0
centers = tf.scatter_sub(centers, labels, diff)
center_loss = tf.reduce_mean(tf.square(prelogits - centers_batch))
tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, center_loss * factor)
tf.summary.scalar('center_loss', center_loss)
# Adding the regularizers in the loss
with tf.variable_scope('total_loss'):
regularization_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
total_loss = tf.add_n([loss] + regularization_losses, name=tf.GraphKeys.LOSSES)
tf.summary.scalar('total_loss', total_loss)
loss = dict()
loss['loss'] = total_loss
......
0% Loading — or view the raw diff.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment