Commit 036a308f authored by Amir MOHAMMADI

improve logging

parent 54a80323
Merge request !79: Add keras-based models, add pixel-wise loss, other improvements
@@ -13,12 +13,12 @@ def mean_cross_entropy_loss(logits, labels, add_regularization_losses=True):
     """
     Simple CrossEntropy loss.
     Basically it wrapps the function tf.nn.sparse_softmax_cross_entropy_with_logits.

     **Parameters**

       logits:
       labels:
       add_regularization_losses: Regulize the loss???

     """

     with tf.variable_scope('cross_entropy_loss'):
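This hunk only re-renders the docstring, but for context the whole helper is thin: it averages `tf.nn.sparse_softmax_cross_entropy_with_logits` into a scalar and optionally folds in the graph's regularization losses. A TF1-style sketch reconstructed from the surrounding hunks (illustrative, not copied from the repository):

```python
import tensorflow as tf  # TensorFlow 1.x API assumed throughout

def mean_cross_entropy_loss_sketch(logits, labels, add_regularization_losses=True):
    """Illustrative reconstruction; the real function lives in bob.learn.tensorflow."""
    with tf.variable_scope('cross_entropy_loss'):
        # Average the per-example cross entropy into a single scalar.
        loss = tf.reduce_mean(
            tf.nn.sparse_softmax_cross_entropy_with_logits(
                logits=logits, labels=labels),
            name="cross_entropy_loss")
        if add_regularization_losses:
            # Fold in any weight-decay terms registered on the graph.
            regularization_losses = tf.get_collection(
                tf.GraphKeys.REGULARIZATION_LOSSES)
            loss = tf.add_n([loss] + regularization_losses, name="total_loss")
        return loss
```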
@@ -50,7 +50,7 @@ def mean_cross_entropy_center_loss(logits,
     """
     Implementation of the CrossEntropy + Center Loss from the paper
     "A Discriminative Feature Learning Approach for Deep Face Recognition"(http://ydwen.github.io/papers/WenECCV16.pdf)

     **Parameters**

       logits:
       prelogits:
@@ -67,7 +67,7 @@ def mean_cross_entropy_center_loss(logits,
                 logits=logits, labels=labels),
             name="cross_entropy_loss")
         tf.add_to_collection(tf.GraphKeys.LOSSES, cross_loss)
-        tf.summary.scalar('cross_entropy_loss', cross_loss)
+        tf.summary.scalar('loss_cross_entropy', cross_loss)

     # Appending center loss
     with tf.variable_scope('center_loss'):
@@ -79,14 +79,14 @@ def mean_cross_entropy_center_loss(logits,
             initializer=tf.constant_initializer(0),
             trainable=False)

-        #label = tf.reshape(labels, [-1])
+        # label = tf.reshape(labels, [-1])
         centers_batch = tf.gather(centers, labels)
         diff = (1 - alpha) * (centers_batch - prelogits)
         centers = tf.scatter_sub(centers, labels, diff)
         center_loss = tf.reduce_mean(tf.square(prelogits - centers_batch))
         tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES,
                              center_loss * factor)
-        tf.summary.scalar('center_loss', center_loss)
+        tf.summary.scalar('loss_center', center_loss)

     # Adding the regularizers in the loss
     with tf.variable_scope('total_loss'):
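The center-loss lines in this hunk follow Wen et al.'s update rule: each class center c is pulled toward that class's features x via c ← α·c + (1 − α)·x, while the loss penalizes the squared distance to the pre-update centers. A minimal NumPy sketch of just that arithmetic (illustrative shapes and names, not from the commit):

```python
import numpy as np

alpha = 0.9                            # how slowly the centers move
centers = np.zeros((10, 128))          # one center per class
prelogits = np.random.randn(32, 128)   # batch of embeddings
labels = np.random.randint(0, 10, 32)  # class of each embedding

centers_batch = centers[labels]                   # tf.gather equivalent
diff = (1 - alpha) * (centers_batch - prelogits)
np.subtract.at(centers, labels, diff)             # tf.scatter_sub equivalent
# The loss uses the centers as they were *before* the update, as in the TF code.
center_loss = np.mean(np.square(prelogits - centers_batch))
```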
@@ -95,7 +95,7 @@ def mean_cross_entropy_center_loss(logits,
         total_loss = tf.add_n(
             [cross_loss] + regularization_losses, name="total_loss")
         tf.add_to_collection(tf.GraphKeys.LOSSES, total_loss)
-        tf.summary.scalar('total_loss', total_loss)
+        tf.summary.scalar('loss_total', total_loss)

     loss = dict()
     loss['loss'] = total_loss
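A note on the three renames in the hunks above: giving every scalar the common `loss_` prefix ('loss_cross_entropy', 'loss_center', 'loss_total') makes the related curves sort and group together in TensorBoard's scalar dashboard, which appears to be the point of this "improve logging" commit.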
...
@@ -3,16 +3,15 @@
 # @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>

 import logging
-logger = logging.getLogger("bob.learn.tensorflow")

 import tensorflow as tf
-from bob.learn.tensorflow.utils import compute_euclidean_distance

+from bob.learn.tensorflow.utils import (
+    compute_euclidean_distance,
+)
+
+logger = logging.getLogger(__name__)

-def contrastive_loss(left_embedding,
-                     right_embedding,
-                     labels,
-                     contrastive_margin=2.0):
+
+def contrastive_loss(left_embedding, right_embedding, labels, contrastive_margin=2.0):
     """
     Compute the contrastive loss as in
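The import shuffle above also swaps the hard-coded logger name for `logging.getLogger(__name__)`. Because `__name__` is the dotted module path, the logger automatically becomes a child of the `bob.learn.tensorflow` package logger and inherits its handlers and level. A minimal sketch (the module path in the comment is hypothetical):

```python
import logging

# If this file were bob/learn/tensorflow/loss/contrastive.py, __name__ would be
# "bob.learn.tensorflow.loss.contrastive", so this logger sits under the
# "bob.learn.tensorflow" logger in the logging hierarchy with no hard-coded
# string to keep in sync when the module moves.
logger = logging.getLogger(__name__)
logger.debug("using contrastive_margin=%s", 2.0)
```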
@@ -49,18 +48,16 @@ def contrastive_loss(left_embedding,
     with tf.name_scope("within_class"):
         one = tf.constant(1.0)
-        within_class = tf.multiply(one - labels,
-                                   tf.square(d))  # (1-Y)*(d^2)
-        within_class_loss = tf.reduce_mean(
-            within_class, name="within_class")
+        within_class = tf.multiply(one - labels, tf.square(d))  # (1-Y)*(d^2)
+        within_class_loss = tf.reduce_mean(within_class, name="within_class")
         tf.add_to_collection(tf.GraphKeys.LOSSES, within_class_loss)

     with tf.name_scope("between_class"):
         max_part = tf.square(tf.maximum(contrastive_margin - d, 0))
         between_class = tf.multiply(
-            labels, max_part)  # (Y) * max((margin - d)^2, 0)
-        between_class_loss = tf.reduce_mean(
-            between_class, name="between_class")
+            labels, max_part
+        )  # (Y) * max((margin - d)^2, 0)
+        between_class_loss = tf.reduce_mean(between_class, name="between_class")
         tf.add_to_collection(tf.GraphKeys.LOSSES, between_class_loss)

     with tf.name_scope("total_loss"):
@@ -68,8 +65,8 @@ def contrastive_loss(left_embedding,
         loss = tf.reduce_mean(loss, name="contrastive_loss")
         tf.add_to_collection(tf.GraphKeys.LOSSES, loss)
-        tf.summary.scalar('contrastive_loss', loss)
-        tf.summary.scalar('between_class', between_class_loss)
-        tf.summary.scalar('within_class', within_class_loss)
+        tf.summary.scalar("contrastive_loss", loss)
+        tf.summary.scalar("between_class", between_class_loss)
+        tf.summary.scalar("within_class", within_class_loss)

     return loss
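Taken together, the reformatted lines compute the classic contrastive loss of Hadsell, Chopra and LeCun: L = (1 − Y)·d² + Y·max(margin − d, 0)², averaged over the batch, where in this code's convention Y = 0 marks a genuine (same-class) pair. A minimal NumPy sketch of the same arithmetic (illustrative, not from the repository):

```python
import numpy as np

def contrastive_loss_np(left, right, labels, margin=2.0):
    d = np.linalg.norm(left - right, axis=1)       # Euclidean distance per pair
    within_class = (1 - labels) * d ** 2           # pull genuine pairs together
    between_class = labels * np.maximum(margin - d, 0) ** 2  # push impostors apart
    return np.mean(within_class + between_class)

left, right = np.random.randn(4, 128), np.random.randn(4, 128)
labels = np.array([0.0, 1.0, 0.0, 1.0])  # 0 = genuine pair, 1 = impostor pair
print(contrastive_loss_np(left, right, labels))
```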