Commit ea28b602 authored by Amir MOHAMMADI

improve logging in losses

parent e2842b69
@@ -4,8 +4,8 @@
 import logging
 import tensorflow as tf
-logger = logging.getLogger("bob.learn.tensorflow")
 import functools
+logger = logging.getLogger("bob.learn.tensorflow")


 def content_loss(noises, content_features):
@@ -24,7 +24,7 @@ def content_loss(noises, content_features):
     ----------
     noises: :any:`list`
-      A list of tf.Tensor containing all the noises convolved
+      A list of tf.Tensor containing all the noises convolved
     content_features: :any:`list`
       A list of numpy.array containing all the content_features convolved
@@ -36,7 +36,7 @@ def content_loss(noises, content_features):
         content_losses.append((2 * tf.nn.l2_loss(n - c) / c.size))
     return functools.reduce(tf.add, content_losses)


 def linear_gram_style_loss(noises, gram_style_features):
     """
@@ -89,7 +89,7 @@ def denoising_loss(noise):
     noise_y_size = _tensor_size(noise[:,1:,:,:])
     noise_x_size = _tensor_size(noise[:,:,1:,:])
-    denoise_loss = 2 * ( (tf.nn.l2_loss(noise[:,1:,:,:] - noise[:,:shape[1]-1,:,:]) / noise_y_size) +
+    denoise_loss = 2 * ( (tf.nn.l2_loss(noise[:,1:,:,:] - noise[:,:shape[1]-1,:,:]) / noise_y_size) +
                      (tf.nn.l2_loss(noise[:,:,1:,:] - noise[:,:,:shape[2]-1,:]) / noise_x_size))

     return denoise_loss
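
Side note on the hunks above (the content/style/denoising losses): a minimal usage sketch of content_loss as it reads after this commit. The layer shapes and the constant feature values below are hypothetical; only the per-layer term and the functools.reduce step are taken from the diff.

import functools
import numpy as np
import tensorflow as tf

# Hypothetical per-layer activations: `noises` are tf.Tensor outputs for the
# image being optimized, `content_features` are precomputed numpy arrays.
noises = [tf.zeros([1, 8, 8, 64]), tf.zeros([1, 4, 4, 128])]
content_features = [np.ones((1, 8, 8, 64), dtype="float32"),
                    np.ones((1, 4, 4, 128), dtype="float32")]

# Per-layer term: 2 * ||noise - content||^2 / number_of_elements,
# summed over layers with functools.reduce, as in the hunk above.
content_losses = []
for n, c in zip(noises, content_features):
    content_losses.append(2 * tf.nn.l2_loss(n - c) / c.size)
content = functools.reduce(tf.add, content_losses)
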
@@ -57,24 +57,19 @@ def triplet_loss(anchor_embedding,
     with tf.name_scope("TripletLoss"):
         # Between
         between_class_loss = tf.reduce_mean(d_negative)
-        tf.summary.scalar('between_class', between_class_loss)
+        tf.summary.scalar('loss_between_class', between_class_loss)
         tf.add_to_collection(tf.GraphKeys.LOSSES, between_class_loss)

         # Within
         within_class_loss = tf.reduce_mean(d_positive)
-        tf.summary.scalar('within_class', within_class_loss)
+        tf.summary.scalar('loss_within_class', within_class_loss)
         tf.add_to_collection(tf.GraphKeys.LOSSES, within_class_loss)

         # Total loss
         loss = tf.reduce_mean(
             tf.maximum(basic_loss, 0.0), 0, name="total_loss")
         tf.add_to_collection(tf.GraphKeys.LOSSES, loss)
+        tf.summary.scalar('loss_raw', loss)

-        # Appending the regularization loss
-        #regularization_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
-        #loss = tf.add_n([loss] + regularization_losses, name="total_loss")
-        #tf.summary.scalar('loss', loss)
+        tf.summary.scalar('loss_triplet', loss)

         return loss
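
For context on the renamed summaries in the hunk above, a minimal sketch of how the logging now sits inside a FaceNet-style triplet loss (TensorFlow 1.x graph API, as used by this file). The distance and margin computations (d_positive, d_negative, basic_loss) are assumptions for illustration; only the name scope, the summary names, and the LOSSES-collection calls are taken from the diff.

import tensorflow as tf  # TensorFlow 1.x API

def triplet_loss_sketch(anchor_embedding, positive_embedding,
                        negative_embedding, margin=0.2):
    # Assumed squared Euclidean distances; not shown in this commit.
    d_positive = tf.reduce_sum(tf.square(anchor_embedding - positive_embedding), 1)
    d_negative = tf.reduce_sum(tf.square(anchor_embedding - negative_embedding), 1)
    basic_loss = d_positive - d_negative + margin

    with tf.name_scope("TripletLoss"):
        # Between-class term, now logged as 'loss_between_class'.
        between_class_loss = tf.reduce_mean(d_negative)
        tf.summary.scalar('loss_between_class', between_class_loss)
        tf.add_to_collection(tf.GraphKeys.LOSSES, between_class_loss)

        # Within-class term, now logged as 'loss_within_class'.
        within_class_loss = tf.reduce_mean(d_positive)
        tf.summary.scalar('loss_within_class', within_class_loss)
        tf.add_to_collection(tf.GraphKeys.LOSSES, within_class_loss)

        # Total hinge loss, logged under both 'loss_raw' and 'loss_triplet'.
        loss = tf.reduce_mean(tf.maximum(basic_loss, 0.0), 0, name="total_loss")
        tf.add_to_collection(tf.GraphKeys.LOSSES, loss)
        tf.summary.scalar('loss_raw', loss)
        tf.summary.scalar('loss_triplet', loss)
        return loss

Presumably the loss_ prefix makes these scalars easier to pick out with TensorBoard's tag filter.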