bob / bob.learn.tensorflow / Commits
Commit 036a308f, authored 5 years ago by Amir MOHAMMADI
improve logging
parent 54a80323
1 merge request: !79 Add keras-based models, add pixel-wise loss, other improvements
Showing 2 changed files with 20 additions and 23 deletions:

  bob/learn/tensorflow/loss/BaseLoss.py         (+7, −7)
  bob/learn/tensorflow/loss/ContrastiveLoss.py  (+13, −16)
bob/learn/tensorflow/loss/BaseLoss.py  (+7, −7)
@@ -13,12 +13,12 @@ def mean_cross_entropy_loss(logits, labels, add_regularization_losses=True):
     """
     Simple CrossEntropy loss.
     Basically it wrapps the function tf.nn.sparse_softmax_cross_entropy_with_logits.

     **Parameters**

     logits:
     labels:
     add_regularization_losses: Regulize the loss???

     """
     with tf.variable_scope('cross_entropy_loss'):
@@ -50,7 +50,7 @@ def mean_cross_entropy_center_loss(logits,
     """
     Implementation of the CrossEntropy + Center Loss from the paper
     "A Discriminative Feature Learning Approach for Deep Face Recognition"
     (http://ydwen.github.io/papers/WenECCV16.pdf)

     **Parameters**

     logits:
     prelogits:
@@ -67,7 +67,7 @@ def mean_cross_entropy_center_loss(logits,
             logits=logits, labels=labels),
         name="cross_entropy_loss")
     tf.add_to_collection(tf.GraphKeys.LOSSES, cross_loss)
-    tf.summary.scalar('cross_entropy_loss', cross_loss)
+    tf.summary.scalar('loss_cross_entropy', cross_loss)

     # Appending center loss
     with tf.variable_scope('center_loss'):
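This file (and ContrastiveLoss.py below) registers every term via tf.add_to_collection(tf.GraphKeys.LOSSES, ...). As a minimal TF1-style sketch, not part of this commit, anything placed in that collection can be fetched back when assembling the training objective:

import tensorflow as tf

# Everything registered with tf.add_to_collection(tf.GraphKeys.LOSSES, ...)
# can be retrieved later, together with any regularization terms, and summed
# into a single objective tensor:
losses = tf.get_collection(tf.GraphKeys.LOSSES)
reg_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
objective = tf.add_n(losses + reg_losses, name="objective")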
@@ -79,14 +79,14 @@ def mean_cross_entropy_center_loss(logits,
             initializer=tf.constant_initializer(0),
             trainable=False)

-        #label = tf.reshape(labels, [-1])
+        # label = tf.reshape(labels, [-1])
         centers_batch = tf.gather(centers, labels)
         diff = (1 - alpha) * (centers_batch - prelogits)
         centers = tf.scatter_sub(centers, labels, diff)
         center_loss = tf.reduce_mean(tf.square(prelogits - centers_batch))
         tf.add_to_collection(tf.GraphKeys.REGULARIZATION_LOSSES, center_loss * factor)
-        tf.summary.scalar('center_loss', center_loss)
+        tf.summary.scalar('loss_center', center_loss)

     # Adding the regularizers in the loss
     with tf.variable_scope('total_loss'):
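The center-loss block above follows Wen et al.: each class keeps a running center, tf.gather picks the centers for the batch, and tf.scatter_sub applies diff = (1 - alpha) * (centers_batch - prelogits), i.e. the moving-average update c_y <- alpha * c_y + (1 - alpha) * x. A plain NumPy sketch of the same computation (illustrative function name, not part of the diff):

import numpy as np

def update_centers_and_loss(centers, prelogits, labels, alpha):
    """NumPy rendering of the tf.gather / tf.scatter_sub block above.
    centers: (n_classes, dim); prelogits: (batch, dim); labels: (batch,)."""
    centers_batch = centers[labels]                # tf.gather(centers, labels)
    diff = (1 - alpha) * (centers_batch - prelogits)
    np.subtract.at(centers, labels, diff)          # tf.scatter_sub(centers, labels, diff)
    # mean squared distance of each embedding to its (pre-update) class center
    center_loss = np.mean(np.square(prelogits - centers_batch))
    return centers, center_loss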
@@ -95,7 +95,7 @@ def mean_cross_entropy_center_loss(logits,
         total_loss = tf.add_n([cross_loss] + regularization_losses, name="total_loss")
         tf.add_to_collection(tf.GraphKeys.LOSSES, total_loss)
-        tf.summary.scalar('total_loss', total_loss)
+        tf.summary.scalar('loss_total', total_loss)

     loss = dict()
     loss['loss'] = total_loss
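The renames above ('cross_entropy_loss' to 'loss_cross_entropy', 'center_loss' to 'loss_center', 'total_loss' to 'loss_total') appear to be the point of the "improve logging" commit message: with a shared 'loss_' prefix, the three scalars sort next to each other in TensorBoard's scalar dashboard instead of being scattered alphabetically.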
bob/learn/tensorflow/loss/ContrastiveLoss.py  (+13, −16)
@@ -3,16 +3,15 @@
 # @author: Tiago de Freitas Pereira <tiago.pereira@idiap.ch>

 import logging
-logger = logging.getLogger("bob.learn.tensorflow")
 import tensorflow as tf
-from bob.learn.tensorflow.utils import (compute_euclidean_distance,)
+from bob.learn.tensorflow.utils import compute_euclidean_distance
+logger = logging.getLogger(__name__)

-def contrastive_loss(left_embedding,
-                     right_embedding,
-                     labels,
-                     contrastive_margin=2.0):
+def contrastive_loss(
+    left_embedding, right_embedding, labels, contrastive_margin=2.0
+):
     """
     Compute the contrastive loss as in
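The logger change is the other half of "improve logging": the logger is now named via __name__ rather than a hard-coded string. A small stdlib sketch (not from the diff) of why that pattern works; here __name__ would resolve to "bob.learn.tensorflow.loss.ContrastiveLoss", which still nests under the package hierarchy:

import logging

logging.basicConfig(level=logging.WARNING)

# getLogger(__name__) yields a logger named after the module path; because
# "bob.learn.tensorflow.loss.ContrastiveLoss" is a child of
# "bob.learn.tensorflow", configuring the package logger still applies:
logger = logging.getLogger("bob.learn.tensorflow.loss.ContrastiveLoss")
logging.getLogger("bob.learn.tensorflow").setLevel(logging.DEBUG)
logger.debug("visible: the module logger inherits the package's DEBUG level")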
@@ -49,18 +48,16 @@ def contrastive_loss(left_embedding,
     with tf.name_scope("within_class"):
         one = tf.constant(1.0)
-        within_class = tf.multiply(one - labels,
-                                   tf.square(d))  # (1-Y)*(d^2)
-        within_class_loss = tf.reduce_mean(within_class,
-                                           name="within_class")
+        within_class = tf.multiply(one - labels, tf.square(d))  # (1-Y)*(d^2)
+        within_class_loss = tf.reduce_mean(within_class, name="within_class")
         tf.add_to_collection(tf.GraphKeys.LOSSES, within_class_loss)

     with tf.name_scope("between_class"):
         max_part = tf.square(tf.maximum(contrastive_margin - d, 0))
         between_class = tf.multiply(
-            labels, max_part)  # (Y) * max((margin - d)^2, 0)
-        between_class_loss = tf.reduce_mean(between_class,
-                                            name="between_class")
+            labels, max_part
+        )  # (Y) * max((margin - d)^2, 0)
+        between_class_loss = tf.reduce_mean(between_class, name="between_class")
         tf.add_to_collection(tf.GraphKeys.LOSSES, between_class_loss)

     with tf.name_scope("total_loss"):
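Together the two name scopes implement the classic contrastive loss, L = (1 - Y) * d^2 + Y * max(margin - d, 0)^2, where the (1-Y)/(Y) comments above imply Y = 0 for genuine (same-class) pairs and Y = 1 for impostor pairs. A self-contained NumPy sketch of the whole computation (illustrative name, not part of the diff):

import numpy as np

def contrastive_loss_np(left, right, labels, margin=2.0):
    """labels: 0.0 for genuine (same-class) pairs, 1.0 for impostor pairs,
    following the (1-Y) / (Y) comments in the diff above."""
    d = np.linalg.norm(left - right, axis=1)                   # Euclidean distance per pair
    within_class = (1 - labels) * d ** 2                       # (1-Y) * d^2: pull genuine pairs together
    between_class = labels * np.maximum(margin - d, 0) ** 2    # Y * max(margin - d, 0)^2: push impostors apart
    return np.mean(within_class + between_class)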
@@ -68,8 +65,8 @@ def contrastive_loss(left_embedding,
         loss = tf.reduce_mean(loss, name="contrastive_loss")
         tf.add_to_collection(tf.GraphKeys.LOSSES, loss)

-    tf.summary.scalar('contrastive_loss', loss)
-    tf.summary.scalar('between_class', between_class_loss)
-    tf.summary.scalar('within_class', within_class_loss)
+    tf.summary.scalar("contrastive_loss", loss)
+    tf.summary.scalar("between_class", between_class_loss)
+    tf.summary.scalar("within_class", within_class_loss)

     return loss