bob / bob.learn.tensorflow / Commits / 7e2d37d7

Commit 7e2d37d7, authored May 02, 2019 by Amir MOHAMMADI

    Use tf.contrib.layers.optimize_loss in estimators

parent 9433478b
Changes: 7 files
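For context, tf.contrib.layers.optimize_loss wraps the usual optimizer.minimize call and adds learning-rate handling, optional gradient clipping, and gradient summaries. A minimal sketch of the call shape these estimators now build (TensorFlow 1.x; the constant labels/predictions are illustrative, and passing learning_rate=None is only valid because the optimizer is already an Optimizer instance):

    import tensorflow as tf  # TensorFlow 1.x, where tf.contrib still exists

    labels = tf.constant([[1.0], [2.0]])
    predictions = tf.constant([[0.5], [2.5]])
    loss = tf.losses.mean_squared_error(labels=labels, predictions=predictions)

    train_op = tf.contrib.layers.optimize_loss(
        loss=loss,
        global_step=tf.train.get_or_create_global_step(),
        learning_rate=None,  # None defers the learning rate to the optimizer instance
        optimizer=tf.train.AdamOptimizer(),
    )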
bob/learn/tensorflow/estimators/Logits.py
    (diff collapsed)
bob/learn/tensorflow/estimators/Regressor.py

@@ -12,134 +12,135 @@ class Regressor(estimator.Estimator):
"""An estimator for regression problems"""
def
__init__
(
self
,
architecture
,
optimizer
=
tf
.
train
.
AdamOptimizer
(),
loss_op
=
tf
.
losses
.
mean_squared_error
,
label_dimension
=
1
,
config
=
None
,
model_dir
=
None
,
apply_moving_averages
=
True
,
add_regularization_losses
=
True
,
extra_checkpoint
=
None
,
add_histograms
=
None
,
self
,
architecture
,
optimizer
=
tf
.
train
.
AdamOptimizer
(),
loss_op
=
tf
.
losses
.
mean_squared_error
,
label_dimension
=
1
,
config
=
None
,
model_dir
=
None
,
apply_moving_averages
=
True
,
add_regularization_losses
=
True
,
extra_checkpoint
=
None
,
add_histograms
=
None
,
optimize_loss
=
tf
.
contrib
.
layers
.
optimize_loss
,
optimize_loss_learning_rate
=
None
,
):
         self.architecture = architecture
         self.label_dimension = label_dimension
         self.loss_op = loss_op
         self.add_regularization_losses = add_regularization_losses
         self.apply_moving_averages = apply_moving_averages

-        if apply_moving_averages:
-            logger.info("Encapsulating the optimizer with "
-                        "the MovingAverageOptimizer")
+        if self.apply_moving_averages and isinstance(optimizer, tf.train.Optimizer):
+            logger.info(
+                "Encapsulating the optimizer with " "the MovingAverageOptimizer"
+            )
             optimizer = tf.contrib.opt.MovingAverageOptimizer(optimizer)

         self.optimizer = optimizer
+        self.optimize_loss = optimize_loss
+        self.optimize_loss_learning_rate = optimize_loss_learning_rate
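The new isinstance guard matters because optimize_loss also accepts optimizer classes and names, but only a ready-built tf.train.Optimizer instance can be wrapped for weight averaging. A hedged sketch of the two paths (names illustrative):

    import tensorflow as tf

    # An Optimizer instance is wrapped, so checkpoints also carry moving
    # averages of the trained variables:
    wrapped = tf.contrib.opt.MovingAverageOptimizer(tf.train.AdamOptimizer())

    # A bare class is not an instance, so the guard skips the wrapping and
    # optimize_loss later instantiates/uses it as-is:
    opt_class = tf.train.GradientDescentOptimizer
    assert not isinstance(opt_class, tf.train.Optimizer)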
         def _model_fn(features, labels, mode, config):

             check_features(features)
-            data = features['data']
-            key = features['key']
+            data = features["data"]
+            key = features["key"]

             # Checking if we have some variables/scope that we may want to shut
             # down
-            trainable_variables = get_trainable_variables(
-                extra_checkpoint, mode=mode)
+            trainable_variables = get_trainable_variables(
+                extra_checkpoint, mode=mode
+            )

             prelogits = self.architecture(
-                data, mode=mode,
-                trainable_variables=trainable_variables)[0]
+                data, mode=mode, trainable_variables=trainable_variables
+            )[0]
             logits = append_logits(
-                prelogits, label_dimension,
-                trainable_variables=trainable_variables)
+                prelogits, label_dimension, trainable_variables=trainable_variables
+            )

-            predictions = {
-                "predictions": logits,
-                "key": key,
-            }
+            predictions = {"predictions": logits, "key": key}
             if mode == tf.estimator.ModeKeys.PREDICT:
-                return tf.estimator.EstimatorSpec(
-                    mode=mode, predictions=predictions)
+                return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)
             # in PREDICT mode logits rank must be 2 but in EVAL and TRAIN the
             # rank should be 1 for the loss function!
-            predictions['predictions'] = tf.squeeze(logits)
+            predictions["predictions"] = tf.squeeze(logits)

             predictions_op = predictions["predictions"]
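The squeeze exists because append_logits produces rank-2 output of shape (batch, label_dimension), while the loss and the RMSE metric below expect rank-1 tensors when label_dimension is 1. A tiny illustration (shapes assumed):

    logits = tf.zeros([32, 1])     # rank 2, as kept for PREDICT mode
    squeezed = tf.squeeze(logits)  # rank 1, shape (32,), what the loss expects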
             # Calculate root mean squared error
             rmse = tf.metrics.root_mean_squared_error(labels, predictions_op)
-            metrics = {'rmse': rmse}
+            metrics = {"rmse": rmse}
             if mode == tf.estimator.ModeKeys.EVAL:
-                self.loss = self._get_loss(
-                    predictions=predictions_op, labels=labels)
+                self.loss = self._get_loss(predictions=predictions_op, labels=labels)
                 return tf.estimator.EstimatorSpec(
                     mode=mode,
                     predictions=predictions,
                     loss=self.loss,
                     train_op=None,
-                    eval_metric_ops=metrics)
+                    eval_metric_ops=metrics,
+                )
             # restore the model from an extra_checkpoint
             if extra_checkpoint is not None:
-                if 'Logits/' not in extra_checkpoint["scopes"]:
+                if "Logits/" not in extra_checkpoint["scopes"]:
                     logger.warning(
                         '"Logits/" (which are automatically added by this '
-                        'Regressor class are not in the scopes of '
-                        'extra_checkpoint). Did you mean to restore the '
-                        'Logits variables as well?'
-                    )
+                        "Regressor class are not in the scopes of "
+                        "extra_checkpoint). Did you mean to restore the "
+                        "Logits variables as well?"
+                    )
                 tf.train.init_from_checkpoint(
                     ckpt_dir_or_file=extra_checkpoint["checkpoint_path"],
                     assignment_map=extra_checkpoint["scopes"],
                 )
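As used here, extra_checkpoint is a plain dictionary carrying a checkpoint location and a variable-scope assignment map for tf.train.init_from_checkpoint. A hypothetical value (path and scope names are made up):

    extra_checkpoint = {
        "checkpoint_path": "/path/to/pretrained/model",  # hypothetical path
        "scopes": {"MyBackbone/": "MyBackbone/"},        # source scope -> target scope
        # "Logits/" is absent from "scopes", so the warning above would fire
    }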
-            global_step = tf.train.get_or_create_global_step()
-
             # Some layer like tf.layers.batch_norm need this:
             update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
             with tf.control_dependencies(update_ops), tf.name_scope('train'):
                 # Calculate Loss
-                self.loss = self._get_loss(
-                    predictions=predictions_op, labels=labels)
+                self.loss = self._get_loss(predictions=predictions_op, labels=labels)

                 # Compute the moving average of all individual losses
                 # and the total loss.
-                loss_averages = tf.train.ExponentialMovingAverage(
-                    0.9, name='avg')
-                loss_averages_op = loss_averages.apply(
-                    tf.get_collection(tf.GraphKeys.LOSSES))
+                loss_averages = tf.train.ExponentialMovingAverage(0.9, name="avg")
+                loss_averages_op = loss_averages.apply(
+                    tf.get_collection(tf.GraphKeys.LOSSES)
+                )

-                train_op = tf.group(
-                    self.optimizer.minimize(self.loss, global_step=global_step),
-                    loss_averages_op)
+                train_op = tf.group(
+                    self.optimize_loss(
+                        loss=self.loss,
+                        global_step=tf.train.get_or_create_global_step(),
+                        optimizer=self.optimizer,
+                        learning_rate=self.optimize_loss_learning_rate,
+                    ),
+                    loss_averages_op,
+                )

-                # Get the moving average saver after optimizer.minimize is
-                # called
-                if apply_moving_averages:
-                    self.saver, self.scaffold = moving_average_scaffold(
-                        self.optimizer, config)
+                # Get the moving average saver after optimizer.minimize is called
+                if self.apply_moving_averages:
+                    self.saver, self.scaffold = moving_average_scaffold(
+                        self.optimizer.optimizer
+                        if hasattr(self.optimizer, "optimizer")
+                        else self.optimizer,
+                        config,
+                    )
                 else:
                     self.saver, self.scaffold = None, None

                 # Log rmse and loss
-                with tf.name_scope('train_metrics'):
-                    tf.summary.scalar('rmse', rmse[1])
+                with tf.name_scope("train_metrics"):
+                    tf.summary.scalar("rmse", rmse[1])
                     for l in tf.get_collection(tf.GraphKeys.LOSSES):
                         tf.summary.scalar(
-                            l.op.name + "_averaged", loss_averages.average(l))
+                            l.op.name + "_averaged", loss_averages.average(l)
+                        )

                 # add histograms summaries
-                if add_histograms == 'all':
+                if add_histograms == "all":
                     for v in tf.all_variables():
                         tf.summary.histogram(v.name, v)
-                elif add_histograms == 'train':
+                elif add_histograms == "train":
                     for v in tf.trainable_variables():
                         tf.summary.histogram(v.name, v)
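The ExponentialMovingAverage here smooths the values written to TensorBoard, not the model weights (weight averaging is the MovingAverageOptimizer's job). A tiny sketch of the logging pattern, with some_loss standing in for an entry of the LOSSES collection:

    some_loss = tf.reduce_mean(tf.zeros([4]))       # stand-in loss tensor
    loss_averages = tf.train.ExponentialMovingAverage(0.9, name="avg")
    maintain_op = loss_averages.apply([some_loss])  # update the shadow value each step
    smoothed = loss_averages.average(some_loss)     # read back the smoothed scalar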
             return tf.estimator.EstimatorSpec(
                 mode=mode,

@@ -147,21 +148,20 @@ class Regressor(estimator.Estimator):
                 loss=self.loss,
                 train_op=train_op,
                 eval_metric_ops=metrics,
-                scaffold=self.scaffold)
+                scaffold=self.scaffold,
+            )
         super(Regressor, self).__init__(
-            model_fn=_model_fn, model_dir=model_dir, config=config)
+            model_fn=_model_fn, model_dir=model_dir, config=config
+        )
     def _get_loss(self, predictions, labels):
-        main_loss = self.loss_op(
-            predictions=predictions, labels=labels)
+        main_loss = self.loss_op(predictions=predictions, labels=labels)
         if not self.add_regularization_losses:
             return main_loss
-        regularization_losses = tf.get_collection(
-            tf.GraphKeys.REGULARIZATION_LOSSES)
-        regularization_losses = [
-            tf.cast(l, main_loss.dtype) for l in regularization_losses]
-        total_loss = tf.add_n(
-            [main_loss] + regularization_losses, name="total_loss")
+        regularization_losses = tf.get_collection(
+            tf.GraphKeys.REGULARIZATION_LOSSES
+        )
+        regularization_losses = [
+            tf.cast(l, main_loss.dtype) for l in regularization_losses
+        ]
+        total_loss = tf.add_n(
+            [main_loss] + regularization_losses, name="total_loss"
+        )
         return total_loss
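Regularization losses land in tf.GraphKeys.REGULARIZATION_LOSSES when layers are built with a regularizer, which is what makes the tf.add_n above pick them up. A minimal sketch (layer shape and factor are illustrative):

    dense = tf.layers.dense(
        tf.zeros([4, 8]),
        units=1,
        kernel_regularizer=tf.contrib.layers.l2_regularizer(1e-4),
    )
    reg = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)  # now non-empty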
bob/learn/tensorflow/estimators/Siamese.py

@@ -9,7 +9,7 @@ from . import check_features, get_trainable_variables
 import logging

-logger = logging.getLogger("bob.learn")
+logger = logging.getLogger(__name__)


 class Siamese(estimator.Estimator):

@@ -22,25 +22,34 @@ class Siamese(estimator.Estimator):
     See :any:`Logits` for the description of parameters.
     """

-    def __init__(self,
-                 architecture=None,
-                 optimizer=None,
-                 config=None,
-                 loss_op=None,
-                 model_dir="",
-                 validation_batch_size=None,
-                 params=None,
-                 extra_checkpoint=None):
+    def __init__(
+        self,
+        architecture=None,
+        optimizer=None,
+        config=None,
+        loss_op=None,
+        model_dir="",
+        validation_batch_size=None,
+        params=None,
+        extra_checkpoint=None,
+        add_histograms=None,
+        add_regularization_losses=True,
+        optimize_loss=tf.contrib.layers.optimize_loss,
+        optimize_loss_learning_rate=None,
+    ):
         self.architecture = architecture
         self.optimizer = optimizer
         self.loss_op = loss_op
         self.loss = None
         self.extra_checkpoint = extra_checkpoint
+        self.add_regularization_losses = add_regularization_losses
+        self.optimize_loss = optimize_loss
+        self.optimize_loss_learning_rate = optimize_loss_learning_rate
         if self.architecture is None:
-            raise ValueError(
-                "Please specify a function to build the architecture !!")
+            raise ValueError("Please specify a function to build the architecture !!")

         if self.optimizer is None:
             raise ValueError(

@@ -50,73 +59,103 @@ class Siamese(estimator.Estimator):
         if self.loss_op is None:
             raise ValueError("Please specify a function to build the loss !!")

-        def _model_fn(features, labels, mode, params, config):
+        def _model_fn(features, labels, mode):
             if mode == tf.estimator.ModeKeys.TRAIN:

                 # Building one graph, by default everything is trainable
                 # The input function needs to have dictionary pair with the `left` and `right` keys
-                if 'left' not in features.keys() or 'right' not in features.keys():
+                if "left" not in features.keys() or "right" not in features.keys():
                     raise ValueError(
                         "The input function needs to contain a dictionary with the keys `left` and `right` "
                     )
                 # Building one graph
-                trainable_variables = get_trainable_variables(
-                    self.extra_checkpoint)
+                trainable_variables = get_trainable_variables(self.extra_checkpoint)

+                data_left = features["left"]
+                data_left = (
+                    data_left["data"] if isinstance(data_left, dict) else data_left
+                )
+                data_right = features["right"]
+                data_right = (
+                    data_right["data"] if isinstance(data_right, dict) else data_right
+                )
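As of this change, the left/right features may be either bare tensors or per-side dictionaries carrying a "data" key. A hypothetical TRAIN-mode input function illustrating both accepted shapes (tensor shapes are made up):

    def input_fn():
        features = {
            "left": {"data": tf.zeros([8, 112, 112, 3])},  # dict form with a "data" key
            "right": tf.zeros([8, 112, 112, 3]),           # bare tensor form also works
        }
        # per-side identity labels; the model_fn turns them into pair labels
        labels = {"left": tf.zeros([8], tf.int64), "right": tf.ones([8], tf.int64)}
        return features, labels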
                 prelogits_left, end_points_left = self.architecture(
-                    features['left'], mode=mode,
-                    trainable_variables=trainable_variables)
+                    data_left, mode=mode, trainable_variables=trainable_variables
+                )
                 prelogits_right, end_points_right = self.architecture(
-                    features['right'],
+                    data_right,
                     reuse=True,
                     mode=mode,
-                    trainable_variables=trainable_variables)
+                    trainable_variables=trainable_variables,
+                )
                 if self.extra_checkpoint is not None:
                     tf.contrib.framework.init_from_checkpoint(
                         self.extra_checkpoint["checkpoint_path"],
-                        self.extra_checkpoint["scopes"])
+                        self.extra_checkpoint["scopes"],
+                    )
-                # Compute Loss (for both TRAIN and EVAL modes)
-                self.loss = self.loss_op(
-                    prelogits_left, prelogits_right, labels)
-
-                # Configure the Training Op (for TRAIN mode)
-                global_step = tf.train.get_or_create_global_step()
-                train_op = self.optimizer.minimize(
-                    self.loss, global_step=global_step)
+                labels = (
+                    tf.not_equal(labels["left"], labels["right"])
+                    if isinstance(labels, dict)
+                    else labels
+                )
+
+                # Compute Loss (for both TRAIN and EVAL modes)
+                self.loss = self.loss_op(prelogits_left, prelogits_right, labels)
+
+                if self.add_regularization_losses:
+                    regularization_losses = tf.get_collection(
+                        tf.GraphKeys.REGULARIZATION_LOSSES
+                    )
+                    regularization_losses = [
+                        tf.cast(l, self.loss.dtype) for l in regularization_losses
+                    ]
+                    self.loss = tf.add_n(
+                        [self.loss] + regularization_losses, name="total_loss"
+                    )
+
+                train_op = self.optimize_loss(
+                    loss=self.loss,
+                    global_step=tf.train.get_or_create_global_step(),
+                    optimizer=self.optimizer,
+                    learning_rate=self.learning_rate,
+                )
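When per-side label dictionaries are passed, the pair label becomes True for impostor (non-matching) pairs, as in this small illustration (values made up):

    labels = {"left": tf.constant([0, 1, 2]), "right": tf.constant([0, 3, 2])}
    pair_labels = tf.not_equal(labels["left"], labels["right"])
    # evaluates to [False, True, False]; True marks a non-matching pair

Note that this hunk passes learning_rate=self.learning_rate, whereas Regressor and Triplet pass self.optimize_loss_learning_rate; since only optimize_loss_learning_rate is set in __init__, this line looks like it would raise an AttributeError in TRAIN mode.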
+                # add histograms summaries
+                if add_histograms == "all":
+                    for v in tf.all_variables():
+                        tf.summary.histogram(v.name, v)
+                elif add_histograms == "train":
+                    for v in tf.trainable_variables():
+                        tf.summary.histogram(v.name, v)
                 return tf.estimator.EstimatorSpec(
-                    mode=mode, loss=self.loss, train_op=train_op)
+                    mode=mode, loss=self.loss, train_op=train_op
+                )
             check_features(features)
-            data = features['data']
+            data = features["data"]
+            key = features["key"]

             # Compute the embeddings
             prelogits = self.architecture(data, mode=mode)[0]
             embeddings = tf.nn.l2_normalize(prelogits, 1)

-            predictions = {"embeddings": embeddings}
+            predictions = {"embeddings": embeddings, "key": key}
             if mode == tf.estimator.ModeKeys.PREDICT:
-                return tf.estimator.EstimatorSpec(
-                    mode=mode, predictions=predictions)
+                return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)
             predictions_op = predict_using_tensors(
-                predictions["embeddings"], labels, num=validation_batch_size)
+                predictions["embeddings"], labels, num=validation_batch_size
+            )

             eval_metric_ops = {
-                "accuracy": tf.metrics.accuracy(
-                    labels=labels, predictions=predictions_op)
+                "accuracy": tf.metrics.accuracy(
+                    labels=labels, predictions=predictions_op
+                )
             }

             return tf.estimator.EstimatorSpec(
-                mode=mode, loss=tf.reduce_mean(1), eval_metric_ops=eval_metric_ops)
+                mode=mode, loss=tf.reduce_mean(1), eval_metric_ops=eval_metric_ops
+            )
         super(Siamese, self).__init__(
-            model_fn=_model_fn, model_dir=model_dir,
-            params=params, config=config)
+            model_fn=_model_fn, model_dir=model_dir, params=params, config=config
+        )
bob/learn/tensorflow/estimators/Triplet.py

@@ -10,7 +10,7 @@ from . import check_features, get_trainable_variables
 import logging

-logger = logging.getLogger("bob.learn")
+logger = logging.getLogger(__name__)


 class Triplet(estimator.Estimator):

@@ -23,24 +23,29 @@ class Triplet(estimator.Estimator):
     See :any:`Logits` for the description of parameters.
     """

-    def __init__(self,
-                 architecture=None,
-                 optimizer=None,
-                 config=None,
-                 loss_op=triplet_loss,
-                 model_dir="",
-                 validation_batch_size=None,
-                 extra_checkpoint=None):
+    def __init__(
+        self,
+        architecture=None,
+        optimizer=None,
+        config=None,
+        loss_op=triplet_loss,
+        model_dir="",
+        validation_batch_size=None,
+        extra_checkpoint=None,
+        optimize_loss=tf.contrib.layers.optimize_loss,
+        optimize_loss_learning_rate=None,
+    ):
         self.architecture = architecture
         self.optimizer = optimizer
         self.loss_op = loss_op
         self.loss = None
         self.extra_checkpoint = extra_checkpoint
+        self.optimize_loss = optimize_loss
+        self.optimize_loss_learning_rate = optimize_loss_learning_rate
         if self.architecture is None:
-            raise ValueError(
-                "Please specify a function to build the architecture !!")
+            raise ValueError("Please specify a function to build the architecture !!")

         if self.optimizer is None:
             raise ValueError(

@@ -55,48 +60,60 @@ class Triplet(estimator.Estimator):
             if mode == tf.estimator.ModeKeys.TRAIN:

                 # The input function needs to have dictionary pair with the `left` and `right` keys
-                if 'anchor' not in features.keys() or \
-                        'positive' not in features.keys() or \
-                        'negative' not in features.keys():
+                if (
+                    "anchor" not in features.keys()
+                    or "positive" not in features.keys()
+                    or "negative" not in features.keys()
+                ):
                     raise ValueError(
                         "The input function needs to contain a dictionary with the "
-                        "keys `anchor`, `positive` and `negative` ")
+                        "keys `anchor`, `positive` and `negative` "
+                    )
                 # Building one graph
-                trainable_variables = get_trainable_variables(
-                    self.extra_checkpoint)
+                trainable_variables = get_trainable_variables(self.extra_checkpoint)

                 prelogits_anchor = self.architecture(
-                    features['anchor'],
+                    features["anchor"],
                     mode=mode,
-                    trainable_variables=trainable_variables)[0]
+                    trainable_variables=trainable_variables,
+                )[0]
                 prelogits_positive = self.architecture(
-                    features['positive'],
+                    features["positive"],
                     reuse=True,
                     mode=mode,
-                    trainable_variables=trainable_variables)[0]
+                    trainable_variables=trainable_variables,
+                )[0]
                 prelogits_negative = self.architecture(
-                    features['negative'],
+                    features["negative"],
                     reuse=True,
                     mode=mode,
-                    trainable_variables=trainable_variables)[0]
+                    trainable_variables=trainable_variables,
+                )[0]
                 if self.extra_checkpoint is not None:
                     tf.contrib.framework.init_from_checkpoint(
                         self.extra_checkpoint["checkpoint_path"],
-                        self.extra_checkpoint["scopes"])
+                        self.extra_checkpoint["scopes"],
+                    )
                 # Compute Loss (for both TRAIN and EVAL modes)
-                self.loss = self.loss_op(
-                    prelogits_anchor, prelogits_positive, prelogits_negative)
+                self.loss = self.loss_op(
+                    prelogits_anchor, prelogits_positive, prelogits_negative
+                )

                 # Configure the Training Op (for TRAIN mode)
                 global_step = tf.train.get_or_create_global_step()
-                train_op = self.optimizer.minimize(
-                    self.loss, global_step=global_step)
+                train_op = self.optimize_loss(
+                    loss=self.loss,
+                    global_step=global_step,
+                    optimizer=self.optimizer,
+                    learning_rate=self.optimize_loss_learning_rate,
+                )
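For reference, a triplet loss of the kind loss_op defaults to pulls the anchor toward the positive and pushes it away from the negative. A minimal sketch of the idea (the margin value is illustrative; this is not the package's own triplet_loss implementation):

    def triplet_loss_sketch(anchor, positive, negative, margin=0.2):
        # squared Euclidean distances between embeddings, per example
        d_pos = tf.reduce_sum(tf.square(anchor - positive), axis=1)
        d_neg = tf.reduce_sum(tf.square(anchor - negative), axis=1)
        # hinge: penalize positives that are not at least `margin` closer
        return tf.reduce_mean(tf.maximum(d_pos - d_neg + margin, 0.0))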
                 return tf.estimator.EstimatorSpec(
-                    mode=mode, loss=self.loss, train_op=train_op)
+                    mode=mode, loss=self.loss, train_op=train_op
+                )
             check_features(features)
-            data = features['data']
+            data = features["data"]

             # Compute the embeddings
             prelogits = self.architecture(data, mode=mode)[0]

@@ -104,20 +121,21 @@ class Triplet(estimator.Estimator):
             predictions = {"embeddings": embeddings}

             if mode == tf.estimator.ModeKeys.PREDICT:
-                return tf.estimator.EstimatorSpec(
-                    mode=mode, predictions=predictions)
+                return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)

             predictions_op = predict_using_tensors(
-                predictions["embeddings"], labels, num=validation_batch_size)
+                predictions["embeddings"], labels, num=validation_batch_size
+            )

             eval_metric_ops = {
-                "accuracy": tf.metrics.accuracy(
-                    labels=labels, predictions=predictions_op)
+                "accuracy": tf.metrics.accuracy(
+                    labels=labels, predictions=predictions_op
+                )
             }

             return tf.estimator.EstimatorSpec(
                 mode=mode, loss=tf.reduce_mean(1), eval_metric_ops=eval_metric_ops)
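Putting the pieces together, a hypothetical construction of the updated Triplet estimator (the toy architecture function, model_dir, and learning-rate choice are made up; real deployments use the architectures shipped with the package):

    def architecture(data, mode, trainable_variables=None, reuse=False):
        # toy embedding network returning (prelogits, end_points)
        with tf.variable_scope("toy", reuse=reuse):
            return tf.layers.dense(tf.layers.flatten(data), 128), {}

    estimator = Triplet(
        architecture=architecture,
        optimizer=tf.train.GradientDescentOptimizer(1e-3),
        optimize_loss_learning_rate=None,  # defer to the optimizer instance
        model_dir="/tmp/triplet_model",    # hypothetical path
    )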