Commit 44d5075f authored by Amir MOHAMMADI

Allow turning batch norm scale off

parent 2b79cb39
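The commit threads a new batch_norm_scale flag through add_bottleneck and forwards it to both BatchNormalization layers, so the learnable gamma scale can be turned off.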
@@ -270,6 +270,7 @@ def add_bottleneck(
     use_bias=True,
     batch_norm_decay=0.99,
     batch_norm_epsilon=1e-3,
+    batch_norm_scale=True,
 ):
     """
     Amend a bottleneck layer to a Keras Model
@@ -293,7 +294,11 @@ def add_bottleneck(
     new_model = model
     new_model.add(
-        BatchNormalization(momentum=batch_norm_decay, epsilon=batch_norm_epsilon)
+        BatchNormalization(
+            momentum=batch_norm_decay,
+            epsilon=batch_norm_epsilon,
+            scale=batch_norm_scale,
+        )
     )
     new_model.add(Dropout(dropout_rate, name="Dropout"))
     new_model.add(Flatten())
@@ -308,18 +313,19 @@ def add_bottleneck(
             bottleneck_size,
             use_bias=use_bias,
             kernel_regularizer=regularizer,
             dtype="float32",
         )
     )
     new_model.add(
         BatchNormalization(
             axis=-1,
             name="embeddings",
             momentum=batch_norm_decay,
             epsilon=batch_norm_epsilon,
+            scale=batch_norm_scale,
             dtype="float32",
         )
     )
     # new_model.add(BatchNormalization())
     return new_model
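For context, a short usage sketch of the new flag. The call site below is illustrative: the backbone model, the hyper-parameter values, and the exact add_bottleneck signature beyond the parameters visible in this diff are assumptions, not part of the commit.

import tensorflow as tf

# Hypothetical backbone; any tf.keras.Sequential feature extractor would do.
backbone = tf.keras.Sequential(
    [tf.keras.layers.Conv2D(32, 3, activation="relu", input_shape=(28, 28, 1))]
)

# batch_norm_scale=False drops the learnable gamma from both
# BatchNormalization layers added by add_bottleneck. This is a common choice
# when the layer that follows is linear (here the bottleneck Dense), since a
# linear layer can absorb the scaling itself.
model = add_bottleneck(
    backbone,
    bottleneck_size=128,     # assumed parameter name, matching the diff body
    batch_norm_scale=False,  # the flag introduced by this commit
)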