From f02aaff21ab89196c7b6cc8edc04d532fe58e147 Mon Sep 17 00:00:00 2001
From: Amir MOHAMMADI <amir.mohammadi@idiap.ch>
Date: Wed, 9 Sep 2020 20:03:09 +0200
Subject: [PATCH] make sure all architectures output logits

---
 bob/learn/tensorflow/models/alexnet.py             | 2 +-
 bob/learn/tensorflow/models/densenet.py            | 7 +++----
 bob/learn/tensorflow/models/inception.py           | 2 +-
 bob/learn/tensorflow/models/inception_resnet_v2.py | 2 +-
 bob/learn/tensorflow/models/lenet5.py              | 2 +-
 bob/learn/tensorflow/models/mcae.py                | 8 ++++----
 6 files changed, 11 insertions(+), 12 deletions(-)

diff --git a/bob/learn/tensorflow/models/alexnet.py b/bob/learn/tensorflow/models/alexnet.py
index 326abe71..1ff09077 100644
--- a/bob/learn/tensorflow/models/alexnet.py
+++ b/bob/learn/tensorflow/models/alexnet.py
@@ -52,7 +52,7 @@ def AlexNet_simplified(name="AlexNet", **kwargs):
             tf.keras.layers.Dense(units=4096, activation="relu", name="F6"),
             tf.keras.layers.Dropout(rate=0.5, name="D7"),
             tf.keras.layers.Dense(units=4096, activation="relu", name="F7"),
-            tf.keras.layers.Dense(units=1000, activation="softmax", name="OUTPUT"),
+            tf.keras.layers.Dense(units=1000, name="OUTPUT"),
         ],
         name=name,
         **kwargs
diff --git a/bob/learn/tensorflow/models/densenet.py b/bob/learn/tensorflow/models/densenet.py
index bc0f6abd..f17f5b86 100644
--- a/bob/learn/tensorflow/models/densenet.py
+++ b/bob/learn/tensorflow/models/densenet.py
@@ -434,15 +434,14 @@ class DeepPixBiS(tf.keras.Model):
             tf.keras.layers.Flatten(
                 data_format=data_format, name="Pixel_Logits_Flatten"
             ),
-            tf.keras.layers.Activation("sigmoid", name="activation"),
         ]

     def call(self, x, training=None):
-        for l in self.sequential_layers:
+        for layer in self.sequential_layers:
             try:
-                x = l(x, training=training)
+                x = layer(x, training=training)
             except TypeError:
-                x = l(x)
+                x = layer(x)
         return x


diff --git a/bob/learn/tensorflow/models/inception.py b/bob/learn/tensorflow/models/inception.py
index 90d56223..5a8314aa 100644
--- a/bob/learn/tensorflow/models/inception.py
+++ b/bob/learn/tensorflow/models/inception.py
@@ -132,7 +132,7 @@ def GoogLeNet(*, num_classes=1000, name="GoogLeNet", **kwargs):
             InceptionModule(384, 192, 384, 48, 128, 128, name="inception_5b"),
             tf.keras.layers.GlobalAvgPool2D(name="pool5"),
             tf.keras.layers.Dropout(rate=0.4, name="dropout"),
-            tf.keras.layers.Dense(num_classes, name="output", activation="softmax"),
+            tf.keras.layers.Dense(num_classes, name="output"),
         ],
         name=name,
         **kwargs
diff --git a/bob/learn/tensorflow/models/inception_resnet_v2.py b/bob/learn/tensorflow/models/inception_resnet_v2.py
index dc544b0f..bfab700a 100644
--- a/bob/learn/tensorflow/models/inception_resnet_v2.py
+++ b/bob/learn/tensorflow/models/inception_resnet_v2.py
@@ -542,7 +542,7 @@ def InceptionResNetV2(
     if include_top:
         # Classification block
         x = GlobalAvgPool2D(name="avg_pool")(x)
-        x = Dense(classes, activation="softmax", name="predictions")(x)
+        x = Dense(classes, name="predictions")(x)
     else:
         if pooling == "avg":
             x = GlobalAvgPool2D()(x)
diff --git a/bob/learn/tensorflow/models/lenet5.py b/bob/learn/tensorflow/models/lenet5.py
index 8654001b..425b3377 100644
--- a/bob/learn/tensorflow/models/lenet5.py
+++ b/bob/learn/tensorflow/models/lenet5.py
@@ -22,7 +22,7 @@ def LeNet5_simplified(name="LeNet5", **kwargs):
             ),
             tf.keras.layers.Flatten(name="FLATTEN"),
             tf.keras.layers.Dense(units=84, activation="tanh", name="F6"),
-            tf.keras.layers.Dense(units=10, activation="sigmoid", name="OUTPUT"),
+            tf.keras.layers.Dense(units=10, name="OUTPUT"),
         ],
         name=name,
         **kwargs
diff --git a/bob/learn/tensorflow/models/mcae.py b/bob/learn/tensorflow/models/mcae.py
index e4241c40..21430782 100644
--- a/bob/learn/tensorflow/models/mcae.py
+++ b/bob/learn/tensorflow/models/mcae.py
@@ -51,8 +51,8 @@ class ConvEncoder(tf.keras.Model):
         self.sequential_layers = layers

     def call(self, x, training=None):
-        for l in self.sequential_layers:
-            x = l(x)
+        for layer in self.sequential_layers:
+            x = layer(x)
         return x


@@ -91,8 +91,8 @@ class ConvDecoder(tf.keras.Model):
         self.sequential_layers = layers

     def call(self, x, training=None):
-        for l in self.sequential_layers:
-            x = l(x)
+        for layer in self.sequential_layers:
+            x = layer(x)
         return x


--
GitLab
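Usage note: with the trailing softmax/sigmoid layers removed, every architecture in this patch returns raw logits, so the activation has to be applied by the loss function (or explicitly at inference time). A minimal sketch of the resulting training setup, assuming LeNet5_simplified keeps its 10-way output and the classic 32x32 grayscale LeNet-5 input shape (the dummy batch below is an illustration, not part of the patch):

    import tensorflow as tf

    from bob.learn.tensorflow.models.lenet5 import LeNet5_simplified

    model = LeNet5_simplified()
    model.compile(
        optimizer="adam",
        # The model emits logits; from_logits=True makes the loss apply the
        # softmax internally, which is numerically more stable than a
        # softmax layer inside the model.
        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
        metrics=["accuracy"],
    )

    # Probabilities are no longer produced by the model itself; apply the
    # activation explicitly when needed (32x32 grayscale input is an
    # assumption for this sketch).
    logits = model(tf.zeros([1, 32, 32, 1]))
    probabilities = tf.nn.softmax(logits)

The binary DeepPixBiS model follows the same pattern with tf.keras.losses.BinaryCrossentropy(from_logits=True) in place of the sparse categorical loss, and tf.math.sigmoid to recover probabilities.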