diff --git a/bob/learn/tensorflow/models/alexnet.py b/bob/learn/tensorflow/models/alexnet.py
index 326abe71a60a5fc9f01ef2ab212d1786a36d4a9f..1ff0907775faa81c6ce5777f0065bf1ad49b3e42 100644
--- a/bob/learn/tensorflow/models/alexnet.py
+++ b/bob/learn/tensorflow/models/alexnet.py
@@ -52,7 +52,7 @@ def AlexNet_simplified(name="AlexNet", **kwargs):
             tf.keras.layers.Dense(units=4096, activation="relu", name="F6"),
             tf.keras.layers.Dropout(rate=0.5, name="D7"),
             tf.keras.layers.Dense(units=4096, activation="relu", name="F7"),
-            tf.keras.layers.Dense(units=1000, activation="softmax", name="OUTPUT"),
+            tf.keras.layers.Dense(units=1000, name="OUTPUT"),
         ],
         name=name,
         **kwargs
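
Every classification head in this patch (AlexNet_simplified here, plus GoogLeNet, InceptionResNetV2 and LeNet5_simplified below) drops its final softmax/sigmoid and now emits raw logits, so any cross-entropy loss paired with these models must be constructed with from_logits=True. A minimal usage sketch, assuming one-hot labels and a hypothetical images batch of the right input shape:

    import tensorflow as tf
    from bob.learn.tensorflow.models.alexnet import AlexNet_simplified

    model = AlexNet_simplified()
    model.compile(
        optimizer="adam",
        # OUTPUT is a plain Dense layer now, so the loss must consume logits
        loss=tf.keras.losses.CategoricalCrossentropy(from_logits=True),
        metrics=["accuracy"],
    )
    # the model no longer produces probabilities itself; apply softmax
    # explicitly at inference time
    probabilities = tf.nn.softmax(model(images), axis=-1)
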
diff --git a/bob/learn/tensorflow/models/densenet.py b/bob/learn/tensorflow/models/densenet.py
index bc0f6abd65d1f3c7c9f09c7fe2887aeace1e565b..f17f5b860b2d3b92f06cd2b8586d5b590e0378e3 100644
--- a/bob/learn/tensorflow/models/densenet.py
+++ b/bob/learn/tensorflow/models/densenet.py
@@ -434,15 +434,14 @@ class DeepPixBiS(tf.keras.Model):
             tf.keras.layers.Flatten(
                 data_format=data_format, name="Pixel_Logits_Flatten"
             ),
-            tf.keras.layers.Activation("sigmoid", name="activation"),
         ]
 
     def call(self, x, training=None):
-        for l in self.sequential_layers:
+        for layer in self.sequential_layers:
             try:
-                x = l(x, training=training)
+                x = layer(x, training=training)
             except TypeError:
-                x = l(x)
+                x = layer(x)
         return x
 
 
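DeepPixBiS likewise loses its trailing sigmoid, so call() now returns the flattened pixel-wise logits. A minimal sketch of the matching loss/inference change, assuming model is a DeepPixBiS instance, batch is an input batch, and pixel_labels holds binary per-pixel ground truth:

    import tensorflow as tf

    bce = tf.keras.losses.BinaryCrossentropy(from_logits=True)
    pixel_logits = model(batch, training=True)  # Pixel_Logits_Flatten output
    loss = bce(pixel_labels, pixel_logits)
    # recover per-pixel probabilities explicitly at inference time
    pixel_probs = tf.nn.sigmoid(model(batch, training=False))
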
diff --git a/bob/learn/tensorflow/models/inception.py b/bob/learn/tensorflow/models/inception.py
index 90d56223fd86d8e9b723a3d723f65d031598508c..5a8314aa9ce82c1ddadcab47be0a16a16b5a0739 100644
--- a/bob/learn/tensorflow/models/inception.py
+++ b/bob/learn/tensorflow/models/inception.py
@@ -132,7 +132,7 @@ def GoogLeNet(*, num_classes=1000, name="GoogLeNet", **kwargs):
             InceptionModule(384, 192, 384, 48, 128, 128, name="inception_5b"),
             tf.keras.layers.GlobalAvgPool2D(name="pool5"),
             tf.keras.layers.Dropout(rate=0.4, name="dropout"),
-            tf.keras.layers.Dense(num_classes, name="output", activation="softmax"),
+            tf.keras.layers.Dense(num_classes, name="output"),
         ],
         name=name,
         **kwargs
diff --git a/bob/learn/tensorflow/models/inception_resnet_v2.py b/bob/learn/tensorflow/models/inception_resnet_v2.py
index dc544b0ffc6726997425ef9d79084802c9748418..bfab700a721e539a428a4339312555e574523b47 100644
--- a/bob/learn/tensorflow/models/inception_resnet_v2.py
+++ b/bob/learn/tensorflow/models/inception_resnet_v2.py
@@ -542,7 +542,7 @@ def InceptionResNetV2(
     if include_top:
         # Classification block
         x = GlobalAvgPool2D(name="avg_pool")(x)
-        x = Dense(classes, activation="softmax", name="predictions")(x)
+        x = Dense(classes, name="predictions")(x)
     else:
         if pooling == "avg":
             x = GlobalAvgPool2D()(x)
diff --git a/bob/learn/tensorflow/models/lenet5.py b/bob/learn/tensorflow/models/lenet5.py
index 8654001bf24c3d5307531465e286f29d8d9c4d89..425b337725b60b6dc471c572c80df1f83eded74e 100644
--- a/bob/learn/tensorflow/models/lenet5.py
+++ b/bob/learn/tensorflow/models/lenet5.py
@@ -22,7 +22,7 @@ def LeNet5_simplified(name="LeNet5", **kwargs):
             ),
             tf.keras.layers.Flatten(name="FLATTEN"),
             tf.keras.layers.Dense(units=84, activation="tanh", name="F6"),
-            tf.keras.layers.Dense(units=10, activation="sigmoid", name="OUTPUT"),
+            tf.keras.layers.Dense(units=10, name="OUTPUT"),
         ],
         name=name,
         **kwargs
diff --git a/bob/learn/tensorflow/models/mcae.py b/bob/learn/tensorflow/models/mcae.py
index e4241c409f747464e9f0ab8408acfe591658107c..214307820df190b1eb6af376ad7af4a1654d9d2e 100644
--- a/bob/learn/tensorflow/models/mcae.py
+++ b/bob/learn/tensorflow/models/mcae.py
@@ -51,8 +51,8 @@ class ConvEncoder(tf.keras.Model):
         self.sequential_layers = layers
 
     def call(self, x, training=None):
-        for l in self.sequential_layers:
-            x = l(x)
+        for layer in self.sequential_layers:
+            x = layer(x)
         return x
 
 
@@ -91,8 +91,8 @@ class ConvDecoder(tf.keras.Model):
         self.sequential_layers = layers
 
     def call(self, x, training=None):
-        for l in self.sequential_layers:
-            x = l(x)
+        for layer in self.sequential_layers:
+            x = layer(x)
         return x