Commit c57a9077 authored by Amir MOHAMMADI

make sure all architectures output logits

parent 602d790a
1 merge request: !87 WIP: Updates
Pipeline #42600 failed
@@ -52,7 +52,7 @@ def AlexNet_simplified(name="AlexNet", **kwargs):
             tf.keras.layers.Dense(units=4096, activation="relu", name="F6"),
             tf.keras.layers.Dropout(rate=0.5, name="D7"),
             tf.keras.layers.Dense(units=4096, activation="relu", name="F7"),
-            tf.keras.layers.Dense(units=1000, activation="softmax", name="OUTPUT"),
+            tf.keras.layers.Dense(units=1000, name="OUTPUT"),
         ],
         name=name,
         **kwargs
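Note for downstream users (not part of this commit): with the softmax removed, the OUTPUT layer now returns raw logits, so a training loss has to apply the softmax itself. A minimal sketch with a stand-in model; the optimizer, loss, and label format below are assumptions, not taken from this repository:

import tensorflow as tf

# Stand-in for any of the patched classification heads: the last Dense
# layer has no activation, so the model outputs raw logits.
model = tf.keras.Sequential(
    [
        tf.keras.layers.Flatten(input_shape=(28, 28)),
        tf.keras.layers.Dense(128, activation="relu"),
        tf.keras.layers.Dense(10, name="OUTPUT"),  # logits, no softmax
    ]
)

# The loss is told to apply softmax internally via from_logits=True.
model.compile(
    optimizer="adam",
    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
    metrics=["accuracy"],
)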
@@ -434,15 +434,14 @@ class DeepPixBiS(tf.keras.Model):
             tf.keras.layers.Flatten(
                 data_format=data_format, name="Pixel_Logits_Flatten"
             ),
-            tf.keras.layers.Activation("sigmoid", name="activation"),
         ]

     def call(self, x, training=None):
-        for l in self.sequential_layers:
+        for layer in self.sequential_layers:
             try:
-                x = l(x, training=training)
+                x = layer(x, training=training)
             except TypeError:
-                x = l(x)
+                x = layer(x)
         return x
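Likewise, DeepPixBiS no longer applies a sigmoid to its pixel-wise outputs. A hedged sketch of the binary case; the loss object and the dummy labels/logits are illustrative only:

import tensorflow as tf

# Binary cross-entropy computed directly on logits, since the model's
# "activation" layer (sigmoid) was removed.
bce = tf.keras.losses.BinaryCrossentropy(from_logits=True)

labels = tf.constant([[1.0, 0.0, 1.0]])
logits = tf.constant([[2.0, -1.0, 0.5]])
loss_value = bce(labels, logits)

# Probabilities can still be recovered explicitly when needed.
probabilities = tf.math.sigmoid(logits)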
@@ -132,7 +132,7 @@ def GoogLeNet(*, num_classes=1000, name="GoogLeNet", **kwargs):
             InceptionModule(384, 192, 384, 48, 128, 128, name="inception_5b"),
             tf.keras.layers.GlobalAvgPool2D(name="pool5"),
             tf.keras.layers.Dropout(rate=0.4, name="dropout"),
-            tf.keras.layers.Dense(num_classes, name="output", activation="softmax"),
+            tf.keras.layers.Dense(num_classes, name="output"),
         ],
         name=name,
         **kwargs
@@ -542,7 +542,7 @@ def InceptionResNetV2(
     if include_top:
         # Classification block
         x = GlobalAvgPool2D(name="avg_pool")(x)
-        x = Dense(classes, activation="softmax", name="predictions")(x)
+        x = Dense(classes, name="predictions")(x)
     else:
         if pooling == "avg":
             x = GlobalAvgPool2D()(x)
@@ -22,7 +22,7 @@ def LeNet5_simplified(name="LeNet5", **kwargs):
             ),
             tf.keras.layers.Flatten(name="FLATTEN"),
             tf.keras.layers.Dense(units=84, activation="tanh", name="F6"),
-            tf.keras.layers.Dense(units=10, activation="sigmoid", name="OUTPUT"),
+            tf.keras.layers.Dense(units=10, name="OUTPUT"),
         ],
         name=name,
         **kwargs
@@ -51,8 +51,8 @@ class ConvEncoder(tf.keras.Model):
         self.sequential_layers = layers

     def call(self, x, training=None):
-        for l in self.sequential_layers:
-            x = l(x)
+        for layer in self.sequential_layers:
+            x = layer(x)
         return x
@@ -91,8 +91,8 @@ class ConvDecoder(tf.keras.Model):
         self.sequential_layers = layers

     def call(self, x, training=None):
-        for l in self.sequential_layers:
-            x = l(x)
+        for layer in self.sequential_layers:
+            x = layer(x)
         return x
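If code that consumes these models still expects probabilities at prediction time, one assumed deployment pattern (not part of this commit) is to wrap the logit-producing model once at export time rather than baking the activation back into the architecture:

import tensorflow as tf

# Stand-in logit model; any of the architectures above would take its place.
base = tf.keras.Sequential([tf.keras.layers.Dense(10, name="OUTPUT")])

# Softmax is applied only in the exported inference model.
inference_model = tf.keras.Sequential([base, tf.keras.layers.Softmax()])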