Commit 4f80284c authored by Tiago de Freitas Pereira

Merge branch 'resnet101' into 'master'

Properly implemented resnet50 and resnet101

See merge request !94
parents 5a8c7061 324a941d
Pipeline #50181 passed with stages in 4 minutes and 52 seconds
@@ -5,7 +5,6 @@ import tensorflow as tf
 from tensorflow.keras.layers import BatchNormalization
 from tensorflow.keras.layers import Dense
 from tensorflow.keras.layers import Dropout
-from tensorflow.keras.layers import GlobalAvgPool2D


 def _check_input(
@@ -260,7 +259,12 @@ class ModifiedSoftMaxLayer(tf.keras.layers.Layer):
         return logits


-def add_bottleneck(model, bottleneck_size=128, dropout_rate=0.2):
+from tensorflow.keras.layers import Flatten
+
+
+def add_bottleneck(
+    model, bottleneck_size=128, dropout_rate=0.2, w_decay=5e-4, use_bias=True
+):
     """
     Amend a bottleneck layer to a Keras Model

       dropout_rate: float
         Dropout rate
     """

     if not isinstance(model, tf.keras.models.Sequential):
         new_model = tf.keras.models.Sequential(model, name="bottleneck")
     else:
         new_model = model

-    new_model.add(GlobalAvgPool2D())
+    new_model.add(BatchNormalization())
     new_model.add(Dropout(dropout_rate, name="Dropout"))
-    new_model.add(Dense(bottleneck_size, use_bias=False, name="embeddings"))
-    new_model.add(BatchNormalization(axis=-1, scale=False, name="embeddings/BatchNorm"))
+    new_model.add(Flatten())
+
+    if w_decay is None:
+        regularizer = None
+    else:
+        regularizer = tf.keras.regularizers.l2(w_decay)
+
+    new_model.add(
+        Dense(
+            bottleneck_size,
+            use_bias=use_bias,
+            kernel_regularizer=regularizer,
+        )
+    )
+
+    new_model.add(BatchNormalization(axis=-1, name="embeddings"))
+    # new_model.add(BatchNormalization())
+
     return new_model
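The new bottleneck head drops the `GlobalAvgPool2D` stage in favour of `BatchNormalization` → `Dropout` → `Flatten`, and the embedding `Dense` layer gains an optional L2 kernel regularizer (disabled by passing `w_decay=None`). A minimal usage sketch, assuming the `add_bottleneck` from this diff is in scope; the toy backbone below is illustrative only, not part of the MR:

```python
import tensorflow as tf

# Illustrative stand-in for a real backbone: any model that outputs a 4D
# feature map works, since the new head flattens it before the Dense layer.
backbone = tf.keras.Sequential(
    [tf.keras.layers.Conv2D(32, 3, strides=2, activation="relu", input_shape=(112, 112, 3))]
)

# w_decay and use_bias are the keyword arguments introduced by this MR.
model = add_bottleneck(backbone, bottleneck_size=128, w_decay=5e-4, use_bias=True)

embeddings = model(tf.zeros((2, 112, 112, 3)), training=False)
print(embeddings.shape)  # (2, 128)
```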
@@ -7,6 +7,8 @@ from .densenet import DenseNet
 from .densenet import densenet161  # noqa: F401
 from .embedding_validation import EmbeddingValidation
 from .mine import MineModel
+from .resnet50_modified import resnet50_modified  # noqa: F401
+from .resnet50_modified import resnet101_modified  # noqa: F401

 # gets sphinx autodoc done right - don't remove it

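The `# noqa: F401` markers silence flake8's unused-import warning; these imports exist only to re-export the new factories at package level. A hedged import sketch (the absolute package path is an assumption inferred from the relative imports above, and the factories' call signatures are not visible in this MR):

```python
# Package path assumed; no constructor arguments are shown because the
# factory signatures are not part of the visible diff.
from bob.learn.tensorflow.models import resnet50_modified, resnet101_modified  # noqa: F401
```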
@@ -12,6 +12,7 @@ class EmbeddingValidation(tf.keras.Model):

     def compile(
         self,
         single_precision=False,
+        **kwargs,
     ):
         """
@@ -27,14 +28,20 @@ class EmbeddingValidation(tf.keras.Model):
         """
         X, y = data

         with tf.GradientTape() as tape:
             logits, _ = self(X, training=True)
             loss = self.loss(y, logits)

+        # trainable_vars = self.trainable_variables
+        self.optimizer.minimize(loss, self.trainable_variables, tape=tape)
+
         self.compiled_metrics.update_state(y, logits, sample_weight=None)
         self.train_loss(loss)
         tf.summary.scalar("training_loss", data=loss, step=self._train_counter)
         return {m.name: m.result() for m in self.metrics + [self.train_loss]}
+        # self.optimizer.apply_gradients(zip(gradients, trainable_vars))
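The substantive change in `train_step` is `self.optimizer.minimize(loss, self.trainable_variables, tape=tape)`: when `loss` is an already-computed tensor, TF2 optimizers accept the `GradientTape` and internally run the `tape.gradient` / `apply_gradients` pair that the commented-out lines previously spelled out. A self-contained sketch of that equivalence (toy variable and loss, not from the MR):

```python
import tensorflow as tf

w = tf.Variable(3.0)
opt = tf.keras.optimizers.SGD(learning_rate=0.1)

with tf.GradientTape() as tape:
    loss = (w - 1.0) ** 2  # toy quadratic loss

# One call differentiates against the tape and applies the update:
opt.minimize(loss, [w], tape=tape)

# Equivalent manual form (the pattern this MR replaces):
#   gradients = tape.gradient(loss, [w])
#   opt.apply_gradients(zip(gradients, [w]))

print(w.numpy())  # 3.0 - 0.1 * 2 * (3.0 - 1.0) = 2.6
```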
This diff is collapsed.