Flake shit

parent 750171bb
Pipeline #50091 passed with stage in 3 minutes and 52 seconds
@@ -5,7 +5,6 @@ import tensorflow as tf
 from tensorflow.keras.layers import BatchNormalization
 from tensorflow.keras.layers import Dense
 from tensorflow.keras.layers import Dropout
-from tensorflow.keras.layers import GlobalAvgPool2D

 def _check_input(
@@ -260,7 +259,12 @@ class ModifiedSoftMaxLayer(tf.keras.layers.Layer):
         return logits


-def add_bottleneck(model, bottleneck_size=128, dropout_rate=0.2):
+from tensorflow.keras.layers import Flatten
+
+
+def add_bottleneck(
+    model, bottleneck_size=128, dropout_rate=0.2, w_decay=5e-4, use_bias=True
+):
     """
     Append a bottleneck layer to a Keras Model
@@ -276,15 +280,31 @@ def add_bottleneck(model, bottleneck_size=128, dropout_rate=0.2):
     dropout_rate: float
         Dropout rate
     """
     if not isinstance(model, tf.keras.models.Sequential):
         new_model = tf.keras.models.Sequential(model, name="bottleneck")
     else:
         new_model = model

-    new_model.add(GlobalAvgPool2D())
-    new_model.add(BatchNormalization())
-    new_model.add(Dropout(dropout_rate, name="Dropout"))
-    new_model.add(Dense(bottleneck_size, use_bias=False, name="embeddings"))
-    new_model.add(BatchNormalization(axis=-1, scale=False, name="embeddings/BatchNorm"))
+    new_model.add(Flatten())
+
+    if w_decay is None:
+        regularizer = None
+    else:
+        regularizer = tf.keras.regularizers.l2(w_decay)
+
+    new_model.add(
+        Dense(
+            bottleneck_size,
+            use_bias=use_bias,
+            kernel_regularizer=regularizer,
+        )
+    )
+    new_model.add(BatchNormalization(axis=-1, name="embeddings"))
+    # new_model.add(BatchNormalization())

     return new_model
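For reference, a minimal sketch of how the reworked signature would be used; the toy Sequential backbone and input shape are illustrative assumptions, not part of this change. Note that dropout_rate stays in the signature even though no Dropout layer appears in the rewritten body shown here.

# Sketch only: a tiny Sequential backbone stands in for a real one.
import tensorflow as tf

backbone = tf.keras.Sequential(
    [tf.keras.layers.Conv2D(32, 3, activation="relu", input_shape=(112, 112, 3))]
)
# Appends Flatten -> Dense(128, l2-regularized) -> BatchNormalization("embeddings").
model = add_bottleneck(backbone, bottleneck_size=128, w_decay=5e-4, use_bias=True)
# Passing w_decay=None skips the l2 kernel regularizer entirely.
print(model.output_shape)  # (None, 128)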
......@@ -7,9 +7,9 @@ from .densenet import DenseNet
from .densenet import densenet161 # noqa: F401
from .embedding_validation import EmbeddingValidation
from .mine import MineModel
from .resnet50_modified import resnet50_modified # noqa: F401
from .resnet50_modified import resnet101_modified # noqa: F401
from .arcface import ArcFaceLayer, ArcFaceLayer3Penalties, ArcFaceModel
from .resnet50_modified import resnet50_modified, resnet101_modified
# gets sphinx autodoc done right - don't remove it
def __appropriate__(*args):
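The hunk ends at the __appropriate__ helper. In Bob-style packages this helper conventionally rebinds each object's __module__ so Sphinx autodoc attributes the symbols to the package; a sketch of the usual idiom follows (the body is the common pattern, assumed rather than shown in this diff):

def __appropriate__(*args):
    """Says object was actually declared here, and not in the import module."""
    for obj in args:
        obj.__module__ = __name__


__appropriate__(DenseNet, EmbeddingValidation, MineModel, ArcFaceModel)
__all__ = [_ for _ in dir() if not _.startswith("_")]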
@@ -12,6 +12,7 @@ class EmbeddingValidation(tf.keras.Model):
     def compile(
         self,
         single_precision=False,
+        **kwargs,
     ):
         """
@@ -27,14 +28,20 @@ class EmbeddingValidation(tf.keras.Model):
         """
         X, y = data

         with tf.GradientTape() as tape:
             logits, _ = self(X, training=True)
             loss = self.loss(y, logits)

+        # trainable_vars = self.trainable_variables
+        self.optimizer.minimize(loss, self.trainable_variables, tape=tape)

         self.compiled_metrics.update_state(y, logits, sample_weight=None)
         self.train_loss(loss)
+        tf.summary.scalar("training_loss", data=loss, step=self._train_counter)

         return {m.name: m.result() for m in self.metrics + [self.train_loss]}
+        # self.optimizer.apply_gradients(zip(gradients, trainable_vars))
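The update step now goes through optimizer.minimize with the tape passed in; the commented-out lines hint at the manual equivalent. A self-contained sketch of that equivalence (model, optimizer, and data below are toy assumptions):

import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(2, input_shape=(4,))])
optimizer = tf.keras.optimizers.SGD(learning_rate=0.1)
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
X = tf.random.normal((8, 4))
y = tf.zeros((8,), dtype=tf.int32)

# One-call form, as in the new train_step:
with tf.GradientTape() as tape:
    loss = loss_fn(y, model(X, training=True))
optimizer.minimize(loss, model.trainable_variables, tape=tape)

# Manual equivalent, matching the commented-out apply_gradients path:
with tf.GradientTape() as tape:
    loss = loss_fn(y, model(X, training=True))
gradients = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))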
@@ -9,10 +9,11 @@ This resnet 50 implementation provides a cleaner version
 """
 import tensorflow as tf

-from tensorflow.keras.regularizers import l2
-from tensorflow.keras.layers import Conv2D, Activation, BatchNormalization
+from tensorflow.keras.layers import Activation
+from tensorflow.keras.layers import BatchNormalization
+from tensorflow.keras.layers import Conv2D
 from tensorflow.keras.layers import MaxPooling2D
+from tensorflow.keras.regularizers import l2

 global weight_decay
 weight_decay = 1e-4
@@ -24,15 +25,15 @@ class IdentityBlock(tf.keras.layers.Layer):
     ):
         """Block that has no convolutional layer as skip connection

         Parameters
         ----------
-        kernel_size: 
+        kernel_size:
             The kernel size of middle conv layer at main path
-        filters: 
+        filters:
             list of integers, the filters of 3 conv layer at main path
-        stage: 
+        stage:
             Current stage label, used for generating layer names
         block:
@@ -94,8 +95,8 @@ class IdentityBlock(tf.keras.layers.Layer):
     def call(self, input_tensor, training=None):
         x = input_tensor
-        for l in self.layers:
-            x = l(x, training=training)
+        for lay in self.layers:
+            x = lay(x, training=training)

         x = tf.keras.layers.add([x, input_tensor])
         x = Activation("relu")(x)
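The l to lay rename addresses flake8's E741 (ambiguous variable name); a minimal illustration of what the linter objects to:

layers = ["conv", "bn", "relu"]
for l in layers:  # flake8 E741: "l" is easily confused with "1" and "I"
    print(l)
for lay in layers:  # the renamed loop variable passes the check
    print(lay)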
@@ -115,15 +116,15 @@ class ConvBlock(tf.keras.layers.Layer):
         name=None,
         **kwargs,
     ):
-        """ Block that has a conv layer AS shortcut.
+        """Block that has a conv layer AS shortcut.

         Parameters
         ----------
-        kernel_size: 
+        kernel_size:
             The kernel size of middle conv layer at main path
-        filters: 
+        filters:
             list of integers, the filters of 3 conv layer at main path
-        stage: 
+        stage:
             Current stage label, used for generating layer names
         block:
@@ -200,12 +201,12 @@ class ConvBlock(tf.keras.layers.Layer):
     def call(self, input_tensor, training=None):
         x = input_tensor
-        for l in self.layers:
-            x = l(x, training=training)
+        for lay in self.layers:
+            x = lay(x, training=training)

         x_s = input_tensor
-        for l in self.shortcut:
-            x_s = l(x_s, training=training)
+        for lay in self.shortcut:
+            x_s = lay(x_s, training=training)

         x = tf.keras.layers.add([x, x_s])
         x = Activation("relu")(x)
@@ -348,4 +349,3 @@ if __name__ == "__main__":
     print(len(model.variables))
     print(model.summary())