Commit c26f813e authored by Amir MOHAMMADI
Add tf2 iresnet models trained on msceleb at Idiap; reproduces pytorch versions from insightface

parent 277f3590
1 merge request: !132 Add tf2 iresnet models trained on msceleb at Idiap
from bob.bio.face.embeddings.tensorflow import iresnet100_msceleb_arcface_20210623
from bob.bio.face.utils import lookup_config_from_database

annotation_type, fixed_positions, memory_demanding = lookup_config_from_database(
    locals().get("database")
)


def load(annotation_type, fixed_positions=None, memory_demanding=None):
    return iresnet100_msceleb_arcface_20210623(
        annotation_type, fixed_positions, memory_demanding
    )


pipeline = load(annotation_type, fixed_positions, memory_demanding)
from bob.bio.face.embeddings.tensorflow import iresnet50_msceleb_arcface_20210623
from bob.bio.face.utils import lookup_config_from_database

annotation_type, fixed_positions, memory_demanding = lookup_config_from_database(
    locals().get("database")
)


def load(annotation_type, fixed_positions=None, memory_demanding=None):
    return iresnet50_msceleb_arcface_20210623(
        annotation_type, fixed_positions, memory_demanding
    )


pipeline = load(annotation_type, fixed_positions, memory_demanding)
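Both baseline configs follow the same pattern: they look up annotation_type, fixed_positions, and memory_demanding from an optional ``database`` object in scope and pass them to the corresponding factory. A minimal sketch of calling one of these factories directly, without a database config (the argument values here are illustrative assumptions, not defaults taken from this commit):

from bob.bio.face.embeddings.tensorflow import iresnet50_msceleb_arcface_20210623

# Build the pipeline by hand; "eyes-center" matches the annotation type
# documented in the factory docstrings further down in this diff.
pipeline = iresnet50_msceleb_arcface_20210623(
    annotation_type="eyes-center",
    fixed_positions=None,
    memory_demanding=False,
)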
@@ -409,6 +409,171 @@ class Resnet50_MsCeleb_ArcFace_20210521(TensorflowTransformer):
)
class IResnet50_MsCeleb_ArcFace_20210623(TensorflowTransformer):
    """
    IResnet50 backbone trained on the MSCeleb-1M database. The bottleneck layer (a.k.a. embedding) has 512 dimensions.

    The complete code to reproduce this model is in the (private) repository
    ``bob.project.hardening/-/commit/9ac25c0a17c9628b7a99e84217cd7c680f1a3e1e``,
    but you can reproduce it using the ``cnn_training/arcface_large_batch.py`` script and the following configuration::

        CONFIG = {
            "n-workers": 8,
            "batch-size": 256,
            "n-train-samples-per-epoch": 256_000 * 1,
            "real-n-train-samples": 985702,
            "shuffle-buffer": int(1e6),
            "face-size": 126,
            "face-output_size": 112,
            "n-classes": 83009,
            "backbone": "resnet50_large_batch",
            "use-l2-regularizer": False,
            "batch-norm-decay": 0.9,
            "batch-norm-epsilon": 1e-5,
            "head": "arcface",
            "s": 30,
            "bottleneck": 512,
            "m": 0.5,
            "dropout-rate": 0.0,
            "learning-rate-schedule": "none",
            "train-tf-record-path": "/face-tfrecords/126x126/msceleb_facecrop/*.tfrecords",
            "validation-tf-record-path": "/face-tfrecords/126x126/lfw_sharded/*.tfrecords",
            "checkpoint-path": "/temp/hardening/arcface_sgd_prelu/w8_b1000_fp16_drp0",
            "pre-train": False,
            "epochs": 6000,
        }

        strategy_fn = "multi-worker-mirrored-strategy"
        mixed_precision_policy = "mixed_float16"
        initial_lr = 0.1 / 512 * CONFIG["batch-size"] * CONFIG["n-workers"]
        real_n_steps_per_epoch = CONFIG["real-n-train-samples"] / (
            CONFIG["batch-size"] * CONFIG["n-workers"]
        )
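        # For these values: initial_lr = 0.1 / 512 * 256 * 8 = 0.4 and
        # real_n_steps_per_epoch = 985702 / (256 * 8) ≈ 481.3.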
        params = {
            "optimizer": {
                "type": "sgdw",
                "sgdw": {
                    "momentum": min(0.9 * initial_lr, 0.999),
                    "nesterov": False,
                    "weight_decay": 5e-4,
                },
            },
            "learning_rate": {
                "type": "stepwise",
                "stepwise": {
                    "boundaries": [int(i * real_n_steps_per_epoch) for i in [11, 17, 22]],
                    "values": [initial_lr / (10 ** i) for i in range(0, 4)],
                },
            },
        }

    The TensorBoard logs can be found at: https://tensorboard.dev/experiment/6bBn0ya3SeilJ2elcZZoSg
    The model at epoch 90 is used.
    """
    def __init__(self, memory_demanding=False, **kwargs):
        urls = [
            "https://www.idiap.ch/software/bob/data/bob/bob.bio.face/master/tf2/arcface_iresnet50_msceleb_idiap-089640d2.tar.gz",
            "http://www.idiap.ch/software/bob/data/bob/bob.bio.face/master/tf2/arcface_iresnet50_msceleb_idiap-089640d2.tar.gz",
        ]
        filename = get_file(
            "arcface_iresnet50_msceleb_idiap-089640d2.tar.gz",
            urls,
            cache_subdir="data/tensorflow/arcface_iresnet50_msceleb_idiap-089640d2",
            file_hash="089640d2",
            extract=True,
        )
        checkpoint_path = os.path.dirname(filename)

        super().__init__(
            checkpoint_path,
            preprocessor=lambda X: X / 255.0,
            memory_demanding=memory_demanding,
            **kwargs,
        )
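A usage sketch for the transformer defined above (illustrative only, not part of this commit): the input layout is assumed to be Bob's channels-first ``(C, H, W)`` uint8 face crops at 112x112, and the ``preprocessor`` passed to ``TensorflowTransformer`` rescales pixel values to [0, 1].

import numpy as np

from bob.bio.face.embeddings.tensorflow import IResnet50_MsCeleb_ArcFace_20210623

# Instantiating the transformer downloads and caches the checkpoint listed above.
embedder = IResnet50_MsCeleb_ArcFace_20210623()

# Four dummy 112x112 RGB face crops (channels-first layout is an assumption here).
faces = np.random.randint(0, 256, size=(4, 3, 112, 112), dtype=np.uint8)

# One 512-d embedding per crop, matching the bottleneck size in the docstring.
embeddings = embedder.transform(faces)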
class IResnet100_MsCeleb_ArcFace_20210623(TensorflowTransformer):
    """
    IResnet100 backbone trained on the MSCeleb-1M database. The bottleneck layer (a.k.a. embedding) has 512 dimensions.

    The complete code to reproduce this model is in the (private) repository
    ``bob.project.hardening/-/commit/b162ca60d26fcf8a93f6767f5b5a026a406c1076``,
    but you can reproduce it using the ``cnn_training/arcface_large_batch.py`` script and the following configuration::

        CONFIG = {
            "n-workers": 8,
            "batch-size": 128,
            "n-train-samples-per-epoch": 256_000 * 1,
            "real-n-train-samples": 985702,
            "shuffle-buffer": int(1e5),
            "face-size": 126,
            "face-output_size": 112,
            "n-classes": 83009,
            "backbone": "iresnet100",
            "use-l2-regularizer": False,
            "batch-norm-decay": 0.9,
            "batch-norm-epsilon": 1e-5,
            "head": "arcface",
            "s": 30,
            "bottleneck": 512,
            "m": 0.5,
            "dropout-rate": 0.0,
            "learning-rate-schedule": "none",
            "train-tf-record-path": "/face-tfrecords/126x126/msceleb_facecrop/*.tfrecords",
            "validation-tf-record-path": "/face-tfrecords/126x126/lfw_sharded/*.tfrecords",
            "checkpoint-path": "/temp/hardening/arcface_sgd_prelu/i100_w8_b128_fp16_drp0",
            "pre-train": False,
            "epochs": 6000,
        }

        strategy_fn = "multi-worker-mirrored-strategy"
        mixed_precision_policy = "mixed_float16"
        initial_lr = 0.1 / 512 * CONFIG["batch-size"] * CONFIG["n-workers"]
        real_n_steps_per_epoch = CONFIG["real-n-train-samples"] / (
            CONFIG["batch-size"] * CONFIG["n-workers"]
        )
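        # For these values: initial_lr = 0.1 / 512 * 128 * 8 = 0.2 and
        # real_n_steps_per_epoch = 985702 / (128 * 8) ≈ 962.6.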
        params = {
            "optimizer": {
                "type": "sgdw",
                "sgdw": {
                    "momentum": min(0.9 * initial_lr, 0.999),
                    "nesterov": False,
                    "weight_decay": 5e-4,
                },
            },
            "learning_rate": {
                # with ReduceLROnPlateau callback
                "type": "constant",
                "constant": {
                    "learning_rate": initial_lr,
                },
            },
        }

    The TensorBoard logs can be found at: https://tensorboard.dev/experiment/HYJTPiowRMa36VZHDLJqdg/
    The saved model is the one with the best ``epoch_embeddings_embedding_accuracy`` (epoch 51).
    """
    def __init__(self, memory_demanding=False):
        urls = [
            "https://www.idiap.ch/software/bob/data/bob/bob.bio.face/master/tf2/arcface_iresnet100_msceleb_idiap-1b22d544.tar.gz",
            "http://www.idiap.ch/software/bob/data/bob/bob.bio.face/master/tf2/arcface_iresnet100_msceleb_idiap-1b22d544.tar.gz",
        ]
        filename = get_file(
            "arcface_iresnet100_msceleb_idiap-1b22d544.tar.gz",
            urls,
            cache_subdir="data/tensorflow/arcface_iresnet100_msceleb_idiap-1b22d544",
            file_hash="1b22d544",
            extract=True,
        )
        checkpoint_path = os.path.dirname(filename)

        super().__init__(
            checkpoint_path,
            preprocessor=lambda X: X / 255.0,
            memory_demanding=memory_demanding,
        )
class Resnet50_VGG2_ArcFace_2021(TensorflowTransformer):
    """
    Resnet50 backbone trained with the VGG2 database.
@@ -660,6 +825,60 @@ def resnet50_msceleb_arcface_20210521(
)
def iresnet50_msceleb_arcface_20210623(
    annotation_type, fixed_positions=None, memory_demanding=False
):
    """
    Get the iresnet50 pipeline which will crop the face to :math:`112 \times 112`
    pixels and use :py:class:`IResnet50_MsCeleb_ArcFace_20210623` to extract the features.

    Parameters
    ----------

    annotation_type: str
        Type of the annotations (e.g. ``eyes-center``)

    fixed_positions: dict
        Set this if the faces in your images are registered to fixed positions in the image

    memory_demanding: bool

    """
    return resnet_template(
        embedding=IResnet50_MsCeleb_ArcFace_20210623(memory_demanding=memory_demanding),
        annotation_type=annotation_type,
        fixed_positions=fixed_positions,
    )
def iresnet100_msceleb_arcface_20210623(
    annotation_type, fixed_positions=None, memory_demanding=False
):
    """
    Get the iresnet100 pipeline which will crop the face to :math:`112 \times 112`
    pixels and use :py:class:`IResnet100_MsCeleb_ArcFace_20210623` to extract the features.

    Parameters
    ----------

    annotation_type: str
        Type of the annotations (e.g. ``eyes-center``)

    fixed_positions: dict
        Set this if the faces in your images are registered to fixed positions in the image

    memory_demanding: bool

    """
    return resnet_template(
        embedding=IResnet100_MsCeleb_ArcFace_20210623(
            memory_demanding=memory_demanding
        ),
        annotation_type=annotation_type,
        fixed_positions=fixed_positions,
    )
def resnet50_vgg2_arcface_2021(
    annotation_type, fixed_positions=None, memory_demanding=False
):
......
@@ -152,6 +152,18 @@ def test_arcface_resnet50_msceleb_v1():
run_baseline("resnet50-msceleb-arcface-2021", target_scores=-0.0008105830382632018)
@pytest.mark.slow
@is_library_available("tensorflow")
def test_iresnet50_msceleb_idiap_20210623():
run_baseline("iresnet50-msceleb-idiap-20210623", target_scores=-0.00045892492346155667)
@pytest.mark.slow
@is_library_available("tensorflow")
def test_iresnet100_msceleb_idiap_20210623():
run_baseline("iresnet100-msceleb-idiap-20210623", target_scores=-0.00010635761699118174)
@pytest.mark.slow
@is_library_available("tensorflow")
def test_arcface_resnet50_vgg2_v1():
......
@@ -132,8 +132,10 @@ setup(
"inception-resnetv2-casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface:pipeline",
"inception-resnetv2-msceleb = bob.bio.face.config.baseline.inception_resnetv2_msceleb:pipeline",
"iresnet100 = bob.bio.face.config.baseline.iresnet100:pipeline",
"iresnet100-msceleb-idiap-20210623 = bob.bio.face.config.baseline.iresnet100_msceleb_arcface_20210623:pipeline",
"iresnet34 = bob.bio.face.config.baseline.iresnet34:pipeline",
"iresnet50 = bob.bio.face.config.baseline.iresnet50:pipeline",
"iresnet50-msceleb-idiap-20210623 = bob.bio.face.config.baseline.iresnet50_msceleb_arcface_20210623:pipeline",
"lda = bob.bio.face.config.baseline.lda:pipeline",
"lgbphs = bob.bio.face.config.baseline.lgbphs:pipeline",
"mobilenetv2-msceleb-arcface-2021 = bob.bio.face.config.baseline.mobilenetv2_msceleb_arcface_2021:pipeline",
@@ -153,8 +155,10 @@ setup(
"inception-resnetv2-casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface",
"inception-resnetv2-msceleb = bob.bio.face.config.baseline.inception_resnetv2_msceleb",
"iresnet100 = bob.bio.face.config.baseline.iresnet100",
"iresnet100-msceleb-idiap-20210623 = bob.bio.face.config.baseline.iresnet100_msceleb_arcface_20210623:pipeline",
"iresnet34 = bob.bio.face.config.baseline.iresnet34",
"iresnet50 = bob.bio.face.config.baseline.iresnet50",
"iresnet50-msceleb-idiap-20210623 = bob.bio.face.config.baseline.iresnet50_msceleb_arcface_20210623:pipeline",
"lda = bob.bio.face.config.baseline.lda",
"lgbphs = bob.bio.face.config.baseline.lgbphs",
"mobilenetv2-msceleb-arcface-2021 = bob.bio.face.config.baseline.mobilenetv2_msceleb_arcface_2021",
......
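The setup.py hunks register the new baselines under both entry-point groups that bob.bio uses: one mapping a short name to a pipeline object and one mapping it to a config module. A hypothetical resolution sketch, assuming the pipeline group is named ``bob.bio.pipeline`` as in bob.bio.base (the group names are not visible in these hunks):

import pkg_resources

# Resolve the newly registered short name to the object it points to, i.e.
# bob.bio.face.config.baseline.iresnet50_msceleb_arcface_20210623:pipeline.
entry_point = next(
    pkg_resources.iter_entry_points("bob.bio.pipeline", "iresnet50-msceleb-idiap-20210623")
)
pipeline = entry_point.load()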