diff --git a/bob/bio/face/config/baseline/iresnet100_elastic.py b/bob/bio/face/config/baseline/iresnet100_elastic.py new file mode 100644 index 0000000000000000000000000000000000000000..85fb6352755db3220834e79c919ca49f8025895d --- /dev/null +++ b/bob/bio/face/config/baseline/iresnet100_elastic.py @@ -0,0 +1,15 @@ +from bob.bio.face.embeddings.pytorch import iresnet100_elastic +from bob.bio.face.utils import lookup_config_from_database + + +annotation_type, fixed_positions, memory_demanding = lookup_config_from_database( + locals().get("database") +) + + +def load(annotation_type, fixed_positions=None, memory_demanding=False): + return iresnet100_elastic(annotation_type, fixed_positions, memory_demanding) + + +pipeline = load(annotation_type, fixed_positions, memory_demanding) + diff --git a/bob/bio/face/config/baseline/resnet101_msceleb_arcface_20210521.py b/bob/bio/face/config/baseline/resnet101_msceleb_arcface_20210521.py new file mode 100644 index 0000000000000000000000000000000000000000..acb456d368b761acb081bf1ed60676923eff73f4 --- /dev/null +++ b/bob/bio/face/config/baseline/resnet101_msceleb_arcface_20210521.py @@ -0,0 +1,15 @@ +from bob.bio.face.embeddings.tensorflow import resnet101_msceleb_arcface_20210521 +from bob.bio.face.utils import lookup_config_from_database + +annotation_type, fixed_positions, memory_demanding = lookup_config_from_database( + locals().get("database") +) + + +def load(annotation_type, fixed_positions=None, memory_demanding=False): + return resnet101_msceleb_arcface_20210521( + annotation_type, fixed_positions, memory_demanding + ) + + +pipeline = load(annotation_type, fixed_positions, memory_demanding) diff --git a/bob/bio/face/embeddings/mxnet.py b/bob/bio/face/embeddings/mxnet.py index 46e64715a2d2ff2eae384c047807cbb73442a5a2..6600266c9a1dc23bf84b8009a98c7bc6113f8b16 100644 --- a/bob/bio/face/embeddings/mxnet.py +++ b/bob/bio/face/embeddings/mxnet.py @@ -172,6 +172,7 @@ from bob.bio.base.pipelines.vanilla_biometrics import ( def 
arcface_template(embedding, annotation_type, fixed_positions=None): # DEFINE CROPPING cropped_image_size = (112, 112) + if annotation_type == "eyes-center" or annotation_type == "bounding-box": # Hard coding eye positions for backward consistency # cropped_positions = { @@ -182,6 +183,8 @@ def arcface_template(embedding, annotation_type, fixed_positions=None): {"topleft": (0, 0), "bottomright": cropped_image_size} ) + elif isinstance(annotation_type, list): + cropped_positions = cropped_positions_arcface(annotation_type) else: cropped_positions = dnn_default_cropping(cropped_image_size, annotation_type) diff --git a/bob/bio/face/embeddings/pytorch.py b/bob/bio/face/embeddings/pytorch.py index fa15915db28657f2f937a276b4f332b473406fc3..2c07a429c34861d6816726a60bd43bcadb10acdf 100644 --- a/bob/bio/face/embeddings/pytorch.py +++ b/bob/bio/face/embeddings/pytorch.py @@ -289,6 +289,57 @@ class IResnet100(PyTorchModel): self.place_model_on_device() +class IResnet100Elastic(PyTorchModel): + """ + iResnet100 model from the paper. + + Boutros, Fadi, et al. "ElasticFace: Elastic Margin Loss for Deep Face Recognition." arXiv preprint arXiv:2109.09416 (2021). 
+ """ + + def __init__( + self, + preprocessor=lambda x: (x - 127.5) / 128.0, + memory_demanding=False, + device=None, + **kwargs, + ): + + + urls = [ + "https://www.idiap.ch/software/bob/data/bob/bob.bio.face/master/pytorch/iresnet100-elastic.tar.gz", + "http://www.idiap.ch/software/bob/data/bob/bob.bio.face/master/pytorch/iresnet100-elastic.tar.gz", + ] + + filename = get_file( + "iresnet100-elastic.tar.gz", + urls, + cache_subdir="data/pytorch/iresnet100-elastic/", + file_hash="0ac36db3f0f94930993afdb27faa4f02", + extract=True, + ) + + path = os.path.dirname(filename) + config = os.path.join(path, "iresnet.py") + checkpoint_path = os.path.join(path, "iresnet100-elastic.pt") + + + super(IResnet100Elastic, self).__init__( + checkpoint_path, + config, + memory_demanding=memory_demanding, + preprocessor=preprocessor, + device=device, + **kwargs, + ) + + def _load_model(self): + model = imp.load_source("module", self.config).iresnet100(self.checkpoint_path) + self.model = model + + self.model.eval() + self.place_model_on_device() + + class FaceXZooModel(PyTorchModel): """ FaceXZoo models @@ -748,6 +799,36 @@ def iresnet100(annotation_type, fixed_positions=None, memory_demanding=False): ) +def iresnet100_elastic(annotation_type, fixed_positions=None, memory_demanding=False): + """ + Get the Resnet100 pipeline which will crop the face :math:`112 \\times 112` and + use the :py:class:`IResnet100Elastic` to extract the features + + + code referenced from https://raw.githubusercontent.com/nizhib/pytorch-insightface/master/insightface/iresnet.py + https://github.com/nizhib/pytorch-insightface + + + Parameters + ---------- + + annotation_type: str + Type of the annotations (e.g. 
`eyes-center') + + fixed_positions: dict + Set it if in your face images are registered to a fixed position in the image + + memory_demanding: bool + + """ + + return iresnet_template( + embedding=IResnet100Elastic(memory_demanding=memory_demanding), + annotation_type=annotation_type, + fixed_positions=fixed_positions, + ) + + def afffe_baseline(annotation_type, fixed_positions=None, memory_demanding=False): """ Get the AFFFE pipeline which will crop the face :math:`224 \\times 224` diff --git a/bob/bio/face/embeddings/tensorflow.py b/bob/bio/face/embeddings/tensorflow.py index 28e8d52fc8b4b079b100105e537bbf5450a06366..9834f34ee3d5ffb4afac50d5c96a6f1c7ee65f39 100644 --- a/bob/bio/face/embeddings/tensorflow.py +++ b/bob/bio/face/embeddings/tensorflow.py @@ -420,6 +420,73 @@ class Resnet50_MsCeleb_ArcFace_20210521(TensorflowTransformer): ) +class Resnet101_MsCeleb_ArcFace_20210521(TensorflowTransformer): + """ + Resnet101 Backbone trained with the MSCeleb 1M database. The bottleneck layer (a.k.a embedding) has 512d. + + The difference from this one to :any:`Resnet101_MsCeleb_ArcFace_2021` is the MSCeleb version used to train it. + This one uses 100% of the data pruned from annotators. + + + The configuration file used to trained is: + + .. 
warning:: + This configuration file might change in future releases + + + ```yaml + batch-size: 128 + face-size: 112 + face-output_size: 112 + n-classes: 83009 + + + ## Backbone + backbone: 'resnet50' + head: 'arcface' + s: 30 + bottleneck: 512 + m: 0.5 + + # Training parameters + solver: "sgd" + lr: 0.1 + dropout-rate: 0.5 + epochs: 300 + + + train-tf-record-path: "" + validation-tf-record-path: "" + + ``` + + + """ + + def __init__(self, memory_demanding=False, **kwargs): + + urls = [ + "https://www.idiap.ch/software/bob/data/bob/bob.bio.face/master/tf2/resnet101-msceleb-arcface_20210521.tar.gz", + "http://www.idiap.ch/software/bob/data/bob/bob.bio.face/master/tf2/resnet101-msceleb-arcface_20210521.tar.gz", + ] + + filename = get_file( + "resnet101-msceleb-arcface_20210521.tar.gz", + urls, + cache_subdir="data/tensorflow/resnet101-msceleb-arcface_20210521", + file_hash="c1b2124cb69186ff965f7e818f9f8641", + extract=True, + ) + checkpoint_path = os.path.dirname(filename) + + super(Resnet101_MsCeleb_ArcFace_20210521, self).__init__( + checkpoint_path, + preprocessor=lambda X: X / 255.0, + memory_demanding=memory_demanding, + **kwargs, + ) + + class IResnet50_MsCeleb_ArcFace_20210623(TensorflowTransformer): """ IResnet50 Backbone trained with the MSCeleb 1M database. The bottleneck layer (a.k.a embedding) has 512d. @@ -864,6 +931,33 @@ def resnet50_msceleb_arcface_20210521( ) +def resnet101_msceleb_arcface_20210521( + annotation_type, fixed_positions=None, memory_demanding=False +): + """ + Get the Resnet101 pipeline which will crop the face :math:`112 \\times 112` and + use the :py:class:`Resnet101_MsCeleb_ArcFace_20210521` to extract the features + + Parameters + ---------- + + annotation_type: str + Type of the annotations (e.g. 
`eyes-center') + + fixed_positions: dict + Set it if in your face images are registered to a fixed position in the image + + memory_demanding: bool + + """ + + return resnet_template( + embedding=Resnet101_MsCeleb_ArcFace_20210521(memory_demanding=memory_demanding), + annotation_type=annotation_type, + fixed_positions=fixed_positions, + ) + + def iresnet50_msceleb_arcface_20210623( annotation_type, fixed_positions=None, memory_demanding=False ): diff --git a/bob/bio/face/utils.py b/bob/bio/face/utils.py index 1185e61c9663aaa9b8a2e75e51116140e406d0d5..06da69d5c38c5279a957ebe439cedb2630d6cd4e 100644 --- a/bob/bio/face/utils.py +++ b/bob/bio/face/utils.py @@ -31,7 +31,7 @@ def lookup_config_from_database(database): return annotation_type, fixed_positions, memory_demanding -def cropped_positions_arcface(): +def cropped_positions_arcface(annotation_type="eyes-center"): """ Returns the 112 x 112 crop used in iResnet based models The crop follows the following rule: @@ -52,10 +52,22 @@ def cropped_positions_arcface(): """ - cropped_positions = { - "leye": (55, 72), - "reye": (55, 40), - } + if isinstance(annotation_type, list): + return [cropped_positions_arcface(item) for item in annotation_type] + + + if annotation_type == "eyes-center": + cropped_positions = { + "leye": (55, 72), + "reye": (55, 40), + } + elif annotation_type == "left-profile": + + cropped_positions = {"leye": (40, 30), "mouth": (85, 30)} + elif annotation_type == "right-profile": + return {"reye": (40, 82), "mouth": (85, 82)} + else: + raise ValueError(f"Annotations of the type `{annotation_type}` not supported") return cropped_positions diff --git a/setup.py b/setup.py index a391b9fd7e33a23674dd99b9ac95589e6cf50f92..7e53ab33c06b471f3df24e0e864ee50a57efd738 100644 --- a/setup.py +++ b/setup.py @@ -154,6 +154,7 @@ setup( "inception-resnetv2-casiawebface = bob.bio.face.config.baseline.inception_resnetv2_casiawebface:pipeline", "inception-resnetv2-msceleb = 
bob.bio.face.config.baseline.inception_resnetv2_msceleb:pipeline", "iresnet100 = bob.bio.face.config.baseline.iresnet100:pipeline", + "iresnet100-elastic = bob.bio.face.config.baseline.iresnet100_elastic:pipeline", "iresnet100-msceleb-idiap-20210623 = bob.bio.face.config.baseline.iresnet100_msceleb_arcface_20210623:pipeline", "iresnet34 = bob.bio.face.config.baseline.iresnet34:pipeline", "iresnet50 = bob.bio.face.config.baseline.iresnet50:pipeline", @@ -163,6 +164,7 @@ setup( "mobilenetv2-msceleb-arcface-2021 = bob.bio.face.config.baseline.mobilenetv2_msceleb_arcface_2021:pipeline", "resnet50-msceleb-arcface-2021 = bob.bio.face.config.baseline.resnet50_msceleb_arcface_2021:pipeline", "resnet50-msceleb-arcface-20210521 = bob.bio.face.config.baseline.resnet50_msceleb_arcface_20210521:pipeline", + "resnet101-msceleb-arcface-20210521 = bob.bio.face.config.baseline.resnet101_msceleb_arcface_20210521:pipeline", "resnet50-vgg2-arcface-2021 = bob.bio.face.config.baseline.resnet50_vgg2_arcface_2021:pipeline", "vgg16-oxford = bob.bio.face.config.baseline.vgg16_oxford:pipeline", "attentionnet = bob.bio.face.config.baseline.attention_net:pipeline", @@ -190,11 +192,13 @@ setup( "iresnet34 = bob.bio.face.config.baseline.iresnet34", "iresnet50 = bob.bio.face.config.baseline.iresnet50", "iresnet50-msceleb-idiap-20210623 = bob.bio.face.config.baseline.iresnet50_msceleb_arcface_20210623", + "iresnet100-elastic = bob.bio.face.config.baseline.iresnet100_elastic", "lda = bob.bio.face.config.baseline.lda", "lgbphs = bob.bio.face.config.baseline.lgbphs", "mobilenetv2-msceleb-arcface-2021 = bob.bio.face.config.baseline.mobilenetv2_msceleb_arcface_2021", "resnet50-msceleb-arcface-2021 = bob.bio.face.config.baseline.resnet50_msceleb_arcface_2021", "resnet50-msceleb-arcface-20210521 = bob.bio.face.config.baseline.resnet50_msceleb_arcface_20210521", + "resnet101-msceleb-arcface-20210521 = bob.bio.face.config.baseline.resnet101_msceleb_arcface_20210521", "resnet50-vgg2-arcface-2021 = 
bob.bio.face.config.baseline.resnet50_vgg2_arcface_2021", "vgg16-oxford = bob.bio.face.config.baseline.vgg16_oxford", "attentionnet = bob.bio.face.config.baseline.attention_net",