%% Cell type:markdown id: tags:

# 50 Shades of face

In this notebook we aim to evaluate the impact of different face crops on FR baselines.
For that we rely on the MOBIO dataset, which is not ideal, but it is small enough to run a bunch of experiments.

%% Cell type:code id: tags:
``` python
# Fetching resources
import bob.bio.base
from bob.bio.base.pipelines.vanilla_biometrics import execute_vanilla_biometrics
from bob.bio.base.pipelines.vanilla_biometrics import Distance
from bob.bio.base.pipelines.vanilla_biometrics import VanillaBiometricsPipeline
from bob.bio.face.database import MobioDatabase
from bob.bio.face.preprocessor import FaceCrop
from bob.extension import rc
from bob.pipelines import wrap
import os
import scipy.spatial
import bob.measure
import matplotlib.pyplot as plt
import bob.io.image

# Replaced by a real client below if we run on the grid; None runs everything serially
dask_client = None

### Experiment knobs
image_size = 112

# Vertical eye positions to be explored:
# the eyes will land on row image_size / height_denominator
height_denominators = [4.5, 4, 3.5, 3, 2.8]

# Inter-eye distances (in pixels) to be explored
eyes_distances = [30, 35, 40, 42, 45, 48]

output_path = "./50-shades"

######## CHANGE YOUR FEATURE EXTRACTOR HERE
from bob.bio.face.embeddings.mxnet_models import ArcFaceInsightFace
extractor_transformer = wrap(["sample"], ArcFaceInsightFace())

### CHANGE YOUR MATCHER HERE
algorithm = Distance(distance_function=scipy.spatial.distance.cosine, is_distance_function=True)

##### CHANGE YOUR DATABASE HERE
database = MobioDatabase(protocol="mobile0-male")
sample = database.references()[0][0]
```
%% Cell type:markdown id: tags:
## Setting up the grid
If you want to run this on the cluster, don't forget to `SETSHELL grid` before running the cell below.
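If you are not on an SGE cluster, keeping `dask_client = None` (set in the first cell) runs everything serially. A plain local Dask client should also work; the cell below is a minimal sketch, assuming `dask.distributed` is installed (the worker count is an arbitrary choice):

%% Cell type:code id: tags:

``` python
# Alternative to the SGE cluster: a local Dask client
from dask.distributed import Client

dask_client = Client(n_workers=4)  # arbitrary worker count; tune to your machine
```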
%% Cell type:code id: tags:

``` python
# Starting the dask client on the SGE grid
from dask.distributed import Client
from bob.pipelines.distributed.sge import SGEMultipleQueuesCluster

cluster = SGEMultipleQueuesCluster(min_jobs=1)
dask_client = Client(cluster)
```
%% Cell type:markdown id: tags:

## Running different face crops

Here we vary `eyes_distances` and the ratio `image_size/height_denominator`,
generating the transformers and plotting the outcome.
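A note on the arithmetic (positions in bob are `(y, x)`): the left eye is pinned at column `image_size / left_eye_offset` with `left_eye_offset = 1.49`, and the right eye should land `eyes_distance` pixels to its left. Solving `image_size / right_eye_offset = image_size / left_eye_offset - eyes_distance` for the denominator gives the expression used in the code below: `right_eye_offset = image_size * left_eye_offset / (image_size - eyes_distance * left_eye_offset)`. For example, with `image_size = 112` and `eyes_distance = 42`, the left eye sits at column `112 / 1.49 ≈ 75.2`, `right_eye_offset ≈ 3.38`, and the right eye at column `112 / 3.38 ≈ 33.2 = 75.2 - 42`.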
%% Cell type:code id: tags:

``` python
%matplotlib widget

annotation_type = "eyes-center"
fixed_positions = None


def get_cropers(final_size=112,
                height_denominators=[5, 4, 3, 2],
                eyes_distances=[30, 35, 40, 42, 45]):
    """Builds one FaceCrop transformer per (eyes_distance, height_denominator) pair."""

    left_eye_offset = 1.49

    transformers = []
    for e in eyes_distances:
        for h in height_denominators:
            # Place the right eye `e` pixels to the left of the left eye
            # (see the derivation in the cell above)
            right_eye_offset = (final_size * left_eye_offset) / (final_size - e * left_eye_offset)

            RIGHT_EYE_POS = (final_size / h, final_size / right_eye_offset)
            LEFT_EYE_POS = (final_size / h, final_size / left_eye_offset)

            cropped_positions = {
                "leye": LEFT_EYE_POS,
                "reye": RIGHT_EYE_POS,
            }

            preprocessor_transformer = FaceCrop(cropped_image_size=(112, 112),
                                                cropped_positions=cropped_positions,
                                                color_channel='rgb',
                                                fixed_positions=fixed_positions)

            # Forward the sample annotations to FaceCrop.transform when cropping is dynamic
            transform_extra_arguments = (None if (cropped_positions is None or fixed_positions is not None)
                                         else (("annotations", "annotations"),))

            preprocessor_transformer = wrap(["sample"],
                                            preprocessor_transformer,
                                            transform_extra_arguments=transform_extra_arguments)

            transformers.append(preprocessor_transformer)

    return transformers


def plot_faces(transformers, database, subplot_shape, fnmrs=None):
    """Plots the first reference sample cropped by each transformer, titling each
    panel with the (reye) - (leye) positions and, if given, the corresponding 1-FNMR."""

    fig, axis = plt.subplots(subplot_shape[0], subplot_shape[1])
    offset = 0
    for ax_h in axis:
        for ax_w in ax_h:
            # Picking the first sample
            sample = database.references()[0][0]

            preprocessor_transformer = transformers[offset]
            cropped = preprocessor_transformer.transform([sample])[0]
            cropped = bob.io.image.to_matplotlib(cropped.data).astype("uint8")

            ax_w.imshow(cropped)

            reye_y = round(preprocessor_transformer.estimator.cropped_positions["reye"][0], 2)
            reye_x = round(preprocessor_transformer.estimator.cropped_positions["reye"][1], 2)
            leye_y = round(preprocessor_transformer.estimator.cropped_positions["leye"][0], 2)
            leye_x = round(preprocessor_transformer.estimator.cropped_positions["leye"][1], 2)

            if fnmrs is None:
                title = f"({reye_y},{reye_x}) - ({leye_y},{leye_x})"
            else:
                title = f"({reye_y},{reye_x}) - ({leye_y},{leye_x}) = {fnmrs[offset]}"

            ax_w.set_title(f"{title}", fontsize=5)
            ax_w.axis('off')
            offset += 1


# One row per eyes-distance, one column per height denominator
subplot_shape = (len(eyes_distances), len(height_denominators))

transformers = get_cropers(final_size=image_size,
                           height_denominators=height_denominators,
                           eyes_distances=eyes_distances)

plot_faces(transformers, database, subplot_shape)
```
%% Output
%% Cell type:markdown id: tags:

## Run vanilla biometrics

Here we run the Vanilla Biometrics pipeline once per transformer, collecting `1-FNMR@FMR=0.001` for each crop and plotting the results.
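FNMR is the false non-match rate (genuine pairs wrongly rejected) and FMR the false match rate (impostor pairs wrongly accepted); we fix the threshold where the FMR on the development scores equals 0.001 and report `1 - FNMR` at that threshold, so higher is better. A minimal, self-contained sketch of that computation on synthetic scores (the score distributions are made up for illustration):

%% Cell type:code id: tags:

``` python
# Illustration of the 1-FNMR@FMR=0.001 metric on synthetic scores
import numpy
import bob.measure

rng = numpy.random.RandomState(0)
neg = rng.normal(0.0, 1.0, 1000)  # impostor (negative) scores, made up
pos = rng.normal(3.0, 1.0, 1000)  # genuine (positive) scores, made up

# Threshold at which the false match rate on `neg` equals 0.001
far_thres = bob.measure.far_threshold(neg, pos, 0.001)
fpr, fnr = bob.measure.fprfnr(neg, pos, far_thres)
print(f"1-FNMR@FMR=0.001 = {1 - fnr:.2f}")
```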
%% Cell type:code id: tags:

``` python
"""
execute_vanilla_biometrics(
    pipeline,
    database,
    dask_client,
    groups,
    output,
    write_metadata_scores,
    checkpoint,
    dask_partition_size,
    dask_n_workers,
)
"""
from sklearn.pipeline import make_pipeline

write_metadata_scores = False
checkpoint = False
dask_partition_size = None
dask_n_workers = 15

### Preparing the pipeline
fnmrs = []
for t in transformers:
    # Chain the Transformers together
    transformer = make_pipeline(t, extractor_transformer)

    # Assemble the Vanilla Biometric pipeline and execute
    pipeline = VanillaBiometricsPipeline(transformer, algorithm)

    execute_vanilla_biometrics(
        pipeline,
        database,
        dask_client,
        ["dev"],
        output_path,
        write_metadata_scores,
        checkpoint,
        dask_partition_size,
        dask_n_workers,
        allow_scoring_with_all_biometric_references=True
    )

    scores_dev = os.path.join(output_path, "scores-dev")

    # Computing 1-FNMR at the threshold where FMR=0.001
    neg, pos = bob.bio.base.score.load.split_four_column(scores_dev)
    far_thres = bob.measure.far_threshold(neg, pos, 0.001)
    fpr, fnr = bob.measure.fprfnr(neg, pos, far_thres)
    fnmr_1 = round(1 - fnr, 2)
    fnmrs.append(fnmr_1)

plot_faces(transformers, database, subplot_shape, fnmrs)
```
%% Output

There's no data to train background model.For the rest of the execution it will be assumed that the pipeline is stateless.
[the line above is repeated once per pipeline run, 30 times in total]
%% Cell type:code id: tags:

``` python
# Shutting down the dask client
dask_client.shutdown()
```