Commit 5920462a authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

Update

parent fa042420
Pipeline #55555 failed with stage
in 1 minute and 30 seconds
......@@ -78,12 +78,44 @@ class MorphTorchDataset(DemoraphicTorchDataset):
self, protocol, database_path, database_extension=".h5", transform=None
):
bob_dataset = MorphDatabase(
self.bob_dataset = MorphDatabase(
protocol=protocol,
dataset_original_directory=database_path,
dataset_original_extension=database_extension,
)
super().__init__(bob_dataset, transform=transform)
# Morph dataset has an intersection in between zprobes and treferences
self.excluding_list = [
"190276",
"332158",
"111942",
"308129",
"334074",
"350814",
"131677",
"168724",
"276055",
"275589",
"286810",
]
self.bucket = [s for sset in self.bob_dataset.zprobes() for s in sset]
self.bucket += [
s
for sset in self.bob_dataset.treferences()
for s in sset
if sset.subject_id not in self.excluding_list
]
# Defining keys and labels
keys = [b.subject_id for b in self.bucket]
self.labels = dict(zip(keys, range(len(keys))))
self.demographic_keys = self.load_demographics()
self.transform = transform
# super().__init__(bob_dataset, transform=transform)
def load_demographics(self):
......@@ -91,7 +123,11 @@ class MorphTorchDataset(DemoraphicTorchDataset):
metadata_keys = set(
[f"{sset.rac}-{sset.sex}" for sset in self.bob_dataset.zprobes()]
+ [f"{sset.rac}-{sset.sex}" for sset in self.bob_dataset.treferences()]
+ [
f"{sset.rac}-{sset.sex}"
for sset in self.bob_dataset.treferences()
if sset.subject_id not in self.excluding_list
]
)
metadata_keys = dict(zip(metadata_keys, range(len(metadata_keys))))
return metadata_keys
......
import click
from bob.bio.face.embeddings.pytorch import PyTorchModel, iresnet_template
from bob.bio.demographics.fair_transformers import RunnableTransformer
from functools import partial
import os
from bob.extension.scripts.click_helper import ResourceOption
from bob.bio.base.pipelines.vanilla_biometrics import checkpoint_vanilla_biometrics
from bob.bio.base.pipelines.vanilla_biometrics import execute_vanilla_biometrics
from bob.pipelines.distributed import VALID_DASK_CLIENT_STRINGS
from bob.bio.demographics.regularizers import AVAILABLE_BACKBONES
# BACKBONES = dict()
# BACKBONES["iresnet100"] = iresnet100
@click.command()
@click.argument("BACKBONE")
@click.argument("CHECKPOINT-PATH")
@click.argument("EXPERIMENT-PATH")
@click.option(
    "--dask-client",
    "-l",
    entry_point_group="dask.client",
    string_exceptions=VALID_DASK_CLIENT_STRINGS,
    default="single-threaded",
    help="Dask client for the execution of the pipeline.",
    cls=ResourceOption,
)
def meds_experiment(backbone, checkpoint_path, experiment_path, dask_client, **kwargs):
    """Score the MEDS verification protocol with a pretrained backbone.

    BACKBONE is a key of ``AVAILABLE_BACKBONES``; CHECKPOINT-PATH holds the
    weights loaded into that backbone; pipeline checkpoints and score files
    are written under EXPERIMENT-PATH.
    """
    from bob.bio.face.database import MEDSDatabase

    # Geometry expected by the embedding template.
    annotation_type = "eyes-center"
    fixed_positions = None
    memory_demanding = False

    # Resolve the architecture constructor and bind the checkpoint to it.
    constructor = AVAILABLE_BACKBONES[backbone]["structure"]
    pipeline = iresnet_template(
        embedding=RunnableTransformer(
            partial(constructor, pretrained=checkpoint_path),
            memory_demanding=memory_demanding,
        ),
        annotation_type=annotation_type,
        fixed_positions=fixed_positions,
    )

    # Checkpoint the transformer under the experiment path, but point the
    # face-crop cache one level up so it is shared across experiments.
    pipeline = checkpoint_vanilla_biometrics(pipeline, base_dir=experiment_path)
    pipeline.transformer[0].features_dir = os.path.join(
        experiment_path, "../face-crop/"
    )

    database = MEDSDatabase(protocol="verification_fold1")

    execute_vanilla_biometrics(
        pipeline,
        database,
        dask_client,
        ["dev", "eval"],
        experiment_path,
        write_metadata_scores=True,
        checkpoint=False,
        dask_partition_size=100,
        dask_n_workers=10,
        allow_scoring_with_all_biometric_references=True,
    )


if __name__ == "__main__":
    meds_experiment()
import click
from bob.bio.face.embeddings.pytorch import PyTorchModel, iresnet_template
# from bob.learn.pytorch.architectures.iresnet import iresnet100
from bob.bio.demographics.fair_transformers import RunnableTransformer
from functools import partial
import os
from bob.extension.scripts.click_helper import ResourceOption
from bob.bio.base.pipelines.vanilla_biometrics import checkpoint_vanilla_biometrics
from bob.bio.base.pipelines.vanilla_biometrics import execute_vanilla_biometrics
from bob.pipelines.distributed import VALID_DASK_CLIENT_STRINGS
from bob.bio.demographics.regularizers import AVAILABLE_BACKBONES
@click.command()
@click.argument("BACKBONE")
@click.argument("CHECKPOINT-PATH")
@click.argument("EXPERIMENT-PATH")
@click.option(
    "--dask-client",
    "-l",
    entry_point_group="dask.client",
    string_exceptions=VALID_DASK_CLIENT_STRINGS,
    default="single-threaded",
    help="Dask client for the execution of the pipeline.",
    cls=ResourceOption,
)
def morph_experiment(backbone, checkpoint_path, experiment_path, dask_client, **kwargs):
    """Score the MORPH verification protocol with a pretrained backbone.

    BACKBONE is a key of ``AVAILABLE_BACKBONES``; CHECKPOINT-PATH holds the
    weights loaded into that backbone; pipeline checkpoints and score files
    are written under EXPERIMENT-PATH.
    """
    from bob.bio.face.database import MorphDatabase

    # Geometry expected by the embedding template.
    annotation_type = "eyes-center"
    fixed_positions = None
    memory_demanding = False

    # Resolve the architecture constructor and bind the checkpoint to it.
    constructor = AVAILABLE_BACKBONES[backbone]["structure"]
    pipeline = iresnet_template(
        embedding=RunnableTransformer(
            partial(constructor, pretrained=checkpoint_path),
            memory_demanding=memory_demanding,
        ),
        annotation_type=annotation_type,
        fixed_positions=fixed_positions,
    )

    # Checkpoint the transformer under the experiment path, but point the
    # face-crop cache one level up so it is shared across experiments.
    pipeline = checkpoint_vanilla_biometrics(pipeline, base_dir=experiment_path)
    pipeline.transformer[0].features_dir = os.path.join(
        experiment_path, "../face-crop/"
    )

    database = MorphDatabase(protocol="verification_fold1")

    execute_vanilla_biometrics(
        pipeline,
        database,
        dask_client,
        ["dev", "eval"],
        experiment_path,
        write_metadata_scores=True,
        checkpoint=False,
        dask_partition_size=100,
        dask_n_workers=10,
        allow_scoring_with_all_biometric_references=True,
    )


if __name__ == "__main__":
    morph_experiment()
from bob.bio.face.embeddings.pytorch import PyTorchModel, iresnet_template
from bob.learn.pytorch.architectures.iresnet import iresnet100
from bob.bio.demographics.fair_transformers import RunnableTransformer
from functools import partial
# Vanilla-biometrics configuration file: an iresnet100 embedding pipeline
# loaded from an orthogonality-regularized checkpoint, evaluated on MEDS.
annotation_type = "eyes-center"
fixed_positions = None
memory_demanding = False

# NOTE(review): hard-coded Idiap-local checkpoint paths — this config only
# runs on that filesystem.  The alternative below was the non-regularized run.
# checkpoint_path = "/idiap/temp/tpereira/2.FRDemographics/regularization/models/orthogonality_hypothesis/meds/iresnet100.pth"
checkpoint_path = "/idiap/temp/tpereira/2.FRDemographics/regularization/models/orthogonality_hypothesis/meds_identity-10.0_orthogonality-1.0/iresnet100.pth"

# Wrap the checkpoint-loaded iresnet100 in the standard embedding template.
pipeline = iresnet_template(
    embedding=RunnableTransformer(
        partial(iresnet100, pretrained=checkpoint_path),
        memory_demanding=memory_demanding,
    ),
    annotation_type=annotation_type,
    fixed_positions=fixed_positions,
)

#### DATABASE
from bob.bio.face.database import MEDSDatabase

protocol = "verification_fold1"
database = MEDSDatabase(protocol=protocol)

# output = (
#     "/remote/idiap.svm/user.active/tpereira/gitlab/bob/bob.nightlies/vanilla-callback"
# )
from bob.bio.demographics.datasets import MedsTorchDataset, MorphTorchDataset
# https://pytorch.org/docs/stable/data.html
from torch.utils.data import DataLoader
from bob.extension import rc
import os
import bob.io.image
import torch
from functools import partial
import torchvision.transforms as transforms
import click
import yaml
from bob.bio.demographics.regularizers.trainers import mine_trainer
@click.command()
@click.argument("OUTPUT_DIR")
@click.option("--identity-factor", default=1.0, help="Identity factor")
@click.option("--mine-factor", default=1.0, help="MINE factor")
@click.option("--max-epochs", default=600, help="Max number of epochs")
@click.option(
    "--demographic-epochs",
    default=100,
    help="Number of epochs to train the demographic classifier",
)
@click.option(
    "--identity-epochs",
    default=200,
    help="Number of epochs to train the identity classifier",
)
@click.option("--batch-size", default=64, help="Batch size")
@click.option("--backbone", default="iresnet100", help="Backbone")
def mine_meds(
    output_dir,
    identity_factor,
    mine_factor,
    max_epochs,
    demographic_epochs,
    identity_epochs,
    batch_size,
    backbone,
):
    """Train a MINE-regularized backbone on the MEDS dataset.

    All training artifacts are written under OUTPUT_DIR.
    """
    from bob.bio.demographics.regularizers import AVAILABLE_BACKBONES

    # Pre-cropped images produced by the "samplewrapper" pipeline.
    database_path = os.path.join(
        rc.get("bob.bio.demographics.directory"), "meds", "samplewrapper"
    )

    # bob images are channel-first; convert to channel-last float, then
    # tensorize and scale [0, 255] to roughly [-1, 1].
    preprocess = transforms.Compose(
        [
            lambda x: bob.io.image.to_matplotlib(x.astype("float32")),
            transforms.ToTensor(),
            lambda x: (x - 127.5) / 128.0,
        ]
    )

    dataset = MedsTorchDataset(
        protocol="verification_fold1",
        database_path=database_path,
        transform=preprocess,
    )
    loader = DataLoader(
        dataset, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=2
    )

    # NOTE(review): per the registry in bob.bio.demographics.regularizers,
    # AVAILABLE_BACKBONES values look like dicts ({"structure", "prior"}),
    # so calling the entry directly may raise TypeError — confirm.
    backbone_model = AVAILABLE_BACKBONES[backbone]()

    mine_trainer(
        output_dir,
        identity_factor,
        mine_factor,
        max_epochs,
        demographic_epochs,
        identity_epochs,
        batch_size,
        loader,
        backbone_model,
        preprocess,
    )


if __name__ == "__main__":
    mine_meds()
from bob.bio.demographics.datasets import MedsTorchDataset, MorphTorchDataset
# https://pytorch.org/docs/stable/data.html
from torch.utils.data import DataLoader
from bob.extension import rc
import os
import bob.io.image
import torch
from functools import partial
import torchvision.transforms as transforms
import click
import yaml
from bob.bio.demographics.regularizers.trainers import ortogonality_trainer
@click.command()
@click.argument("OUTPUT_DIR")
@click.option("--identity-factor", default=1.0, help="Identity factor")
# Fixed typo in the user-facing help text ("Ortogonality" -> "Orthogonality");
# the option name itself was already spelled correctly.
@click.option("--orthogonality-factor", default=1.0, help="Orthogonality factor")
@click.option("--max-epochs", default=600, help="Max number of epochs")
@click.option(
    "--demographic-epochs",
    default=100,
    help="Number of epochs to train the demographic classifier",
)
@click.option(
    "--identity-epochs",
    default=200,
    help="Number of epochs to train the identity classifier",
)
@click.option("--batch-size", default=64, help="Batch size")
@click.option("--backbone", default="iresnet100", help="Backbone")
def ortogonality_meds(
    output_dir,
    identity_factor,
    orthogonality_factor,
    max_epochs,
    demographic_epochs,
    identity_epochs,
    batch_size,
    backbone,
):
    """Train a backbone on MEDS with the orthogonality regularizer.

    All training artifacts are written under OUTPUT_DIR.
    """
    from bob.bio.demographics.regularizers import AVAILABLE_BACKBONES

    # Pre-cropped images produced by the "samplewrapper" pipeline.
    database_path = os.path.join(
        rc.get("bob.bio.demographics.directory"), "meds", "samplewrapper"
    )

    # bob images are channel-first; convert to channel-last float, then
    # tensorize and scale [0, 255] to roughly [-1, 1].
    transform = transforms.Compose(
        [
            lambda x: bob.io.image.to_matplotlib(x.astype("float32")),
            # Augmentations deliberately disabled:
            # transforms.ToPILImage(mode="RGB"),
            # transforms.RandomHorizontalFlip(p=0.5),
            # transforms.RandomRotation(degrees=(-3, 3)),
            # transforms.RandomAutocontrast(p=0.1),
            transforms.ToTensor(),
            lambda x: (x - 127.5) / 128.0,
        ]
    )

    dataset = MedsTorchDataset(
        protocol="verification_fold1",
        database_path=database_path,
        transform=transform,
    )
    train_dataloader = DataLoader(
        dataset, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=2
    )

    # NOTE(review): per the registry in bob.bio.demographics.regularizers,
    # AVAILABLE_BACKBONES values look like dicts ({"structure", "prior"}),
    # so calling the entry directly may raise TypeError — confirm intended key.
    backbone_model = AVAILABLE_BACKBONES[backbone]()

    ortogonality_trainer(
        output_dir,
        identity_factor,
        orthogonality_factor,
        max_epochs,
        demographic_epochs,
        identity_epochs,
        batch_size,
        train_dataloader,
        backbone_model,
        transform,
    )


if __name__ == "__main__":
    ortogonality_meds()
from bob.bio.demographics.datasets import MedsTorchDataset, MorphTorchDataset
# https://pytorch.org/docs/stable/data.html
from torch.utils.data import DataLoader
from bob.extension import rc
import os
import bob.io.image
import torch
from functools import partial
import torchvision.transforms as transforms
import click
import yaml
from bob.bio.demographics.regularizers.trainers import ortogonality_trainer
@click.command()
@click.argument("OUTPUT_DIR")
@click.option("--identity-factor", default=1.0, help="Identity factor")
# Fixed typo in the user-facing help text ("Ortogonality" -> "Orthogonality");
# the option name itself was already spelled correctly.
@click.option("--orthogonality-factor", default=1.0, help="Orthogonality factor")
@click.option("--max-epochs", default=600, help="Max number of epochs")
@click.option(
    "--demographic-epochs",
    default=100,
    help="Number of epochs to train the demographic classifier",
)
@click.option(
    "--identity-epochs",
    default=200,
    help="Number of epochs to train the identity classifier",
)
@click.option("--batch-size", default=64, help="Batch size")
@click.option("--backbone", default="iresnet100", help="Backbone")
def ortogonality_morph(
    output_dir,
    identity_factor,
    orthogonality_factor,
    max_epochs,
    demographic_epochs,
    identity_epochs,
    batch_size,
    backbone,
):
    """Train a backbone on MORPH with the orthogonality regularizer.

    All training artifacts are written under OUTPUT_DIR.
    """
    from bob.bio.demographics.regularizers import AVAILABLE_BACKBONES

    # Pre-cropped images produced by the "samplewrapper" pipeline.
    database_path = os.path.join(
        rc.get("bob.bio.demographics.directory"), "morph", "samplewrapper"
    )

    # bob images are channel-first; convert to channel-last float, then
    # tensorize and scale [0, 255] to roughly [-1, 1].
    transform = transforms.Compose(
        [
            lambda x: bob.io.image.to_matplotlib(x.astype("float32")),
            # Augmentations deliberately disabled:
            # transforms.ToPILImage(mode="RGB"),
            # transforms.RandomHorizontalFlip(p=0.5),
            # transforms.RandomRotation(degrees=(-3, 3)),
            # transforms.RandomAutocontrast(p=0.1),
            transforms.ToTensor(),
            lambda x: (x - 127.5) / 128.0,
        ]
    )

    dataset = MorphTorchDataset(
        protocol="verification_fold1",
        database_path=database_path,
        transform=transform,
    )
    train_dataloader = DataLoader(
        dataset, batch_size=batch_size, shuffle=True, pin_memory=True, num_workers=2
    )

    # NOTE(review): per the registry in bob.bio.demographics.regularizers,
    # AVAILABLE_BACKBONES values look like dicts ({"structure", "prior"}),
    # so calling the entry directly may raise TypeError — confirm intended key.
    backbone_model = AVAILABLE_BACKBONES[backbone]()

    ortogonality_trainer(
        output_dir,
        identity_factor,
        orthogonality_factor,
        max_epochs,
        demographic_epochs,
        identity_epochs,
        batch_size,
        train_dataloader,
        backbone_model,
        transform,
    )


if __name__ == "__main__":
    ortogonality_morph()
# Registry of available iresnet backbones.  Each entry maps a name to:
#   "structure": the bare architecture constructor
#   "prior":     the constructor pre-bound to a checkpoint path
# TODO: organize these checkpoints (paths are Idiap-local).
from bob.learn.pytorch.architectures.iresnet import iresnet34, iresnet100, iresnet50
from functools import partial

AVAILABLE_BACKBONES = {
    "iresnet100": {
        "structure": iresnet100,
        "prior": partial(
            iresnet100,
            "/idiap/temp/tpereira/bob/data/pytorch/iresnet-91a5de61/iresnet100-73e07ba7.pth",
        ),
    },
    "iresnet50": {
        "structure": iresnet50,
        "prior": partial(
            iresnet50,
            "/idiap/temp/tpereira/bob/data/pytorch/iresnet-91a5de61/iresnet50-7f187506.pth",
        ),
    },
    "iresnet34": {
        "structure": iresnet34,
        "prior": partial(
            iresnet34,
            "/idiap/temp/tpereira/bob/data/pytorch/iresnet-91a5de61/iresnet34-5b0d0e90.pth",
        ),
    },
}
......@@ -4,8 +4,9 @@ import pytorch_lightning as pl
import torch
import torch.nn.functional as F
import numpy as np
import torch.nn as nn
import copy
import math
class DemographicRegularHead(Module):
......@@ -42,7 +43,7 @@ def switch(model, flag):
class OrthogonalityModel(BackboneHeadModel):
"""
Here we hypothesize that the sensitive attribute is orthogonal
to the identity
to the identity attribute