From 654a91cb2681b5e51fce7a634a81cca200158b69 Mon Sep 17 00:00:00 2001
From: Laurent COLBOIS <lcolbois@idiap.ch>
Date: Mon, 10 May 2021 11:34:19 +0200
Subject: [PATCH] Add bobrc entry pointing towards the current directory, for
 relative paths

---
 README.rst                                    | 60 ++++++++++++++++---
 .../config/database/multipie/P.py             |  3 +-
 .../config/database/synmultipie/E.py          |  3 +-
 .../config/database/synmultipie/P.py          |  3 +-
 .../config/database/synmultipie/U.py          |  3 +-
 .../config/project/multipie_E.py              |  3 +-
 .../config/project/multipie_P.py              |  3 +-
 .../stylegan2/generator.py                    |  4 ++
 8 files changed, 68 insertions(+), 14 deletions(-)

diff --git a/README.rst b/README.rst
index eb3cf4f..20ae83f 100644
--- a/README.rst
+++ b/README.rst
@@ -18,31 +18,75 @@
  New package
 =============
 
-This package is part of the signal-processing and machine learning toolbox Bob_.
+This package is part of the signal-processing and machine learning toolbox Bob_. It contains source code to reproduce experiments from the article 
+*On the use of automatically generated synthetic image datasets for benchmarking face recognition*.
 
-.. todo::
+It mainly contains tools to perform the following operations:
 
-   **Complete the sentence above to include one phrase about your
-   package!  Once this is done, delete this to-do!**
+1. Projection of a face dataset into StyleGAN2's latent space (`./bin/project_db.py`)
+2. Computation of semantic editing latent directions from those projections (`./bin/latent_analysis.py`)
+3. Generation of a synthetic dataset using the precomputed latent directions (`./bin/generate_db.py`)
+4. Running a face recognition benchmark experiment on the synthetic dataset (`bob bio pipelines vanilla-biometrics`) 
 
 
 Installation
 ------------
 
-Complete bob's `installation`_ instructions. Then, to install this
-package, run::
+This project contains two distinct conda environments:
 
-  $ conda install bob.paper.ijcb2021_synthetic_dataset
+* `generation_env.yml` This environment is based on Bob 8 and Tensorflow 1, and is used for step 1 to 3 (dataset projection, latent analysis and database generation)
+* `benchmark_env.yml` This environment is based on Bob 9 and Tensorflow 2, and is used for step 4 (running the benchmark experiments).
 
+To install everything correctly, after pulling this repository from Gitlab, you need to 
+
+1. Install both environments
+::
+
+   conda env create -f generation_env.yml
+   conda env create -f benchmark_env.yml
+
+2. Run `buildout` to extend the generation environment with the tools available in this repository::
+   conda activate synface # Activate the generation env.
+   buildout -c buildout.cfg # Run buildout
+
+This second step creates a `bin` folder containing in particular
+
+1. `./bin/python` Custom Python executable containing the generation env. extended with `bob.paper.ijcb2021_synthetic_dataset`
+2. `./bin/project_db.py` Dataset projection script (entry point)
+3. `./bin/latent_analysis.py` Script for computing latent directions (entry point)
+4. `./bin/generate_db.py` Synthetic dataset generation script (entry point)
+5. `./bin/download_models.py` Utility to download required pretrained models (entry point)
 
 How to run
 ----------
 
+Download model dependencies
+***************************
+This project relies on several preexisting pretrained models:
+
+* **DLIB Face Landmark detector** for cropping and aligning the projected faces exactly as in FFHQ. (`Example <http://dlib.net/face_landmark_detection.py.html>`_)
+* **StyleGAN2** as the main face synthesis network. (`Original paper <https://arxiv.org/abs/1912.04958>`_, `Official repository <https://github.com/NVlabs/stylegan2>`_). We are using Config-F, trained on FFHQ at resolution 1024 x 1024
+* A pretrained **VGG16** model, used to compute a perceptual loss between projected and target image (`Original paper <https://arxiv.org/abs/1801.03924>`_)
+
+In order to download those models, one must specify the destination path in the `~/.bobrc` file, through the following commands:
+::
+
+   conda activate synface
+   bob config set sg2_morph.dlib_lmd_path </path/to/dlib/landmark/detector.dat>
+   bob config set sg2_morph.sg2_path </path/to/stylegan2/pretrained/model.pkl>
+   bob config set sg2_morph.vgg16_path </path/to/vgg16/pretrained/model.pkl>
+
+This should then enable to download the models once and for all by running
+::
+
+   ./bin/download_models.py
+
 Prepare folder configuration
 *********************************
 
 ::
-
+    # Absolute path of this repo, can be useful to launch execution on a grid due to some relative paths in the code
+    bob config set bob.paper.ijcb2021_synthetic_dataset.path <path_of_this_repo>
     # Folder to store projected Multi-PIE latent projections
     bob config set bob.synface.multipie_projections <path_to_folder>
     # Folder containing Multi-PIE images
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/P.py b/bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/P.py
index cf00810..3434f02 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/P.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/P.py
@@ -13,7 +13,8 @@ from sklearn.pipeline import make_pipeline
 # Kept cameras : ["08_0", "13_0", "14_0", "05_1", "05_0", "04_1", "19_0"]
 
 def get_database():
-    csv_dir = './bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/protocols'
+    csv_dir = os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                           'bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/protocols')
                                         
     database = CSVDataset(dataset_protocol_path=csv_dir,
                           protocol_name='P_center',
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/E.py b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/E.py
index 39b6085..52c6598 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/E.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/E.py
@@ -6,7 +6,8 @@ import os
 def get_database(protocol):
 
     image_dir = rc['bob.db.synmultipie.directory']
-    csv_dir = './bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols'
+    csv_dir = os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                           'bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols')
 
     sample_loader = CSVToSampleLoaderBiometrics(data_loader=bob.io.image.load,
                                       dataset_original_directory = image_dir,
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/P.py b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/P.py
index d7fb229..eb06ab2 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/P.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/P.py
@@ -5,7 +5,8 @@ from bob.bio.base.database import CSVToSampleLoaderBiometrics, CSVDataset
 def get_database(protocol):
 
     image_dir = rc['bob.db.synmultipie.directory']
-    csv_dir = './bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols'
+    csv_dir = os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                           'bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols')
 
     sample_loader = CSVToSampleLoaderBiometrics(data_loader=bob.io.image.load,
                                       dataset_original_directory = image_dir,
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/U.py b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/U.py
index a9713fe..3de2d8c 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/U.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/U.py
@@ -5,7 +5,8 @@ from bob.bio.base.database import CSVToSampleLoaderBiometrics, CSVDataset
 def get_database(protocol):
 
     image_dir = rc['bob.db.synmultipie.directory']
-    csv_dir = './bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols'
+    csv_dir = os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                           'bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols')
 
     sample_loader = CSVToSampleLoaderBiometrics(data_loader=bob.io.image.load,
                                       dataset_original_directory = image_dir,
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_E.py b/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_E.py
index b7030b8..3e07ce7 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_E.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_E.py
@@ -4,7 +4,8 @@ from bob.bio.base.database import FileListBioDatabase
 
 
 database = FileListBioDatabase(
-    filelists_directory='./bob/paper/ijcb2021_synthetic_dataset/config/project/protocols',
+    filelists_directory=os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                                     'bob/paper/ijcb2021_synthetic_dataset/config/project/protocols'),
     name='multipie',
     protocol='E_lit',
     original_directory=rc['bob.db.multipie.directory'],
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_P.py b/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_P.py
index 337399c..8e253be 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_P.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_P.py
@@ -3,7 +3,8 @@ from bob.extension import rc
 from bob.bio.base.database import FileListBioDatabase
 
 database = FileListBioDatabase(
-    filelists_directory='./bob/paper/ijcb2021_synthetic_dataset/config/project/protocols',
+    filelists_directory=os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                                     'bob/paper/ijcb2021_synthetic_dataset/config/project/protocols'),
     name='multipie',
     protocol='P_center_lit',
     original_directory=rc['bob.db.multipie.directory'],
diff --git a/bob/paper/ijcb2021_synthetic_dataset/stylegan2/generator.py b/bob/paper/ijcb2021_synthetic_dataset/stylegan2/generator.py
index 5605754..96ab680 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/stylegan2/generator.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/stylegan2/generator.py
@@ -6,7 +6,11 @@ import numpy as np
 import bob.io.image
 import tensorflow as tf
 import pickle
+import sys
+import os
 
+# Need dnnlib in the path to correctly load the pickle
+sys.path.insert(0, os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'], 'bob/paper/ijcb2021_synthetic_dataset/stylegan2'))
 class StyleGAN2Generator(object):
     def __init__(self, 
                  sg2_path=rc['sg2_morph.sg2_path'], 
-- 
GitLab