diff --git a/README.rst b/README.rst
index eb3cf4fb14d0b9074613422d1f851b293f8ec271..20ae83f0dd114b0604e78ff9941dcd77b4bde14f 100644
--- a/README.rst
+++ b/README.rst
@@ -18,31 +18,75 @@
  New package
 =============
 
-This package is part of the signal-processing and machine learning toolbox Bob_.
+This package is part of the signal-processing and machine learning toolbox Bob_. It contains the source code to reproduce the experiments of the article
+*On the use of automatically generated synthetic image datasets for benchmarking face recognition*.
 
-.. todo::
+It mainly provides tools to perform the following operations:
 
-   **Complete the sentence above to include one phrase about your
-   package!  Once this is done, delete this to-do!**
+1. Projection of a face dataset into StyleGAN2's latent space (`./bin/project_db.py`)
+2. Computation of semantic editing latent directions from those projections (`./bin/latent_analysis.py`)
+3. Generation of a synthetic dataset using the precomputed latent directions (`./bin/generate_db.py`)
+4. Running a face recognition benchmark experiment on the synthetic dataset (`bob bio pipelines vanilla-biometrics`, see the example below)
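+
+As an illustration of step 4, a benchmark run typically looks as follows. This is only a sketch: the database and pipeline entry point names are placeholders, and the exact `vanilla-biometrics` options may vary with the installed `bob.bio.base` version::
+
+   conda activate <benchmark_env_name>    # environment created from benchmark_env.yml
+   bob bio pipelines vanilla-biometrics \
+       --database <database_config> \
+       --pipeline <pipeline_config> \
+       --output <results_folder>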
 
 
 Installation
 ------------
 
-Complete bob's `installation`_ instructions. Then, to install this
-package, run::
+This project contains two distinct conda environments:
 
-  $ conda install bob.paper.ijcb2021_synthetic_dataset
+* `generation_env.yml`: based on Bob 8 and Tensorflow 1, used for steps 1 to 3 (dataset projection, latent analysis and database generation).
+* `benchmark_env.yml`: based on Bob 9 and Tensorflow 2, used for step 4 (running the benchmark experiments).
 
+To install everything correctly, after pulling this repository from GitLab, you need to:
+
+1. Install both environments::
+
+      conda env create -f generation_env.yml
+      conda env create -f benchmark_env.yml
+
+2. Run `buildout` to extend the generation environment with the tools available in this repository::
+
+      conda activate synface      # Activate the generation environment
+      buildout -c buildout.cfg    # Run buildout
+
+This second step creates a `bin` folder containing, in particular (a quick sanity check is shown after the list):
+
+1. `./bin/python`: a custom Python interpreter for the generation environment, extended with `bob.paper.ijcb2021_synthetic_dataset`
+2. `./bin/project_db.py`: dataset projection script (entry point)
+3. `./bin/latent_analysis.py`: script for computing latent directions (entry point)
+4. `./bin/generate_db.py`: synthetic dataset generation script (entry point)
+5. `./bin/download_models.py`: utility to download the required pretrained models (entry point)
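+
+As a quick sanity check that buildout completed, the extended interpreter should be able to import the package, and the entry points should answer the usual `--help` flag (assuming they expose one)::
+
+   ./bin/python -c "import bob.paper.ijcb2021_synthetic_dataset"
+   ./bin/project_db.py --help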
 
 How to run
 ----------
 
+Download model dependencies
+***************************
+
+This project relies on several pretrained models:
+
+* **DLIB Face Landmark detector**, used to crop and align the projected faces exactly as in FFHQ (`Example <http://dlib.net/face_landmark_detection.py.html>`__).
+* **StyleGAN2**, the main face synthesis network (`Original paper <https://arxiv.org/abs/1912.04958>`__, `Official repository <https://github.com/NVlabs/stylegan2>`__). We use Config-F, trained on FFHQ at resolution 1024 x 1024.
+* A pretrained **VGG16** model, used to compute a perceptual loss between the projected and target images (`Original paper <https://arxiv.org/abs/1801.03924>`__).
+
+In order to download these models, one must first specify their destination paths in the `~/.bobrc` file, using the following commands::
+
+   conda activate synface
+   bob config set sg2_morph.dlib_lmd_path </path/to/dlib/landmark/detector.dat>
+   bob config set sg2_morph.sg2_path </path/to/stylegan2/pretrained/model.pkl>
+   bob config set sg2_morph.vgg16_path </path/to/vgg16/pretrained/model.pkl>
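+
+The stored values can be double-checked at any time (assuming the standard `bob config show` subcommand from `bob.extension` is available)::
+
+   bob config show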
+
+The models can then be downloaded once and for all by running::
+
+   ./bin/download_models.py
+
 Prepare folder configuration
 *********************************
 
 ::
-
+
+    # Absolute path of this repository; some paths in the code are resolved relative to it (e.g. when launching executions on a grid)
+    bob config set bob.paper.ijcb2021_synthetic_dataset.path <path_of_this_repo>
     # Folder to store projected Multi-PIE latent projections
     bob config set bob.synface.multipie_projections <path_to_folder>
     # Folder containing Multi-PIE images
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/P.py b/bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/P.py
index cf0081002459ee269b23bc9a8644c2533d6068f4..3434f022b42ded9e2768c5995f50874fa11a0a52 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/P.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/P.py
@@ -13,7 +13,8 @@ from sklearn.pipeline import make_pipeline
 # Kept cameras : ["08_0", "13_0", "14_0", "05_1", "05_0", "04_1", "19_0"]
 
 def get_database():
-    csv_dir = './bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/protocols'
+    csv_dir = os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                           'bob/paper/ijcb2021_synthetic_dataset/config/database/multipie/protocols')
                                         
     database = CSVDataset(dataset_protocol_path=csv_dir,
                           protocol_name='P_center',
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/E.py b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/E.py
index 39b6085e981ebe9d97e099038ada397a4191a98b..52c65982b730fb91bc05955e01c6a9f46930e554 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/E.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/E.py
@@ -6,7 +6,8 @@ import os
 def get_database(protocol):
 
     image_dir = rc['bob.db.synmultipie.directory']
-    csv_dir = './bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols'
+    csv_dir = os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                           'bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols')
 
     sample_loader = CSVToSampleLoaderBiometrics(data_loader=bob.io.image.load,
                                       dataset_original_directory = image_dir,
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/P.py b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/P.py
index d7fb2293b1ce5b9f27e17bfe4d821a0307e39d4d..eb06ab273120d60922dcfbfd99756e1bf3889f47 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/P.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/P.py
@@ -5,7 +5,8 @@ from bob.bio.base.database import CSVToSampleLoaderBiometrics, CSVDataset
 def get_database(protocol):
 
     image_dir = rc['bob.db.synmultipie.directory']
-    csv_dir = './bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols'
+    csv_dir = os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                           'bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols')
 
     sample_loader = CSVToSampleLoaderBiometrics(data_loader=bob.io.image.load,
                                       dataset_original_directory = image_dir,
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/U.py b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/U.py
index a9713fe4921ba04e7d04a7b31b59290b7bff3abf..3de2d8c5219d9712605704bd064578f312ff84d1 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/U.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/U.py
@@ -5,7 +5,8 @@ from bob.bio.base.database import CSVToSampleLoaderBiometrics, CSVDataset
 def get_database(protocol):
 
     image_dir = rc['bob.db.synmultipie.directory']
-    csv_dir = './bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols'
+    csv_dir = os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                           'bob/paper/ijcb2021_synthetic_dataset/config/database/synmultipie/protocols')
 
     sample_loader = CSVToSampleLoaderBiometrics(data_loader=bob.io.image.load,
                                       dataset_original_directory = image_dir,
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_E.py b/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_E.py
index b7030b889814ed903b914b1cf8a26bac059972a9..3e07ce76bae9d0c7da9f62554d677a637c1e6325 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_E.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_E.py
@@ -4,7 +4,8 @@ from bob.bio.base.database import FileListBioDatabase
 
 
 database = FileListBioDatabase(
-    filelists_directory='./bob/paper/ijcb2021_synthetic_dataset/config/project/protocols',
+    filelists_directory=os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                                     'bob/paper/ijcb2021_synthetic_dataset/config/project/protocols'),
     name='multipie',
     protocol='E_lit',
     original_directory=rc['bob.db.multipie.directory'],
diff --git a/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_P.py b/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_P.py
index 337399c20eac75b9f32ea8d674606c0475545893..8e253bed5b065a1037c35498cc61673a592fe37e 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_P.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/config/project/multipie_P.py
@@ -3,7 +3,8 @@ from bob.extension import rc
 from bob.bio.base.database import FileListBioDatabase
 
 database = FileListBioDatabase(
-    filelists_directory='./bob/paper/ijcb2021_synthetic_dataset/config/project/protocols',
+    filelists_directory=os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                                     'bob/paper/ijcb2021_synthetic_dataset/config/project/protocols'),
     name='multipie',
     protocol='P_center_lit',
     original_directory=rc['bob.db.multipie.directory'],
diff --git a/bob/paper/ijcb2021_synthetic_dataset/stylegan2/generator.py b/bob/paper/ijcb2021_synthetic_dataset/stylegan2/generator.py
index 56057546e5068ad7a8233f1d59e4819415b3b65a..96ab680185997e496b6a9c2584d5a5d5d795b2d3 100644
--- a/bob/paper/ijcb2021_synthetic_dataset/stylegan2/generator.py
+++ b/bob/paper/ijcb2021_synthetic_dataset/stylegan2/generator.py
@@ -6,7 +6,11 @@ import numpy as np
 import bob.io.image
 import tensorflow as tf
 import pickle
+import sys
+import os
 
+# Need dnnlib in the path to correctly load the pickle
+sys.path.insert(0, os.path.join(rc['bob.paper.ijcb2021_synthetic_dataset.path'],
+                                'bob/paper/ijcb2021_synthetic_dataset/stylegan2'))
+
+
 class StyleGAN2Generator(object):
     def __init__(self, 
                  sg2_path=rc['sg2_morph.sg2_path'],