diff --git a/bob/bio/face/config/baseline/helpers.py b/bob/bio/face/config/baseline/helpers.py
index 764ae274e6ecf5cf3fd7926d6fe595e002a77bdb..aa9e7cd481aba706af4ce8c22b7cc10c5d343128 100644
--- a/bob/bio/face/config/baseline/helpers.py
+++ b/bob/bio/face/config/baseline/helpers.py
@@ -1,14 +1,50 @@
 import bob.bio.face
+from bob.bio.face.preprocessor import FaceCrop, MultiFaceCrop, Scale
 from sklearn.pipeline import make_pipeline
 from bob.bio.base.wrappers import wrap_sample_preprocessor
 from bob.pipelines import wrap
-from bob.bio.face.helpers import face_crop_solver
 import numpy as np
 import logging
 
 logger = logging.getLogger(__name__)
 
 
+def face_crop_solver(
+    cropped_image_size,
+    cropped_positions=None,
+    color_channel="rgb",
+    fixed_positions=None,
+    annotator=None,
+    dtype="uint8",
+):
+    """
+    Decide which face cropper to use.
+    """
+    # If there are no cropped positions, just resize the whole image
+    if cropped_positions is None:
+        return Scale(cropped_image_size)
+    else:
+        # A list of cropped positions means several annotation layouts must be handled
+        if isinstance(cropped_positions, list):
+            return MultiFaceCrop(
+                cropped_image_size=cropped_image_size,
+                cropped_positions_list=cropped_positions,
+                fixed_positions_list=fixed_positions,
+                color_channel=color_channel,
+                dtype=dtype,
+                annotation=annotator,
+            )
+        else:
+            return FaceCrop(
+                cropped_image_size=cropped_image_size,
+                cropped_positions=cropped_positions,
+                color_channel=color_channel,
+                fixed_positions=fixed_positions,
+                dtype=dtype,
+                annotator=annotator,
+            )
+
+
 def embedding_transformer_default_cropping(cropped_image_size, annotation_type):
     """
     Computes the default cropped positions for the FaceCropper used with Facenet-like 
diff --git a/bob/bio/face/helpers.py b/bob/bio/face/helpers.py
deleted file mode 100644
index 51bdf9b8e35b1d7004f7acbbae0841c10b7b4772..0000000000000000000000000000000000000000
--- a/bob/bio/face/helpers.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from bob.bio.face.preprocessor import FaceCrop, MultiFaceCrop, Scale
-
-
-def face_crop_solver(
-    cropped_image_size,
-    cropped_positions=None,
-    color_channel="rgb",
-    fixed_positions=None,
-    annotator=None,
-    dtype="uint8",
-):
-    """
-    Decide which face cropper to use.
-    """
-    # If there's not cropped positions, just resize
-    if cropped_positions is None:
-        return Scale(cropped_image_size)
-    else:
-        # Detects the face and crops it without eye detection
-        if isinstance(cropped_positions, list):
-            return MultiFaceCrop(
-                cropped_image_size=cropped_image_size,
-                cropped_positions_list=cropped_positions,
-                fixed_positions_list=fixed_positions,
-                color_channel=color_channel,
-                dtype=dtype,
-                annotation=annotator,
-            )
-        else:
-            return FaceCrop(
-                cropped_image_size=cropped_image_size,
-                cropped_positions=cropped_positions,
-                color_channel=color_channel,
-                fixed_positions=fixed_positions,
-                dtype=dtype,
-                annotator=annotator,
-            )
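
For reviewers, a minimal usage sketch of the relocated helper, now imported from bob.bio.face.config.baseline.helpers as introduced by this patch. The crop size and the annotation keys ("reye", "leye", "topleft", "bottomright") below are illustrative assumptions for the example, not values taken from the patch itself.

    # Illustrative only: exercises the three branches of face_crop_solver
    # after the move. Crop size and annotation keys are assumed for the
    # sake of the example.
    from bob.bio.face.config.baseline.helpers import face_crop_solver

    CROP_SIZE = (112, 112)  # assumed example size

    # No cropped positions: the solver falls back to a plain Scale resizer.
    resizer = face_crop_solver(CROP_SIZE)

    # A single dict of positions: a FaceCrop aligned on the given landmarks.
    eye_cropper = face_crop_solver(
        CROP_SIZE,
        cropped_positions={"reye": (32, 34), "leye": (32, 77)},
    )

    # A list of position dicts: a MultiFaceCrop covering several layouts.
    multi_cropper = face_crop_solver(
        CROP_SIZE,
        cropped_positions=[
            {"reye": (32, 34), "leye": (32, 77)},
            {"topleft": (0, 0), "bottomright": (112, 112)},
        ],
    )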