diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b4d1d1ca583eb211cc4b0a5ab9b2243c715bb45e..49337196592599a11942ecd8fc24a4230c5a196d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,20 +2,20 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
   - repo: https://github.com/timothycrosley/isort
-    rev: 5.10.1
+    rev: 5.12.0
     hooks:
       - id: isort
         args: [--settings-path, "pyproject.toml"]
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 23.1.0
     hooks:
       - id: black
   - repo: https://github.com/pycqa/flake8
-    rev: 3.9.2
+    rev: 6.0.0
     hooks:
       - id: flake8
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.2.0
+    rev: v4.4.0
     hooks:
       - id: check-ast
       - id: check-case-conflict
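Note on the hook bumps above: the new pins (isort 5.12.0, black 23.1.0, flake8 6.0.0, pre-commit-hooks v4.4.0) are current upstream releases of the kind `pre-commit autoupdate` picks up, and `pre-commit run --all-files` re-checks the whole tree against them. The jump to black 23.x brings in the 2023 stable style, which removes the blank line that used to follow a function signature; that is most likely what every whitespace-only hunk below this point is.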
diff --git a/src/bob/pipelines/dataset/database.py b/src/bob/pipelines/dataset/database.py
index 2ea4653cdf410d24ec7d73d804e0a7788974090d..fe43dee9bb016b9f2f6c0a20fcc4e4d8aa55b845 100644
--- a/src/bob/pipelines/dataset/database.py
+++ b/src/bob/pipelines/dataset/database.py
@@ -299,7 +299,6 @@ class FileListDatabase:
         )
         all_samples = []
         for grp in groups:
-
             for sample in self.get_reader(grp):
                 all_samples.append(sample)
 
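Aside: the loop touched above just flattens every group's reader into one list. A minimal sketch of an equivalent comprehension, assuming a database object exposing the same get_reader(group) iterable (the patch itself only drops a blank line):

    # hypothetical equivalent of the flattening loop in FileListDatabase
    def collect_samples(database, groups):
        return [sample for grp in groups for sample in database.get_reader(grp)]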
diff --git a/src/bob/pipelines/distributed/sge.py b/src/bob/pipelines/distributed/sge.py
index 83c678da0f7a67c67d59a68be8033532970b7e46..048ee0235697d770c2cd6a308f7cc744d5e467d2 100644
--- a/src/bob/pipelines/distributed/sge.py
+++ b/src/bob/pipelines/distributed/sge.py
@@ -42,7 +42,6 @@ class SGEIdiapJob(Job):
         config_name="sge",
         **kwargs,
     ):
-
         if queue is None:
             queue = dask.config.get("jobqueue.%s.queue" % config_name)
         if project is None:
@@ -244,7 +243,6 @@ class SGEMultipleQueuesCluster(JobQueueCluster):
         project=rc.get("sge.project"),
         **kwargs,
     ):
-
         # Defining the job launcher
         self.job_cls = SGEIdiapJob
         self.sge_job_spec = sge_job_spec
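The two hunks above sit in constructors that resolve each missing argument (queue, project, and so on) from dask's jobqueue config. A condensed sketch of that fallback, assuming only that a jobqueue.sge section exists in the dask config:

    import dask

    def from_config(value, key, config_name="sge"):
        # fall back to the dask-jobqueue config when no explicit value is given
        if value is None:
            return dask.config.get(f"jobqueue.{config_name}.{key}")
        return value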
diff --git a/src/bob/pipelines/wrappers.py b/src/bob/pipelines/wrappers.py
index 182dec239247bcf821b256f254154745115370a4..fe3326f61528a7b468945e7b03533b1b16b1ca92 100644
--- a/src/bob/pipelines/wrappers.py
+++ b/src/bob/pipelines/wrappers.py
@@ -566,7 +566,6 @@ class CheckpointWrapper(BaseWrapper, TransformerMixin):
         return self.estimator.score(samples)
 
     def fit(self, samples, y=None, **kwargs):
-
         if not estimator_requires_fit(self.estimator):
             return self
 
@@ -582,7 +581,6 @@ class CheckpointWrapper(BaseWrapper, TransformerMixin):
         return self.save_model()
 
     def make_path(self, sample):
-
         if self.features_dir is None:
             return None
 
@@ -605,7 +603,6 @@ class CheckpointWrapper(BaseWrapper, TransformerMixin):
         to_save = getattr(sample, self.sample_attribute)
         for _ in range(self.attempts):
             try:
-
                 dirname = os.path.dirname(path)
                 os.makedirs(dirname, exist_ok=True)
 
@@ -697,7 +694,6 @@ def _shape_samples(samples):
 
 
 def _array_from_sample_bags(X: dask.bag.Bag, attribute: str, ndim: int = 2):
-
     if ndim not in (1, 2):
         raise NotImplementedError(f"ndim must be 1 or 2. Got: {ndim}")
 
@@ -1028,7 +1024,6 @@ def wrap(bases, estimator=None, **kwargs):
     if isinstance(estimator, Pipeline):
         # wrap inner steps
         for idx, name, trans in estimator._iter():
-
             # when checkpointing a pipeline, checkpoint each transformer in its own folder
             new_kwargs = dict(kwargs)
             features_dir, model_path = (
diff --git a/src/bob/pipelines/xarray.py b/src/bob/pipelines/xarray.py
index 6b35d1c0df4902d38d03d579eeafa80211b1ff0b..1a13b8367f3c9ea74e3089f58441a4f2ddae7bee 100644
--- a/src/bob/pipelines/xarray.py
+++ b/src/bob/pipelines/xarray.py
@@ -313,7 +313,6 @@ def _get_dask_args_from_ds(ds, columns):
 
 
 def _blockwise_with_block_args(args, block, method_name=None):
-
     meta = []
     for _ in range(1, block.output_ndim):
         meta = [meta]
diff --git a/tests/test_samples.py b/tests/test_samples.py
index 4b246a33660152f4e30f6e397a4cfc2594116d86..84b79f7deca77dba05204cbdd0739a4809363c6a 100644
--- a/tests/test_samples.py
+++ b/tests/test_samples.py
@@ -17,7 +17,6 @@ from bob.pipelines import (
 
 
 def test_sampleset_collection():
-
     n_samples = 10
     X = np.ones(shape=(n_samples, 2), dtype=int)
     sampleset = SampleSet(
@@ -46,7 +45,6 @@ def test_sampleset_collection():
 
     # Testing delayed sampleset
     with tempfile.TemporaryDirectory() as dir_name:
-
         samples = [Sample(data, key=str(i)) for i, data in enumerate(X)]
         filename = os.path.join(dir_name, "samples.pkl")
         with open(filename, "wb") as f:
@@ -59,7 +57,6 @@ def test_sampleset_collection():
 
     # Testing delayed sampleset cached
     with tempfile.TemporaryDirectory() as dir_name:
-
         samples = [Sample(data, key=str(i)) for i, data in enumerate(X)]
         filename = os.path.join(dir_name, "samples.pkl")
         with open(filename, "wb") as f:
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 488c796cb3998a3ff59714dc0dcb3f1494cfe91d..8a335bea7a8e646fc547c739f3ace5e445486af7 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -101,7 +101,6 @@ def test_is_instance_nested():
 
 
 def test_break_sample_set():
-
     samplesets = []
     n_samples = 10
     X = np.ones(shape=(n_samples, 2), dtype=int)
@@ -109,7 +108,6 @@ def test_break_sample_set():
 
     # Creating a face list of samplesets
     for i in range(n_samples):
-
         samplesets.append(
             SampleSet(
                 [
diff --git a/tests/test_wrappers.py b/tests/test_wrappers.py
index a90090df473497fb6266e4195e44bdba7c176c09..83c4c76ba3378be256dd6e0d052ea69d393135f6 100644
--- a/tests/test_wrappers.py
+++ b/tests/test_wrappers.py
@@ -65,7 +65,6 @@ class DummyTransformer(TransformerMixin, BaseEstimator):
         return self
 
     def transform(self, X):
-
         # Input validation
         X = check_array(X)
         # Check that the input is of the same shape as the one passed
@@ -181,7 +180,6 @@ def test_sklearn_compatible_estimator():
 
 
 def test_function_sample_transfomer():
-
     X = np.zeros(shape=(10, 2), dtype=int)
     samples = [bob.pipelines.Sample(data) for data in X]
 
@@ -200,7 +198,6 @@ def test_function_sample_transfomer():
 
 
 def test_fittable_sample_transformer():
-
     X = np.ones(shape=(10, 2), dtype=int)
     samples = [bob.pipelines.Sample(data) for data in X]
 
@@ -214,7 +211,6 @@ def test_fittable_sample_transformer():
 
 
 def test_tagged_sample_transformer():
-
     X = np.ones(shape=(10, 2), dtype=int)
     samples = [bob.pipelines.Sample(data) for data in X]
 
@@ -227,7 +223,6 @@ def test_tagged_sample_transformer():
 
 
 def test_tagged_input_sample_transformer():
-
     X = np.ones(shape=(10, 2), dtype=int)
     samples = [bob.pipelines.Sample(data) for data in X]
 
@@ -242,7 +237,6 @@ def test_tagged_input_sample_transformer():
 
 
 def test_dask_tag_transformer():
-
     X = np.ones(shape=(10, 2), dtype=int)
     samples = [bob.pipelines.Sample(data) for data in X]
     sample_bags = bob.pipelines.ToDaskBag().transform(samples)
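ToDaskBag().transform(samples) above hands the sample list to dask as a bag. It presumably amounts to something like the following sketch, built on dask's public API rather than the actual implementation:

    import dask.bag

    def to_dask_bag(samples, npartitions=None):
        # partition the in-memory sample list so later steps can map over it lazily
        return dask.bag.from_sequence(samples, npartitions=npartitions)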
@@ -255,7 +249,6 @@ def test_dask_tag_transformer():
 
 
 def test_dask_tag_checkpoint_transformer():
-
     X = np.ones(shape=(10, 2), dtype=int)
     samples = [bob.pipelines.Sample(data) for data in X]
     sample_bags = bob.pipelines.ToDaskBag().transform(samples)
@@ -279,7 +272,6 @@ def test_dask_tag_checkpoint_transformer():
 
 
 def test_dask_tag_daskml_estimator():
-
     X, labels = make_blobs(
         n_samples=1000,
         n_features=2,
@@ -328,7 +320,6 @@ def test_dask_tag_daskml_estimator():
 
 
 def test_failing_sample_transformer():
-
     X = np.zeros(shape=(10, 2))
     samples = [bob.pipelines.Sample(data) for i, data in enumerate(X)]
     expected = np.full_like(X, 2, dtype=object)
@@ -371,7 +362,6 @@ def test_failing_sample_transformer():
 
 
 def test_failing_checkpoint_transformer():
-
     X = np.zeros(shape=(10, 2))
     samples = [bob.pipelines.Sample(data, key=i) for i, data in enumerate(X)]
     expected = np.full_like(X, 2)
@@ -470,7 +460,6 @@ def _assert_delayed_samples(samples):
 
 
 def test_checkpoint_function_sample_transfomer():
-
     X = np.arange(20, dtype=int).reshape(10, 2)
     samples = [
         bob.pipelines.Sample(data, key=str(i)) for i, data in enumerate(X)
@@ -576,7 +565,6 @@ def _build_estimator(path, i):
 
 
 def _build_transformer(path, i, force=False):
-
     features_dir = os.path.join(path, f"transformer{i}")
     estimator = bob.pipelines.wrap(
         [DummyTransformer, "sample", "checkpoint"],
@@ -588,7 +576,6 @@ def _build_transformer(path, i, force=False):
 
 
 def test_checkpoint_fittable_pipeline():
-
     X = np.ones(shape=(10, 2), dtype=int)
     samples = [
         bob.pipelines.Sample(data, key=str(i)) for i, data in enumerate(X)
@@ -613,7 +600,6 @@ def test_checkpoint_fittable_pipeline():
 
 def test_checkpoint_transform_pipeline():
     def _run(dask_enabled):
-
         X = np.ones(shape=(10, 2), dtype=int)
         samples_transform = [
             bob.pipelines.Sample(data, key=str(i)) for i, data in enumerate(X)
@@ -642,11 +628,9 @@ def test_checkpoint_transform_pipeline():
 
 
 def test_checkpoint_transform_pipeline_force():
-
     with tempfile.TemporaryDirectory() as d:
 
         def _run():
-
             X = np.ones(shape=(10, 2), dtype=int)
             samples_transform = [
                 bob.pipelines.Sample(data, key=str(i))
@@ -782,7 +766,6 @@ def test_dask_checkpoint_transform_pipeline():
 
 def test_checkpoint_transform_pipeline_with_sampleset():
     def _run(dask_enabled):
-
         X = np.ones(shape=(10, 2), dtype=int)
         samples_transform = bob.pipelines.SampleSet(
             [
@@ -821,7 +804,6 @@ def test_checkpoint_transform_pipeline_with_sampleset():
 
 
 def test_estimator_requires_fit():
-
     all_wraps = [
         ["sample"],
         ["sample", "checkpoint"],
diff --git a/tests/test_xarray.py b/tests/test_xarray.py
index 6bdab976ca666c0bbcfcfcdf60e5bb04e70e2982..b12646714132c336337fbbf86de03b08f2dae91f 100644
--- a/tests/test_xarray.py
+++ b/tests/test_xarray.py
@@ -67,7 +67,6 @@ def test_delayed_samples_to_dataset():
 
 
 def _build_iris_dataset(shuffle=False, delayed=False):
-
     iris = datasets.load_iris()
 
     X = iris.data
@@ -228,7 +227,6 @@ def test_dataset_pipeline_with_failures():
 
 
 def test_dataset_pipeline_with_dask_ml():
-
     scaler = dask_ml.preprocessing.StandardScaler()
     pca = dask_ml.decomposition.PCA(n_components=3, random_state=0)
     clf = SGDClassifier(random_state=0, loss="log_loss", penalty="l2", tol=1e-3)
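The estimators in this final test suggest the usual scikit-learn composition. A sketch of how they would typically be chained, assuming standard Pipeline wiring (the test's actual setup may differ):

    import dask_ml.decomposition
    import dask_ml.preprocessing
    from sklearn.linear_model import SGDClassifier
    from sklearn.pipeline import Pipeline

    scaler = dask_ml.preprocessing.StandardScaler()
    pca = dask_ml.decomposition.PCA(n_components=3, random_state=0)
    clf = SGDClassifier(random_state=0, loss="log_loss", penalty="l2", tol=1e-3)

    # chain scaling, dimensionality reduction, and the classifier
    pipeline = Pipeline([("scaler", scaler), ("pca", pca), ("clf", clf)])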