From 729a7e9bc7002fa5e06bd9128dc96d6695812455 Mon Sep 17 00:00:00 2001
From: dcarron <daniel.carron@idiap.ch>
Date: Tue, 11 Apr 2023 13:03:39 +0200
Subject: [PATCH] Renamed optimizer_params to optimizer_configs

---
 src/ptbench/models/densenet.py | 2 +-
 src/ptbench/models/pasa.py     | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/ptbench/models/densenet.py b/src/ptbench/models/densenet.py
index ab7a1f71..4e5b34c0 100644
--- a/src/ptbench/models/densenet.py
+++ b/src/ptbench/models/densenet.py
@@ -114,7 +114,7 @@ class Densenet(pl.LightningModule):
     def configure_optimizers(self):
         # Dynamically instantiates the optimizer given the configs
         optimizer = getattr(torch.optim, self.hparams.optimizer)(
-            self.parameters(), **self.hparams.optimizer_params
+            self.parameters(), **self.hparams.optimizer_configs
         )
         return optimizer
 
diff --git a/src/ptbench/models/pasa.py b/src/ptbench/models/pasa.py
index d5657218..8c9705e6 100644
--- a/src/ptbench/models/pasa.py
+++ b/src/ptbench/models/pasa.py
@@ -205,7 +205,7 @@ class PASA(pl.LightningModule):
     def configure_optimizers(self):
         # Dynamically instantiates the optimizer given the configs
         optimizer = getattr(torch.optim, self.hparams.optimizer)(
-            self.parameters(), **self.hparams.optimizer_params
+            self.parameters(), **self.hparams.optimizer_configs
         )
         return optimizer
 
-- 
GitLab