Commit 4f3105a7 authored by Daniel CARRON

Renamed optimizer_params to optimizer_configs

parent e8d70f15
1 merge request: !4 Moved code to lightning
@@ -46,7 +46,7 @@ class LoggingCallback(Callback):
         self.log("total_time", current_time)
         self.log("eta", eta_seconds)
         self.log("loss", numpy.average(self.training_loss))
-        self.log("learning_rate", pl_module.hparams["optimizer_params"]["lr"])
+        self.log("learning_rate", pl_module.hparams["optimizer_configs"]["lr"])
         self.log("validation_loss", numpy.average(self.validation_loss))
         queue_retries = 0
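Note that `save_hyperparameters()` stores constructor arguments under their parameter names, so renaming the argument also renames the key the callback reads from `pl_module.hparams`. A minimal sketch of that mechanism (the `ToyModule` class and its values are illustrative, not part of this commit):

import pytorch_lightning as pl


class ToyModule(pl.LightningModule):
    """Illustrative only: shows why the hparams key follows the argument name."""

    def __init__(self, optimizer_configs):
        super().__init__()
        # Records every constructor argument under self.hparams,
        # keyed by its parameter name.
        self.save_hyperparameters()


module = ToyModule(optimizer_configs={"lr": 1e-3})
print("optimizer_params" in module.hparams)       # False after the rename
print(module.hparams["optimizer_configs"]["lr"])  # 0.001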
@@ -21,7 +21,7 @@ class Densenet(pl.LightningModule):
         criterion,
         criterion_valid,
         optimizer,
-        optimizer_params,
+        optimizer_configs,
         pretrained=False,
         nb_channels=3,
     ):
@@ -33,7 +33,9 @@ class PASA(pl.LightningModule):
     Based on paper by [PASA-2019]_.
     """
-    def __init__(self, criterion, criterion_valid, optimizer, optimizer_params):
+    def __init__(
+        self, criterion, criterion_valid, optimizer, optimizer_configs
+    ):
         super().__init__()
         self.save_hyperparameters()
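The commit does not show how the renamed dictionary is consumed. A hedged sketch of a `configure_optimizers` that forwards `optimizer_configs` to the optimizer class could look as follows; the `SketchModule` name, the `torch.optim` lookup, the dummy layer, and the loss functions are assumptions for illustration only:

import torch
import pytorch_lightning as pl


class SketchModule(pl.LightningModule):
    """Illustrative only: mirrors the renamed constructor signature."""

    def __init__(self, criterion, criterion_valid, optimizer, optimizer_configs):
        super().__init__()
        self.save_hyperparameters()
        # Dummy layer so the optimizer has parameters to manage.
        self.fc = torch.nn.Linear(4, 1)

    def configure_optimizers(self):
        # Assumption: ``optimizer`` names a class in ``torch.optim`` and
        # ``optimizer_configs`` holds its keyword arguments, e.g. {"lr": 1e-3}.
        optimizer_cls = getattr(torch.optim, self.hparams["optimizer"])
        return optimizer_cls(self.parameters(), **self.hparams["optimizer_configs"])


model = SketchModule(
    criterion=torch.nn.BCEWithLogitsLoss(),
    criterion_valid=torch.nn.BCEWithLogitsLoss(),
    optimizer="Adam",
    optimizer_configs={"lr": 1e-3},
)
print(model.configure_optimizers())  # Adam with lr=0.001 over the dummy layer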