Skip to content
Snippets Groups Projects
Commit c7068292 authored by ogueler@idiap.ch
Browse files

Removed the `strict` parameter from checkpoint loading

parent 0209ebe1
No related branches found
No related tags found
1 merge request: !2 "Checkpointing cleanup"
......@@ -131,7 +131,7 @@ def predict(
weight_fullpath = os.path.abspath(weight)
checkpointer = Checkpointer(model)
checkpointer.load(weight_fullpath, strict=False)
checkpointer.load(weight_fullpath)
# Logistic regressor weights
if model.name == "logistic_regression":
......
......@@ -51,7 +51,7 @@ class Checkpointer:
with open(self._last_checkpoint_filename, "w") as f:
f.write(name)
def load(self, f=None, strict=True):
def load(self, f=None):
"""Loads model, optimizer and scheduler from file.
Parameters
......@@ -62,9 +62,6 @@ class Checkpointer:
contains the checkpoint data to load into the model, and optionally
into the optimizer and the scheduler. If not specified, loads data
from current path.
partial : :py:class:`bool`, Optional
If True, loading is not strict and only the model is loaded
"""
if f is None:
f = self.last_checkpoint()
......@@ -79,13 +76,12 @@ class Checkpointer:
checkpoint = torch.load(f, map_location=torch.device("cpu"))
# converts model entry to model parameters
self.model.load_state_dict(checkpoint.pop("model"), strict=strict)
self.model.load_state_dict(checkpoint.pop("model"))
if strict:
if self.optimizer is not None:
self.optimizer.load_state_dict(checkpoint.pop("optimizer"))
if self.scheduler is not None:
self.scheduler.load_state_dict(checkpoint.pop("scheduler"))
if self.optimizer is not None:
self.optimizer.load_state_dict(checkpoint.pop("optimizer"))
if self.scheduler is not None:
self.scheduler.load_state_dict(checkpoint.pop("scheduler"))
return checkpoint
......
0% — Loading, or try again.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.