Commit 60d7070f authored by Anjith George's avatar Anjith George
Browse files

WIP: logging with summary writer

parent f7180d0d
Pipeline #40306 passed with stage
in 7 minutes and 40 seconds
......@@ -10,6 +10,7 @@ import torch.nn as nn
from torch.autograd import Variable
from .tflog import Logger
from torch.utils.tensorboard import SummaryWriter
import bob.core
logger = bob.core.log.setup("bob.learn.pytorch")
......@@ -67,7 +68,7 @@ class GenericTrainer(object):
bob.core.log.set_verbosity_level(logger, verbosity_level)
self.tf_logger = Logger(tf_logdir)
self.tf_logger = SummaryWriter(log_dir=tf_logdir)
# Setting the gradients to true for the layers which need to be adapted
......@@ -252,17 +253,17 @@ class GenericTrainer(object):
# scalar logs
for tag, value in info.items():
self.tf_logger.scalar_summary(tag, value, epoch+1)
self.tf_logger.add_scalar(tag=tag, scalar_value=value, global_step=epoch+1)
# Log values and gradients of the parameters (histogram summary)
for tag, value in self.network.named_parameters():
tag = tag.replace('.', '/')
try:
self.tf_logger.histo_summary(
tag, value.data.cpu().numpy(), epoch+1)
self.tf_logger.histo_summary(
tag+'/grad', value.grad.data.cpu().numpy(), epoch+1)
self.tf_logger.add_histogram(
tag=tag, values=value.data.cpu().numpy(), global_step=epoch+1)
self.tf_logger.add_histogram(
tag=tag+'/grad', values=value.grad.data.cpu().numpy(), global_step=epoch+1)
except:
pass
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment