From 0baca28d4ca1abbe51add78c8e8a3ea3802dc938 Mon Sep 17 00:00:00 2001
From: dcarron <daniel.carron@idiap.ch>
Date: Wed, 10 May 2023 09:29:53 +0200
Subject: [PATCH] Updated extras.inv to fix issues with lightning's doc

The objects.inv file from lightning's documentation is missing a lot of
methods, which raises errors when building with Sphinx. The problematic
methods have been added to extras.txt and encoded into extras.inv.
---
 doc/extras.inv                             |  4 ++--
 doc/extras.txt                             |  3 +++
 src/ptbench/models/alexnet.py              | 16 +---------------
 src/ptbench/models/densenet.py             | 15 ---------------
 src/ptbench/models/densenet_rs.py          | 15 ---------------
 src/ptbench/models/logistic_regression.py  | 14 --------------
 src/ptbench/models/pasa.py                 | 14 --------------
 src/ptbench/models/signs_to_tb.py          | 14 --------------
 8 files changed, 6 insertions(+), 89 deletions(-)

diff --git a/doc/extras.inv b/doc/extras.inv
index 88973215..d053cdcf 100644
--- a/doc/extras.inv
+++ b/doc/extras.inv
@@ -2,5 +2,5 @@
 # Project: extras
 # Version: stable
 # The remainder of this file is compressed using zlib.
-xÚEËÁ € лSti¼² *
- PÒ~MØÞÞ߃è–îlYšƒ†f‡h5êÃWÙ¯i¡tóÌ}àÅNôäo°!¬%ò]B-4OÎŒ ã
\ No newline at end of file
+xÚA
+1E÷ž¢ [[ÜÎôR§± ´MH£2žÞ‡A\ˆ.ÃÏ{OIútņTŠ¯íLRšá¡+.ÌÎ$Uns<èôlI¢› ×ÔŸ2¸h“–l¶«Œ1iÅíBõ$`g§Ý/ëa¾Gôæ%<« ‰ÂXõŒîƒåŸšëÍ×UØë±)ðÏibÅ‚wÿèĘ/
\ No newline at end of file
diff --git a/doc/extras.txt b/doc/extras.txt
index e827f8fa..77fd0ca6 100644
--- a/doc/extras.txt
+++ b/doc/extras.txt
@@ -3,3 +3,6 @@
 # Version: stable
 # The remainder of this file is compressed using zlib.
 torchvision.transforms py:module 1 https://pytorch.org/vision/stable/transforms.html -
+lightning.pytorch.core.module.LightningModule.forward py:method 1 api/lightning.pytorch.core.LightningModule.html#$ -
+lightning.pytorch.core.module.LightningModule.predict_step py:method 1 api/lightning.pytorch.core.LightningModule.html#$ -
+optimizer_step py:method 1 api/lightning.pytorch.core.LightningModule.html#$ -
diff --git a/src/ptbench/models/alexnet.py b/src/ptbench/models/alexnet.py
index 073013cd..10ecfc72 100644
--- a/src/ptbench/models/alexnet.py
+++ b/src/ptbench/models/alexnet.py
@@ -10,7 +10,7 @@ import torchvision.models as models

 from .normalizer import TorchVisionNormalizer

-class Alexnet(pl.core.LightningModule):
+class Alexnet(pl.LightningModule):
     """Alexnet module.

     Note: only usable with a normalized dataset
@@ -43,20 +43,6 @@ class Alexnet(pl.core.LightningModule):
         self.model_ft.classifier[6] = nn.Linear(512, 1)

     def forward(self, x):
-        """
-
-        Parameters
-        ----------
-
-        x : list
-            list of tensors.
-
-        Returns
-        -------
-
-        tensor : :py:class:`torch.Tensor`
-
-        """
         x = self.normalizer(x)
         x = self.model_ft(x)
diff --git a/src/ptbench/models/densenet.py b/src/ptbench/models/densenet.py
index 27c3393d..77cbc0a8 100644
--- a/src/ptbench/models/densenet.py
+++ b/src/ptbench/models/densenet.py
@@ -43,22 +43,7 @@ class Densenet(pl.LightningModule):
         )

     def forward(self, x):
-        """
-
-        Parameters
-        ----------
-
-        x : list
-            list of tensors.
-
-        Returns
-        -------
-
-        tensor : :py:class:`torch.Tensor`
-
-        """
         x = self.normalizer(x)
-
         x = self.model_ft(x)
         return x
diff --git a/src/ptbench/models/densenet_rs.py b/src/ptbench/models/densenet_rs.py
index 16f4eefb..6e5a3df4 100644
--- a/src/ptbench/models/densenet_rs.py
+++ b/src/ptbench/models/densenet_rs.py
@@ -38,21 +38,6 @@ class DensenetRS(pl.LightningModule):
         self.model_ft.classifier = nn.Linear(num_ftrs, 14)

     def forward(self, x):
-        """
-
-        Parameters
-        ----------
-
-        x : list
-            list of tensors.
-
-        Returns
-        -------
-
-        tensor : :py:class:`torch.Tensor`
-
-        """
-
         x = self.normalizer(x)
         x = self.model_ft(x)
         return x
diff --git a/src/ptbench/models/logistic_regression.py b/src/ptbench/models/logistic_regression.py
index c6df54bc..485a3967 100644
--- a/src/ptbench/models/logistic_regression.py
+++ b/src/ptbench/models/logistic_regression.py
@@ -27,20 +27,6 @@ class LogisticRegression(pl.LightningModule):
         self.linear = nn.Linear(self.hparams.input_size, 1)

     def forward(self, x):
-        """
-
-        Parameters
-        ----------
-
-        x : list
-            list of tensors.
-
-        Returns
-        -------
-
-        tensor : :py:class:`torch.Tensor`
-
-        """
         output = self.linear(x)
         return output
diff --git a/src/ptbench/models/pasa.py b/src/ptbench/models/pasa.py
index 3d4a7641..d4e5b2a8 100644
--- a/src/ptbench/models/pasa.py
+++ b/src/ptbench/models/pasa.py
@@ -93,20 +93,6 @@ class PASA(pl.LightningModule):
         self.dense = nn.Linear(80, 1)  # Fully connected layer

     def forward(self, x):
-        """
-
-        Parameters
-        ----------
-
-        x : list
-            list of tensors.
-
-        Returns
-        -------
-
-        tensor : :py:class:`torch.Tensor`
-
-        """
         x = self.normalizer(x)

         # First convolution block
diff --git a/src/ptbench/models/signs_to_tb.py b/src/ptbench/models/signs_to_tb.py
index 47337727..9267e777 100644
--- a/src/ptbench/models/signs_to_tb.py
+++ b/src/ptbench/models/signs_to_tb.py
@@ -31,20 +31,6 @@ class SignsToTB(pl.LightningModule):
         self.fc2 = torch.nn.Linear(self.hparams.hidden_size, 1)

     def forward(self, x):
-        """
-
-        Parameters
-        ----------
-
-        x : list
-            list of tensors.
-
-        Returns
-        -------
-
-        tensor : :py:class:`torch.Tensor`
-
-        """
         hidden = self.fc1(x)
         relu = self.relu(hidden)
--
GitLab
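For reference, doc/extras.inv uses the Sphinx v2 inventory layout: a short
plain-text header (the "# ..." lines kept verbatim in doc/extras.txt above)
followed by a zlib-compressed body with one cross-reference entry per line.
The patch does not show the command used to re-encode extras.txt into
extras.inv, so the snippet below is only a minimal sketch of that
conversion; the helper name encode_inventory and the four-line-header
assumption are illustrative, not part of ptbench. The third-party sphobjinv
tool performs the same plain-to-zlib conversion if a ready-made utility is
preferred.

import zlib
from pathlib import Path


def encode_inventory(txt: Path, inv: Path, header_lines: int = 4) -> None:
    """Rebuild a Sphinx v2 inventory (.inv) from its plain-text form."""
    lines = txt.read_text(encoding="utf-8").splitlines(keepends=True)
    # Header lines (e.g. "# Project: extras") are written uncompressed ...
    header = "".join(lines[:header_lines]).encode("utf-8")
    # ... and everything after them (the entry list) is zlib-compressed,
    # which is exactly what the "# The remainder ..." comment announces.
    body = "".join(lines[header_lines:]).encode("utf-8")
    inv.write_bytes(header + zlib.compress(body, 9))


if __name__ == "__main__":
    encode_inventory(Path("doc/extras.txt"), Path("doc/extras.inv"))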