Commit d1eef74f authored by André Anjos

[doc] Fix doc generation after COVD config collapse

parent 82ef4d3c
1 merge request: !12 Streamlining
Pipeline #38963 passed
@@ -37,8 +37,8 @@ _iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
 _hrf_transforms = [Resize((363)), Pad((0, 90, 0, 91))] + _DA
-dataset = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
+_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
     transforms=_hrf_transforms)
 import torch.utils.data
-dataset = torch.utils.data.ConcatDataset([_stare, _chase, _hrf, _iostar])
+dataset = torch.utils.data.ConcatDataset([_stare, _chase, _iostar, _hrf])
@@ -40,12 +40,12 @@ _chase = SampleList2TorchDataset(_raw_chase.subsets("default")["train"],
 from bob.ip.binseg.data.iostar import dataset as _raw_iostar
 _iostar_transforms = [Pad((81, 0, 81, 0)), Resize(608)] + _DA
-dataset = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
+_iostar = SampleList2TorchDataset(_raw_iostar.subsets("vessel")["train"],
     transforms=_iostar_transforms)
 from bob.ip.binseg.data.hrf import dataset as _raw_hrf
 _hrf_transforms = [Pad((0, 345, 0, 345)), Resize(608)] + _DA
-dataset = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
+_hrf = SampleList2TorchDataset(_raw_hrf.subsets("default")["train"],
     transforms=_hrf_transforms)
 import torch.utils.data
...
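The two hunks above apply the same fix to different COVD training configurations: the wrapped subsets were previously all assigned to ``dataset``, so the private names (``_hrf``, ``_iostar``) that the final ``ConcatDataset`` call refers to were never defined and importing the configuration failed, which is what broke documentation generation. Below is a minimal, self-contained sketch of the restored pattern using a plain ``torch`` stand-in; the ``_Toy`` class and the subset sizes are illustrative only and are not part of ``bob.ip.binseg``.

.. code-block:: python

   # Self-contained sketch of the pattern restored above: each subset keeps
   # its own private name and only the concatenation is exported as
   # ``dataset``.  ``_Toy`` is a stand-in for SampleList2TorchDataset.
   import torch.utils.data


   class _Toy(torch.utils.data.Dataset):
       """Tiny placeholder dataset returning its index as the sample."""

       def __init__(self, n):
           self._n = n

       def __len__(self):
           return self._n

       def __getitem__(self, i):
           return i


   # one private name per training subset (toy lengths, not real counts)
   _stare = _Toy(10)
   _chase = _Toy(8)
   _iostar = _Toy(20)
   _hrf = _Toy(15)

   # the exported entry point is the concatenation of all subsets
   dataset = torch.utils.data.ConcatDataset([_stare, _chase, _iostar, _hrf])
   assert len(dataset) == 10 + 8 + 20 + 15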
@@ -73,7 +73,7 @@ More information:
 # First, define how to access and load the raw data. Our package provides some
 # stock loaders we use for other datasets. You may have a look at the
 # documentation of that module for details.
-from bob.ip.binseg.data.loaders import (
+from bob.ip.binseg.data.loader import (
     load_pil_rgb,
     load_pil_1,
     data_path_keymaker,
@@ -129,4 +129,4 @@ _transforms = [
 # This class also inherits from pytorch Dataset and respect its required API.
 # See the documentation for details.
 from bob.ip.binseg.data.utils import SampleList2TorchDataset
-dataset = SampleList2TorchDataset(raw_dataset.subset("data"), _transforms)
+#dataset = SampleList2TorchDataset(raw_dataset.samples("data"), _transforms)
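These last two hunks touch the CSV dataset example configuration: the stock loaders are imported from their actual module (``bob.ip.binseg.data.loader``) and the final ``dataset`` line is left commented out, since it needs user-supplied data. For readers unfamiliar with those loaders, the sketch below shows roughly what functions such as ``load_pil_rgb`` and ``load_pil_1`` are expected to do; it is an illustrative equivalent written with Pillow, not the package's own implementation.

.. code-block:: python

   # Rough, illustrative equivalents of the stock loaders imported above.
   # These are NOT the package's implementations -- just what such loaders
   # are expected to do: open a file with Pillow and fix the image mode.
   import PIL.Image


   def load_rgb_example(path):
       """Loads an image file as a 3-channel RGB PIL image (e.g. a fundus photo)."""
       return PIL.Image.open(path).convert("RGB")


   def load_binary_example(path):
       """Loads a ground-truth annotation as a black-and-white (mode "1") image."""
       return PIL.Image.open(path).convert("1")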
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Example self-scanning folder-based dataset
In case you have data that is organized on your filesystem, this configuration
shows an example setup so you can feed such files **without** ground-truth to
predict vessel probalities using one of our trained models. There can be any
number of images within the root folder of your dataset, with any kind of
subfolder arrangements. For example:
.. code-block:: text
├── image_1.png
└── subdir1
├── image_subdir_1.jpg
├── ...
└── image_subdir_k.jpg
├── ...
└── image_n.png
Use the path leading to the root of your dataset, and replace ``<path.csv>`` on
the example code for this configuration, that you must copy locally to make
changes:
.. code-block:: sh
$ bob binseg config copy folder-dataset-example mydataset.py
# edit mydataset.py as explained here
Fine-tune the transformations for your particular purpose.
Keep in mind that specific models require that you feed images respecting
certain restrictions (input dimensions, image centering, etc.). Check the
configuration that was used to train models and try to match it as well as
possible.
"""
# add your transforms below - these are just examples
from bob.ip.binseg.data.transforms import CenterCrop
_transforms = [
    #CenterCrop((544, 544)),
]
from bob.ip.binseg.data.folderdataset import FolderDataset
#dataset = FolderDataset("<path.csv>", glob="*.*", transforms=_transforms)
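To make the example above concrete, an edited local copy (created with ``bob binseg config copy folder-dataset-example mydataset.py``) could look like the sketch below. The dataset root and the crop size are placeholders to adapt to your own data and to the model you plan to run; they are not values shipped with the package.

.. code-block:: python

   # mydataset.py -- hypothetical edited copy of the folder-dataset example.
   # The path and crop size below are placeholders, not package defaults.
   from bob.ip.binseg.data.transforms import CenterCrop
   from bob.ip.binseg.data.folderdataset import FolderDataset

   # match the geometry expected by the pre-trained model you will use
   _transforms = [CenterCrop((544, 544))]

   # scan every file under the root folder and apply the transforms above
   dataset = FolderDataset("/path/to/my/images", glob="*.*",
                           transforms=_transforms)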
@@ -45,17 +45,13 @@ Inference on a custom dataset
 =============================

 If you would like to test your own data against one of the pre-trained models,
-you need to instantiate one of:
-
-* :py:mod:`A CSV-based configuration <bob.ip.binseg.configs.datasets.csv>`
-* :py:mod:`A folder-based configuration <bob.ip.binseg.configs.datasets.folder>`
+you need to instantiate :py:mod:`A CSV-based configuration
+<bob.ip.binseg.configs.datasets.csv>`

 Read the appropriate module documentation for details.

 .. code-block:: bash

-   $ bob binseg config copy folder-dataset-example mydataset.py
-   # or
    $ bob binseg config copy csv-dataset-example mydataset.py
    # edit mydataset.py to your liking
    $ bob binseg predict -vv <model> -w <path/to/model.pth> ./mydataset.py
...