From ab19f65cbb65b851d8b5d15d12f5e34587628036 Mon Sep 17 00:00:00 2001
From: dcarron <daniel.carron@idiap.ch>
Date: Mon, 17 Jun 2024 10:33:17 +0200
Subject: [PATCH] [mednet.scripts] Update usage examples

---
 src/mednet/libs/classification/scripts/config.py | 10 +++++-----
 .../libs/classification/scripts/database.py | 4 ++--
 .../libs/classification/scripts/evaluate.py | 4 ++--
 .../libs/classification/scripts/experiment.py | 3 +--
 .../libs/classification/scripts/predict.py | 4 ++--
 src/mednet/libs/classification/scripts/train.py | 2 +-
 src/mednet/libs/segmentation/scripts/config.py | 16 ++++++++--------
 src/mednet/libs/segmentation/scripts/database.py | 16 ++++++++--------
 .../libs/segmentation/scripts/experiment.py | 7 +++----
 src/mednet/libs/segmentation/scripts/predict.py | 4 ++--
 src/mednet/libs/segmentation/scripts/train.py | 6 +++---
 11 files changed, 37 insertions(+), 39 deletions(-)

diff --git a/src/mednet/libs/classification/scripts/config.py b/src/mednet/libs/classification/scripts/config.py
index 9a41ea83..7d32cc1c 100644
--- a/src/mednet/libs/classification/scripts/config.py
+++ b/src/mednet/libs/classification/scripts/config.py
@@ -27,7 +27,7 @@ def config():

     .. code:: sh

-        mednet config list
+        mednet classification config list

     \b
@@ -36,7 +36,7 @@ def config():

     .. code:: sh

-        mednet config list -v
+        mednet classification config list -v

     """,
 )
@@ -54,7 +54,7 @@ def list_(verbose) -> None:  # numpydoc ignore=PR01

     .. code:: sh

-        mednet config describe montgomery
+        mednet classification config describe montgomery

     \b
@@ -63,7 +63,7 @@ def list_(verbose) -> None:  # numpydoc ignore=PR01

     .. code:: sh

-        mednet config describe montgomery -v
+        mednet classification config describe montgomery -v

     """,
 )
@@ -87,7 +87,7 @@ def describe(name, verbose) -> None:  # numpydoc ignore=PR01

     .. code:: sh

-        $ mednet config copy montgomery -vvv newdataset.py
+        $ mednet classification config copy montgomery -vvv newdataset.py

     """,
 )
diff --git a/src/mednet/libs/classification/scripts/database.py b/src/mednet/libs/classification/scripts/database.py
index 974bf426..4cd6a414 100644
--- a/src/mednet/libs/classification/scripts/database.py
+++ b/src/mednet/libs/classification/scripts/database.py
@@ -89,7 +89,7 @@ def database() -> None:

     .. code:: sh

-        $ mednet database list
+        $ mednet classification database list

     """,
 )
@@ -108,7 +108,7 @@ def list_():

     .. code:: sh

-        mednet datamodule check -vv montgomery-f0
+        mednet classification database check -vv montgomery-f0

     """,
 )
diff --git a/src/mednet/libs/classification/scripts/evaluate.py b/src/mednet/libs/classification/scripts/evaluate.py
index eb7970c9..50c2e56a 100644
--- a/src/mednet/libs/classification/scripts/evaluate.py
+++ b/src/mednet/libs/classification/scripts/evaluate.py
@@ -24,13 +24,13 @@ logger = setup(__name__.split(".")[0], format="%(levelname)s: %(message)s")

     .. code:: sh

-        mednet evaluate -vv --predictions=path/to/predictions.json
+        mednet classification evaluate -vv --predictions=path/to/predictions.json

     2. Run evaluation on an existing prediction output, tune threshold a priori on the `validation` set:

     .. code:: sh

-        mednet evaluate -vv --predictions=path/to/predictions.json --threshold=validation
+        mednet classification evaluate -vv --predictions=path/to/predictions.json --threshold=validation

     """,
 )
 @click.option(
diff --git a/src/mednet/libs/classification/scripts/experiment.py b/src/mednet/libs/classification/scripts/experiment.py
index 3a469a8c..5dcfe0c9 100644
--- a/src/mednet/libs/classification/scripts/experiment.py
+++ b/src/mednet/libs/classification/scripts/experiment.py
@@ -28,7 +28,7 @@ logger = setup(__name__.split(".")[0], format="%(levelname)s: %(message)s")

     .. code:: sh

-        $ mednet experiment -vv pasa montgomery --epochs=2
+        $ mednet classification experiment -vv pasa montgomery --epochs=2

     """,
 )
 @training_options
@@ -60,7 +60,6 @@ def experiment(
     .. code::

        └─ <output-folder>/
-          ├── command.sh
           ├── model/  # the generated model will be here
           ├── predictions.json  # the prediction outputs for the sets
           └── evaluation/  # the outputs of the evaluations for the sets
diff --git a/src/mednet/libs/classification/scripts/predict.py b/src/mednet/libs/classification/scripts/predict.py
index 2691727d..a6f45a11 100644
--- a/src/mednet/libs/classification/scripts/predict.py
+++ b/src/mednet/libs/classification/scripts/predict.py
@@ -21,13 +21,13 @@ logger = setup(__name__.split(".")[0], format="%(levelname)s: %(message)s")

     .. code:: sh

-        mednet predict -vv pasa montgomery --weight=path/to/model.ckpt --output=path/to/predictions.json
+        mednet classification predict -vv pasa montgomery --weight=path/to/model.ckpt --output=path/to/predictions.json

     2. Enable multi-processing data loading with 6 processes:

     .. code:: sh

-        mednet predict -vv pasa montgomery --parallel=6 --weight=path/to/model.ckpt --output=path/to/predictions.json
+        mednet classification predict -vv pasa montgomery --parallel=6 --weight=path/to/model.ckpt --output=path/to/predictions.json

     """,
 )
diff --git a/src/mednet/libs/classification/scripts/train.py b/src/mednet/libs/classification/scripts/train.py
index 9374afad..58c99cad 100644
--- a/src/mednet/libs/classification/scripts/train.py
+++ b/src/mednet/libs/classification/scripts/train.py
@@ -23,7 +23,7 @@ logger = setup("mednet", format="%(levelname)s: %(message)s")

     .. code:: sh

-        mednet train -vv pasa elastic montgomery --batch-size=4 --device="cuda:0"
+        mednet classification train -vv pasa elastic montgomery --batch-size=4 --device="cuda:0"

     """,
 )
 @reusable_options
diff --git a/src/mednet/libs/segmentation/scripts/config.py b/src/mednet/libs/segmentation/scripts/config.py
index b900ba86..8fef1df9 100644
--- a/src/mednet/libs/segmentation/scripts/config.py
+++ b/src/mednet/libs/segmentation/scripts/config.py
@@ -23,11 +23,11 @@ def config():
     epilog="""Examples:

     \b
-    1. Lists all configuration resources (type: mednet.libs.classification.config) installed:
+    1. Lists all configuration resources (type: mednet.libs.segmentation.config) installed:

     .. code:: sh

-        mednet config list
+        mednet segmentation config list

     \b
@@ -36,7 +36,7 @@ def config():

     .. code:: sh

-        mednet config list -v
+        mednet segmentation config list -v

     """,
 )
@@ -50,20 +50,20 @@ def list_(verbose) -> None:  # numpydoc ignore=PR01
     epilog="""Examples:

     \b
-    1. Describe the Montgomery dataset configuration:
+    1. Describe the Drive dataset configuration:

     .. code:: sh

-        mednet config describe montgomery
+        mednet segmentation config describe drive

     \b
-    2. Describe the Montgomery dataset configuration and lists its
+    2. Describe the Drive dataset configuration and lists its
       contents:

     .. code:: sh

-        mednet config describe montgomery -v
+        mednet segmentation config describe drive -v

     """,
 )
@@ -87,7 +87,7 @@ def describe(name, verbose) -> None:  # numpydoc ignore=PR01

     .. code:: sh

-        $ mednet config copy montgomery -vvv newdataset.py
+        $ mednet segmentation config copy drive -vvv newdataset.py

     """,
 )
diff --git a/src/mednet/libs/segmentation/scripts/database.py b/src/mednet/libs/segmentation/scripts/database.py
index 1610c16c..272d4a09 100644
--- a/src/mednet/libs/segmentation/scripts/database.py
+++ b/src/mednet/libs/segmentation/scripts/database.py
@@ -21,7 +21,7 @@ def _get_raw_databases() -> dict[str, dict[str, str]]:
     containing two string keys:

     * ``module``: the full Pythonic module name (e.g.
-      ``mednet.libs.classification.data.montgomery``).
+      ``mednet.libs.segmentation.data.drive``).
     * ``datadir``: points to the user-configured data directory for the
       current dataset, if set, or ``None`` otherwise.
     """
@@ -71,14 +71,14 @@ def database() -> None:
     \b
     1. To install a database, set up its data directory ("datadir"). For
-       example, to setup access to Montgomery files you downloaded locally at
-       the directory "/path/to/montgomery/files", edit the RC file (typically
-       ``$HOME/.config/mednet.libs.classification.toml``), and add a line like the following:
+       example, to set up access to Drive files you downloaded locally at
+       the directory "/path/to/drive/files", edit the RC file (typically
+       ``$HOME/.config/mednet.toml``), and add a line like the following:

        .. code:: toml

           [datadir]
-          montgomery = "/path/to/montgomery/files"
+          drive = "/path/to/drive/files"

     .. note::

     .. code:: sh

-        $ mednet database list
+        $ mednet segmentation database list

     """,
 )
@@ -102,12 +102,12 @@ def list_():
 @database.command(
     epilog="""Examples:

-    1. Check if all files from the fold 'montgomery-f0' of the Montgomery
+    1. Check if all files from the fold 'default' of the Drive
       database can be loaded:

     .. code:: sh

-        mednet datamodule check -vv montgomery-f0
+        mednet segmentation database check -vv drive

     """,
 )
diff --git a/src/mednet/libs/segmentation/scripts/experiment.py b/src/mednet/libs/segmentation/scripts/experiment.py
index 4c906c02..a912559b 100644
--- a/src/mednet/libs/segmentation/scripts/experiment.py
+++ b/src/mednet/libs/segmentation/scripts/experiment.py
@@ -23,13 +23,13 @@ logger = setup(__name__.split(".")[0], format="%(levelname)s: %(message)s")
     epilog="""Examples:

     \b
-    1. Train a pasa model with montgomery dataset, on the CPU, for only two
+    1. Train a lwnet model with the drive dataset, on the CPU, for only two
       epochs, then runs inference and evaluation on stock datasets, report performance as a table and figures:

     .. code:: sh

-        $ mednet experiment -vv pasa montgomery --epochs=2
+        $ mednet segmentation experiment -vv lwnet drive --epochs=2

     """,
 )
 @training_options
@@ -59,9 +59,8 @@ def experiment(
     \b
        └─ <output-folder>/
-          ├── command.sh
           ├── model/  # the generated model will be here
-          ├── predictions.json  # the prediction outputs for the sets
+          ├── predictions  # the prediction outputs for the sets
           └── evaluation/  # the outputs of the evaluations for the sets
     """
diff --git a/src/mednet/libs/segmentation/scripts/predict.py b/src/mednet/libs/segmentation/scripts/predict.py
index 84449741..e96d25a3 100644
--- a/src/mednet/libs/segmentation/scripts/predict.py
+++ b/src/mednet/libs/segmentation/scripts/predict.py
@@ -69,13 +69,13 @@ def _save_hdf5(

     .. code:: sh

-        mednet predict -vv pasa montgomery --weight=path/to/model.ckpt --output=path/to/predictions.json
+        mednet segmentation predict -vv lwnet drive --weight=path/to/model.ckpt --output=path/to/predictions.json

     2. Enable multi-processing data loading with 6 processes:

     .. code:: sh

-        mednet predict -vv pasa montgomery --parallel=6 --weight=path/to/model.ckpt --output=path/to/predictions.json
+        mednet segmentation predict -vv lwnet drive --parallel=6 --weight=path/to/model.ckpt --output=path/to/predictions.json

     """,
 )
diff --git a/src/mednet/libs/segmentation/scripts/train.py b/src/mednet/libs/segmentation/scripts/train.py
index ac381328..5089ccab 100644
--- a/src/mednet/libs/segmentation/scripts/train.py
+++ b/src/mednet/libs/segmentation/scripts/train.py
@@ -18,11 +18,11 @@ logger = setup("mednet", format="%(levelname)s: %(message)s")
     cls=ConfigCommand,
     epilog="""Examples:

-1. Train a pasa model with the montgomery dataset, on a GPU (``cuda:0``):
+1. Train a lwnet model with the drive dataset, on a GPU (``cuda:0``):

     .. code:: sh

-        mednet train -vv pasa montgomery --batch-size=4 --device="cuda:0"
+        mednet segmentation train -vv lwnet drive --batch-size=4 --device="cuda:0"

     """,
 )
 @reusable_options
@@ -44,7 +44,7 @@ def train(
     augmentations,
     **_,
 ) -> None:  # numpydoc ignore=PR01
-    """Train an CNN to perform image classification.
+    """Train a CNN to perform image segmentation.

     Training is performed for a configurable number of epochs, and generates
     checkpoints. Checkpoints are model files with a .ckpt
--
GitLab