diff --git a/src/mednet/libs/classification/scripts/config.py b/src/mednet/libs/classification/scripts/config.py index 9a41ea8330e9007584ff33f3b179f66fe5f0de4c..7d32cc1c69800861fd95e3324c48b2e915e28c71 100644 --- a/src/mednet/libs/classification/scripts/config.py +++ b/src/mednet/libs/classification/scripts/config.py @@ -27,7 +27,7 @@ def config(): .. code:: sh - mednet config list + mednet classification config list \b @@ -36,7 +36,7 @@ def config(): .. code:: sh - mednet config list -v + mednet classification config list -v """, ) @@ -54,7 +54,7 @@ def list_(verbose) -> None: # numpydoc ignore=PR01 .. code:: sh - mednet config describe montgomery + mednet classification config describe montgomery \b @@ -63,7 +63,7 @@ def list_(verbose) -> None: # numpydoc ignore=PR01 .. code:: sh - mednet config describe montgomery -v + mednet classification config describe montgomery -v """, ) @@ -87,7 +87,7 @@ def describe(name, verbose) -> None: # numpydoc ignore=PR01 .. code:: sh - $ mednet config copy montgomery -vvv newdataset.py + $ mednet classification config copy montgomery -vvv newdataset.py """, ) diff --git a/src/mednet/libs/classification/scripts/database.py b/src/mednet/libs/classification/scripts/database.py index 974bf426ade241b4c02c2c224ebfa9e66c0e8101..4cd6a4145fe54c4cbb0c924ace82f607a602cf7f 100644 --- a/src/mednet/libs/classification/scripts/database.py +++ b/src/mednet/libs/classification/scripts/database.py @@ -89,7 +89,7 @@ def database() -> None: .. code:: sh - $ mednet database list + $ mednet classification database list """, ) @@ -108,7 +108,7 @@ def list_(): .. 
code:: sh - mednet datamodule check -vv montgomery-f0 + mednet classification database check -vv montgomery-f0 """, ) diff --git a/src/mednet/libs/classification/scripts/evaluate.py b/src/mednet/libs/classification/scripts/evaluate.py index eb7970c9f8ad8eae45ff647d066f204a54ccf9cd..50c2e56a7dcb37051a78265ef27eb33bec9fd442 100644 --- a/src/mednet/libs/classification/scripts/evaluate.py +++ b/src/mednet/libs/classification/scripts/evaluate.py @@ -24,13 +24,13 @@ logger = setup(__name__.split(".")[0], format="%(levelname)s: %(message)s") .. code:: sh - mednet evaluate -vv --predictions=path/to/predictions.json + mednet classification evaluate -vv --predictions=path/to/predictions.json 2. Run evaluation on an existing prediction output, tune threshold a priori on the `validation` set: .. code:: sh - mednet evaluate -vv --predictions=path/to/predictions.json --threshold=validation + mednet classification evaluate -vv --predictions=path/to/predictions.json --threshold=validation """, ) @click.option( diff --git a/src/mednet/libs/classification/scripts/experiment.py b/src/mednet/libs/classification/scripts/experiment.py index 3a469a8c3a59458befbd9d4f64afda7b429e3ab6..5dcfe0c9d400117178d2f314c23fc083fe24b0c7 100644 --- a/src/mednet/libs/classification/scripts/experiment.py +++ b/src/mednet/libs/classification/scripts/experiment.py @@ -28,7 +28,7 @@ logger = setup(__name__.split(".")[0], format="%(levelname)s: %(message)s") .. code:: sh - $ mednet experiment -vv pasa montgomery --epochs=2 + $ mednet classification experiment -vv pasa montgomery --epochs=2 """, ) @training_options @@ -60,7 +60,6 @@ def experiment( ..
code:: └─ <output-folder>/ - ├── command.sh ├── model/ # the generated model will be here ├── predictions.json # the prediction outputs for the sets └── evaluation/ # the outputs of the evaluations for the sets diff --git a/src/mednet/libs/classification/scripts/predict.py b/src/mednet/libs/classification/scripts/predict.py index 2691727d6176bb17b618f900ca75a76b885faf36..a6f45a113fda61bc73a54ce4e95ef3f72dfdb772 100644 --- a/src/mednet/libs/classification/scripts/predict.py +++ b/src/mednet/libs/classification/scripts/predict.py @@ -21,13 +21,13 @@ logger = setup(__name__.split(".")[0], format="%(levelname)s: %(message)s") .. code:: sh - mednet predict -vv pasa montgomery --weight=path/to/model.ckpt --output=path/to/predictions.json + mednet classification predict -vv pasa montgomery --weight=path/to/model.ckpt --output=path/to/predictions.json 2. Enable multi-processing data loading with 6 processes: .. code:: sh - mednet predict -vv pasa montgomery --parallel=6 --weight=path/to/model.ckpt --output=path/to/predictions.json + mednet classification predict -vv pasa montgomery --parallel=6 --weight=path/to/model.ckpt --output=path/to/predictions.json """, ) diff --git a/src/mednet/libs/classification/scripts/train.py b/src/mednet/libs/classification/scripts/train.py index 9374afad382bf91f2ec2e0cdd266eafdbf187a24..58c99cad6c1477e336544ebd9bc25fc197c3aa3a 100644 --- a/src/mednet/libs/classification/scripts/train.py +++ b/src/mednet/libs/classification/scripts/train.py @@ -23,7 +23,7 @@ logger = setup("mednet", format="%(levelname)s: %(message)s") .. 
code:: sh - mednet train -vv pasa elastic montgomery --batch-size=4 --device="cuda:0" + mednet classification train -vv pasa elastic montgomery --batch-size=4 --device="cuda:0" """, ) @reusable_options diff --git a/src/mednet/libs/segmentation/scripts/config.py b/src/mednet/libs/segmentation/scripts/config.py index b900ba866fd78fe24bc2f834525f16938178f1f1..8fef1df9ee64b30e54c3437f46848457caa8079e 100644 --- a/src/mednet/libs/segmentation/scripts/config.py +++ b/src/mednet/libs/segmentation/scripts/config.py @@ -23,11 +23,11 @@ def config(): epilog="""Examples: \b - 1. Lists all configuration resources (type: mednet.libs.classification.config) installed: + 1. Lists all configuration resources (type: mednet.libs.segmentation.config) installed: .. code:: sh - mednet config list + mednet segmentation config list \b @@ -36,7 +36,7 @@ def config(): .. code:: sh - mednet config list -v + mednet segmentation config list -v """, ) @@ -50,20 +50,20 @@ def list_(verbose) -> None: # numpydoc ignore=PR01 epilog="""Examples: \b - 1. Describe the Montgomery dataset configuration: + 1. Describe the Drive dataset configuration: .. code:: sh - mednet config describe montgomery + mednet segmentation config describe drive \b - 2. Describe the Montgomery dataset configuration and lists its + 2. Describe the Drive dataset configuration and lists its contents: .. code:: sh - mednet config describe montgomery -v + mednet segmentation config describe drive -v """, ) @@ -87,7 +87,7 @@ def describe(name, verbose) -> None: # numpydoc ignore=PR01 ..
code:: sh - $ mednet config copy montgomery -vvv newdataset.py + $ mednet segmentation config copy drive -vvv newdataset.py """, ) diff --git a/src/mednet/libs/segmentation/scripts/database.py b/src/mednet/libs/segmentation/scripts/database.py index 1610c16c8b2c958b9d3dae903e5c554d5289e048..272d4a0989678b16f6fee206cd91a3affd565d49 100644 --- a/src/mednet/libs/segmentation/scripts/database.py +++ b/src/mednet/libs/segmentation/scripts/database.py @@ -21,7 +21,7 @@ def _get_raw_databases() -> dict[str, dict[str, str]]: containing two string keys: * ``module``: the full Pythonic module name (e.g. - ``mednet.libs.classification.data.montgomery``). + ``mednet.libs.segmentation.data.drive``). * ``datadir``: points to the user-configured data directory for the current dataset, if set, or ``None`` otherwise. """ @@ -71,14 +71,14 @@ def database() -> None: \b 1. To install a database, set up its data directory ("datadir"). For - example, to setup access to Montgomery files you downloaded locally at - the directory "/path/to/montgomery/files", edit the RC file (typically - ``$HOME/.config/mednet.libs.classification.toml``), and add a line like the following: + example, to setup access to Drive files you downloaded locally at + the directory "/path/to/drive/files", edit the RC file (typically + ``$HOME/.config/mednet.toml``), and add a line like the following: .. code:: toml [datadir] - montgomery = "/path/to/montgomery/files" + drive = "/path/to/drive/files" .. note:: @@ -89,7 +89,7 @@ def database() -> None: .. code:: sh - $ mednet database list + $ mednet segmentation database list """, ) @@ -102,12 +102,12 @@ def list_(): @database.command( epilog="""Examples: - 1. Check if all files from the fold 'montgomery-f0' of the Montgomery + 1. Check if all files from the fold 'default' of the Drive database can be loaded: .. 
code:: sh - mednet datamodule check -vv montgomery-f0 + mednet segmentation database check -vv drive """, ) diff --git a/src/mednet/libs/segmentation/scripts/experiment.py b/src/mednet/libs/segmentation/scripts/experiment.py index 4c906c023f6e2d6e4e63f3973bb7c676b73040f6..a912559bc2e5da79f435f863ea1a4bc213ebde51 100644 --- a/src/mednet/libs/segmentation/scripts/experiment.py +++ b/src/mednet/libs/segmentation/scripts/experiment.py @@ -23,13 +23,13 @@ logger = setup(__name__.split(".")[0], format="%(levelname)s: %(message)s") epilog="""Examples: \b - 1. Train a pasa model with montgomery dataset, on the CPU, for only two + 1. Train a lwnet model with drive dataset, on the CPU, for only two epochs, then runs inference and evaluation on stock datasets, report performance as a table and figures: .. code:: sh - $ mednet experiment -vv pasa montgomery --epochs=2 + $ mednet segmentation experiment -vv lwnet drive --epochs=2 """, ) @training_options @@ -59,9 +59,8 @@ def experiment( \b └─ <output-folder>/ - ├── command.sh ├── model/ # the generated model will be here - ├── predictions.json # the prediction outputs for the sets + ├── predictions # the prediction outputs for the sets └── evaluation/ # the outputs of the evaluations for the sets """ diff --git a/src/mednet/libs/segmentation/scripts/predict.py b/src/mednet/libs/segmentation/scripts/predict.py index 84449741c95f9ab51305cd4820e0b23a622dd5d9..e96d25a3b905967b5497fd15df2773366b2f08e3 100644 --- a/src/mednet/libs/segmentation/scripts/predict.py +++ b/src/mednet/libs/segmentation/scripts/predict.py @@ -69,13 +69,13 @@ def _save_hdf5( .. code:: sh - mednet predict -vv pasa montgomery --weight=path/to/model.ckpt --output=path/to/predictions.json + mednet segmentation predict -vv lwnet drive --weight=path/to/model.ckpt --output=path/to/predictions.json 2. Enable multi-processing data loading with 6 processes: .. 
code:: sh - mednet predict -vv pasa montgomery --parallel=6 --weight=path/to/model.ckpt --output=path/to/predictions.json + mednet segmentation predict -vv lwnet drive --parallel=6 --weight=path/to/model.ckpt --output=path/to/predictions.json """, ) diff --git a/src/mednet/libs/segmentation/scripts/train.py b/src/mednet/libs/segmentation/scripts/train.py index ac3813280a6c2bb6e28c246a1c1a1b28cb6600d5..5089ccab1d865ae8465e26686c7eec290a80c26f 100644 --- a/src/mednet/libs/segmentation/scripts/train.py +++ b/src/mednet/libs/segmentation/scripts/train.py @@ -18,11 +18,11 @@ logger = setup("mednet", format="%(levelname)s: %(message)s") cls=ConfigCommand, epilog="""Examples: -1. Train a pasa model with the montgomery dataset, on a GPU (``cuda:0``): +1. Train a lwnet model with the drive dataset, on a GPU (``cuda:0``): .. code:: sh - mednet train -vv pasa montgomery --batch-size=4 --device="cuda:0" + mednet segmentation train -vv lwnet drive --batch-size=4 --device="cuda:0" """, ) @reusable_options @@ -44,7 +44,7 @@ def train( augmentations, **_, ) -> None: # numpydoc ignore=PR01 - """Train an CNN to perform image classification. + """Train a CNN to perform image segmentation. Training is performed for a configurable number of epochs, and generates checkpoints. Checkpoints are model files with a .ckpt