# SPDX-FileCopyrightText: Copyright © 2023 Idiap Research Institute <contact@idiap.ch>
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""Tests for Indian (a.k.a.

dataset A/dataset B) dataset.
"""

import importlib

import pytest


def id_function(val):
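    """Return a readable pytest ID for a parametrized value."""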
    if isinstance(val, dict):
        return str(val)
    return repr(val)


@pytest.mark.parametrize(
    "split,lenghts",
    [
        ("default", dict(train=83, validation=20, test=52)),
        ("fold-0", dict(train=111, validation=28, test=16)),
        ("fold-1", dict(train=111, validation=28, test=16)),
        ("fold-2", dict(train=111, validation=28, test=16)),
        ("fold-3", dict(train=111, validation=28, test=16)),
        ("fold-4", dict(train=111, validation=28, test=16)),
        ("fold-5", dict(train=112, validation=28, test=15)),
        ("fold-6", dict(train=112, validation=28, test=15)),
        ("fold-7", dict(train=112, validation=28, test=15)),
        ("fold-8", dict(train=112, validation=28, test=15)),
        ("fold-9", dict(train=112, validation=28, test=15)),
    ],
    ids=id_function,  # only affects how pytest displays the parametrized test IDs
)
def test_protocol_consistency(
    database_checkers, split: str, lengths: dict[str, int]
):
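    """Check that the split has the expected subset sizes, path prefixes and labels."""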
    from mednet.config.data.indian.datamodule import make_split

    database_checkers.check_split(
        make_split(f"{split}.json"),
        lengths=lengths,
        prefixes=("DatasetA/Training", "DatasetA/Testing"),
        possible_labels=(0, 1),
    )


@pytest.mark.skip_if_rc_var_not_set("datadir.indian")
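# only runs when the "datadir.indian" runtime variable points to a local copy of the data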
@pytest.mark.parametrize(
    "dataset",
    [
        "train",
        "validation",
        "test",
    ],
)
@pytest.mark.parametrize(
    "name",
    [
        "default",
        "fold_0",
        "fold_1",
        "fold_2",
        "fold_3",
        "fold_4",
        "fold_5",
        "fold_6",
        "fold_7",
        "fold_8",
        "fold_9",
    ],
)
def test_loading(database_checkers, name: str, dataset: str):
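    """Load a few batches from each subset and check their structure and labels."""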
    datamodule = importlib.import_module(
        f".{name}", "mednet.config.data.indian"
    ).datamodule

    datamodule.model_transforms = []  # should be done before setup()
    datamodule.setup("predict")  # sets up all datasets

    loader = datamodule.predict_dataloader()[dataset]
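    # (predict_dataloader() returns one dataloader per subset, keyed by subset name)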

    limit = 3  # only check the first few batches to keep the test fast
    for batch in loader:
        if limit == 0:
            break
        database_checkers.check_loaded_batch(
            batch,
            batch_size=1,
            color_planes=1,
            prefixes=("DatasetA/Training", "DatasetA/Testing"),
            possible_labels=(0, 1),
        )
        limit -= 1