diff --git a/.gitignore b/.gitignore
index d1dfa18ce8c26f10ec4887002a30462ead44df74..874a8f5518b777522808b9582461f05cebc8b431 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,7 +17,7 @@ dist
 build
 .DS_Store
 *.egg
-src/
 record.txt
 dask-worker-space
 .gitlab-ci-local*
+html/
diff --git a/MANIFEST.in b/MANIFEST.in
index fe4735ac49c627c49e20db6b40d9394ae2c72e5f..e008f08fcfa5e5a464cd5d770e78987a4d3a6c46 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,3 @@
-include LICENSE README.rst buildout.cfg develop.cfg requirements.txt version.txt
+include LICENSE README.rst
 recursive-include doc conf.py *.rst
-recursive-include bob/learn/em *.cpp *.h
-recursive-include bob/learn/em/data *.*
+recursive-include src/bob/learn/em/data *.*
diff --git a/README.rst b/README.rst
index 3d97c36eceac13882a5d551c09b9a5fd167dc0d2..d64f771e2ad7ab1d8fee4a9a006de285a510bc64 100644
--- a/README.rst
+++ b/README.rst
@@ -2,11 +2,11 @@
 .. Mon 15 Aug 2016 09:48:28 CEST
 
 .. image:: https://img.shields.io/badge/docs-latest-orange.svg
-   :target: https://www.idiap.ch/software/bob/docs/bob/bob.learn.em/master/index.html
+   :target: https://www.idiap.ch/software/bob/docs/bob/bob.learn.em/master/sphinx/index.html
 .. image:: https://gitlab.idiap.ch/bob/bob.learn.em/badges/master/pipeline.svg
    :target: https://gitlab.idiap.ch/bob/bob.learn.em/commits/master
 .. image:: https://gitlab.idiap.ch/bob/bob.learn.em/badges/master/coverage.svg
-   :target: https://gitlab.idiap.ch/bob/bob.learn.em/commits/master
+   :target: https://www.idiap.ch/software/bob/docs/bob/bob.learn.em/master/coverage
 .. image:: https://img.shields.io/badge/gitlab-project-0000c0.svg
    :target: https://gitlab.idiap.ch/bob/bob.learn.em
diff --git a/conda/meta.yaml b/conda/meta.yaml
index 679e793afeb697e81ef79e65fab2eed646978d70..eb579f93936b5f19b4350aff581559e4fd580d7c 100644
--- a/conda/meta.yaml
+++ b/conda/meta.yaml
@@ -1,29 +1,26 @@
 {% set data = load_file_data(RECIPE_DIR + '/../pyproject.toml') %}
 {% set name = data['project']['name'] %}
-{% set project_dir = environ.get('RECIPE_DIR') + '/..' %}
 
 package:
   name: {{ name }}
   version: {{ data['project']['version'] }}
 
+source:
+  path: ..
+
 build:
+  noarch: python
   number: {{ environ.get('BOB_BUILD_NUMBER', 0) }}
   run_exports:
     - {{ pin_subpackage(name) }}
   script:
-    - cd {{ project_dir }}
-    - "{{ PYTHON }} -m pip install . -vv"
-    # installs the documentation source, readme to share/doc so it is available
-    # during test time
-    - install -d "${PREFIX}/share/doc/{{ name }}"
-    - cp -R README.rst requirements.txt doc "${PREFIX}/share/doc/{{ name }}/"
+    - "{{ PYTHON }} -m pip install {{ SRC_DIR }} -vv"
 
 requirements:
   host:
     - python {{ python }}
     - setuptools {{ setuptools }}
     - pip {{ pip }}
-    - bob.extension
     - numpy {{ numpy }}
     - dask {{ dask }}
     - dask-ml {{ dask_ml }}
@@ -33,26 +30,21 @@ requirements:
   run:
     - python
     - setuptools
-    - {{ pin_compatible('numpy', max_pin='x.x') }}
+    - {{ pin_compatible('numpy') }}
     - {{ pin_compatible('dask') }}
     - {{ pin_compatible('dask-ml') }}
     - {{ pin_compatible('h5py') }}
-    # scikit-learn keeps breaking API in minor versions, so we pin to the
-    # next minor version
-    - {{ pin_compatible('scikit-learn', max_pin='x.x') }}
+    - {{ pin_compatible('scikit-learn') }}
 
 test:
   imports:
-    - {{ name }}
+    - {{ name.replace('-','_') }}
   commands:
-    # runs tests for package only, report only what is in the package
-    # creates xml tests report and place it in a specific directory
-    # creates html and xml coverage reports and place them in a specific directory
     - conda inspect linkages -p $PREFIX {{ name }}  # [not win]
     - conda inspect objects -p $PREFIX {{ name }}  # [osx]
 
 about:
   home: {{ data['project']['urls']['homepage'] }}
-  license: {{ data['project']['description'] }}
-  summary: {{ data['project']['license']['text'] }}
+  summary: {{ data['project']['description'] }}
+  license: {{ data['project']['license']['text'] }}
   license_family: BSD
diff --git a/doc/conf.py b/doc/conf.py
index 2999f19cee4d71de6a3484d9bf3f590ec0a7a2da..59964034e8fac4f87cb9144f31c4ff36fc5ce89b 100755
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -2,13 +2,10 @@
 # vim: set fileencoding=utf-8 :
 
 import os
-import sys
 
 import pkg_resources
 
 # -- General configuration -----------------------------------------------------
-# Adding the parent directory to the python path.
-sys.path.insert(0, os.path.abspath("../"))
 
 # If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.3" diff --git a/pyproject.toml b/pyproject.toml index 87e339dcdae377f063dfd8938c654b122bc35281..0b003e8ad019906708dbfe3498588b2caca1d729 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ version = "3.2.1b0" requires-python = ">=3.9" description = "Bindings for EM machines and trainers of Bob" - dynamic = ["readme", "dependencies"] + dynamic = ["readme"] license = {text = "BSD 3-Clause License"} authors = [ {name = "Andre Anjos"}, @@ -25,6 +25,14 @@ "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", ] + dependencies = [ + "setuptools", + "bob.extension", + "dask", + "dask-ml", + "h5py >= 3", + "scikit-learn", + ] [project.urls] documentation = "https://www.idiap.ch/software/bob/docs/bob/bob.learn.em/stable/" @@ -49,12 +57,10 @@ [tool.setuptools] zip-safe = false - include-package-data = true - packages = ["bob"] + package-dir = {"" = "src"} [tool.setuptools.dynamic] readme = {file = "README.rst"} - dependencies = {file = "requirements.txt"} [tool.distutils.bdist_wheel] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 89806f2653f0e6e5a971fb107e848c0beffb5c56..0000000000000000000000000000000000000000 --- a/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -setuptools -bob.extension -dask -dask-ml -h5py >= 3 -scikit-learn diff --git a/bob/__init__.py b/src/bob/__init__.py similarity index 100% rename from bob/__init__.py rename to src/bob/__init__.py diff --git a/bob/learn/__init__.py b/src/bob/learn/__init__.py similarity index 100% rename from bob/learn/__init__.py rename to src/bob/learn/__init__.py diff --git a/bob/learn/em/__init__.py b/src/bob/learn/em/__init__.py similarity index 84% rename from bob/learn/em/__init__.py rename to src/bob/learn/em/__init__.py index 28eb1b6a7a2e9ff3299821a678b45b84f0ee354c..df69c819f5b62875cf64470d5cded3a9afb6b7bf 100644 --- a/bob/learn/em/__init__.py +++ b/src/bob/learn/em/__init__.py @@ -1,5 +1,3 @@ -import bob.extension - from .factor_analysis import ISVMachine, JFAMachine from .gmm import GMMMachine, GMMStats from .ivector import IVectorMachine @@ -9,11 +7,6 @@ from .wccn import WCCN from .whitening import Whitening -def get_config(): - """Returns a string containing the configuration information.""" - return bob.extension.get_config(__name__) - - # gets sphinx autodoc done right - don't remove it def __appropriate__(*args): """Says object was actually declared here, an not on the import module. 
diff --git a/bob/learn/em/factor_analysis.py b/src/bob/learn/em/factor_analysis.py
similarity index 100%
rename from bob/learn/em/factor_analysis.py
rename to src/bob/learn/em/factor_analysis.py
diff --git a/bob/learn/em/gmm.py b/src/bob/learn/em/gmm.py
similarity index 100%
rename from bob/learn/em/gmm.py
rename to src/bob/learn/em/gmm.py
diff --git a/bob/learn/em/ivector.py b/src/bob/learn/em/ivector.py
similarity index 100%
rename from bob/learn/em/ivector.py
rename to src/bob/learn/em/ivector.py
diff --git a/bob/learn/em/kmeans.py b/src/bob/learn/em/kmeans.py
similarity index 100%
rename from bob/learn/em/kmeans.py
rename to src/bob/learn/em/kmeans.py
diff --git a/bob/learn/em/linear_scoring.py b/src/bob/learn/em/linear_scoring.py
similarity index 100%
rename from bob/learn/em/linear_scoring.py
rename to src/bob/learn/em/linear_scoring.py
diff --git a/bob/learn/em/utils.py b/src/bob/learn/em/utils.py
similarity index 100%
rename from bob/learn/em/utils.py
rename to src/bob/learn/em/utils.py
diff --git a/bob/learn/em/wccn.py b/src/bob/learn/em/wccn.py
similarity index 100%
rename from bob/learn/em/wccn.py
rename to src/bob/learn/em/wccn.py
diff --git a/bob/learn/em/whitening.py b/src/bob/learn/em/whitening.py
similarity index 100%
rename from bob/learn/em/whitening.py
rename to src/bob/learn/em/whitening.py
diff --git a/bob/learn/em/test/__init__.py b/tests/__init__.py
similarity index 100%
rename from bob/learn/em/test/__init__.py
rename to tests/__init__.py
diff --git a/bob/learn/em/data/data.hdf5 b/tests/data/data.hdf5
similarity index 100%
rename from bob/learn/em/data/data.hdf5
rename to tests/data/data.hdf5
diff --git a/bob/learn/em/data/dataNormalized.hdf5 b/tests/data/dataNormalized.hdf5
similarity index 100%
rename from bob/learn/em/data/dataNormalized.hdf5
rename to tests/data/dataNormalized.hdf5
diff --git a/bob/learn/em/data/dataforMAP.hdf5 b/tests/data/dataforMAP.hdf5
similarity index 100%
rename from bob/learn/em/data/dataforMAP.hdf5
rename to tests/data/dataforMAP.hdf5
diff --git a/bob/learn/em/data/faithful.torch3.hdf5 b/tests/data/faithful.torch3.hdf5
similarity index 100%
rename from bob/learn/em/data/faithful.torch3.hdf5
rename to tests/data/faithful.torch3.hdf5
diff --git a/bob/learn/em/data/faithful.torch3_f64.hdf5 b/tests/data/faithful.torch3_f64.hdf5
similarity index 100%
rename from bob/learn/em/data/faithful.torch3_f64.hdf5
rename to tests/data/faithful.torch3_f64.hdf5
diff --git a/bob/learn/em/data/gmm.init_means.hdf5 b/tests/data/gmm.init_means.hdf5
similarity index 100%
rename from bob/learn/em/data/gmm.init_means.hdf5
rename to tests/data/gmm.init_means.hdf5
diff --git a/bob/learn/em/data/gmm.init_variances.hdf5 b/tests/data/gmm.init_variances.hdf5
similarity index 100%
rename from bob/learn/em/data/gmm.init_variances.hdf5
rename to tests/data/gmm.init_variances.hdf5
diff --git a/bob/learn/em/data/gmm.init_weights.hdf5 b/tests/data/gmm.init_weights.hdf5
similarity index 100%
rename from bob/learn/em/data/gmm.init_weights.hdf5
rename to tests/data/gmm.init_weights.hdf5
diff --git a/bob/learn/em/data/gmm_MAP.hdf5 b/tests/data/gmm_MAP.hdf5
similarity index 100%
rename from bob/learn/em/data/gmm_MAP.hdf5
rename to tests/data/gmm_MAP.hdf5
diff --git a/bob/learn/em/data/gmm_ML.hdf5 b/tests/data/gmm_ML.hdf5
similarity index 100%
rename from bob/learn/em/data/gmm_ML.hdf5
rename to tests/data/gmm_ML.hdf5
diff --git a/bob/learn/em/data/gmm_ML_32bit_debug.hdf5 b/tests/data/gmm_ML_32bit_debug.hdf5
similarity index 100%
rename from bob/learn/em/data/gmm_ML_32bit_debug.hdf5
rename to tests/data/gmm_ML_32bit_debug.hdf5
diff --git a/bob/learn/em/data/gmm_ML_32bit_release.hdf5 b/tests/data/gmm_ML_32bit_release.hdf5
similarity index 100%
rename from bob/learn/em/data/gmm_ML_32bit_release.hdf5
rename to tests/data/gmm_ML_32bit_release.hdf5
diff --git a/bob/learn/em/data/gmm_ML_fitted.hdf5 b/tests/data/gmm_ML_fitted.hdf5
similarity index 100%
rename from bob/learn/em/data/gmm_ML_fitted.hdf5
rename to tests/data/gmm_ML_fitted.hdf5
diff --git a/bob/learn/em/data/gmm_ML_legacy.hdf5 b/tests/data/gmm_ML_legacy.hdf5
similarity index 100%
rename from bob/learn/em/data/gmm_ML_legacy.hdf5
rename to tests/data/gmm_ML_legacy.hdf5
diff --git a/bob/learn/em/data/ivector_fit_data.hdf5 b/tests/data/ivector_fit_data.hdf5
similarity index 100%
rename from bob/learn/em/data/ivector_fit_data.hdf5
rename to tests/data/ivector_fit_data.hdf5
diff --git a/bob/learn/em/data/ivector_gs1.hdf5 b/tests/data/ivector_gs1.hdf5
similarity index 100%
rename from bob/learn/em/data/ivector_gs1.hdf5
rename to tests/data/ivector_gs1.hdf5
diff --git a/bob/learn/em/data/ivector_gs2.hdf5 b/tests/data/ivector_gs2.hdf5
similarity index 100%
rename from bob/learn/em/data/ivector_gs2.hdf5
rename to tests/data/ivector_gs2.hdf5
diff --git a/bob/learn/em/data/ivector_ref_nosigma_step1.hdf5 b/tests/data/ivector_ref_nosigma_step1.hdf5
similarity index 100%
rename from bob/learn/em/data/ivector_ref_nosigma_step1.hdf5
rename to tests/data/ivector_ref_nosigma_step1.hdf5
diff --git a/bob/learn/em/data/ivector_ref_nosigma_step2.hdf5 b/tests/data/ivector_ref_nosigma_step2.hdf5
similarity index 100%
rename from bob/learn/em/data/ivector_ref_nosigma_step2.hdf5
rename to tests/data/ivector_ref_nosigma_step2.hdf5
diff --git a/bob/learn/em/data/ivector_ref_step1.hdf5 b/tests/data/ivector_ref_step1.hdf5
similarity index 100%
rename from bob/learn/em/data/ivector_ref_step1.hdf5
rename to tests/data/ivector_ref_step1.hdf5
diff --git a/bob/learn/em/data/ivector_ref_step2.hdf5 b/tests/data/ivector_ref_step2.hdf5
similarity index 100%
rename from bob/learn/em/data/ivector_ref_step2.hdf5
rename to tests/data/ivector_ref_step2.hdf5
diff --git a/bob/learn/em/data/ivector_results.hdf5 b/tests/data/ivector_results.hdf5
similarity index 100%
rename from bob/learn/em/data/ivector_results.hdf5
rename to tests/data/ivector_results.hdf5
diff --git a/bob/learn/em/data/ivector_test_data.hdf5 b/tests/data/ivector_test_data.hdf5
similarity index 100%
rename from bob/learn/em/data/ivector_test_data.hdf5
rename to tests/data/ivector_test_data.hdf5
diff --git a/bob/learn/em/data/means.hdf5 b/tests/data/means.hdf5
similarity index 100%
rename from bob/learn/em/data/means.hdf5
rename to tests/data/means.hdf5
diff --git a/bob/learn/em/data/meansAfterKMeans.hdf5 b/tests/data/meansAfterKMeans.hdf5
similarity index 100%
rename from bob/learn/em/data/meansAfterKMeans.hdf5
rename to tests/data/meansAfterKMeans.hdf5
diff --git a/bob/learn/em/data/meansAfterMAP.hdf5 b/tests/data/meansAfterMAP.hdf5
similarity index 100%
rename from bob/learn/em/data/meansAfterMAP.hdf5
rename to tests/data/meansAfterMAP.hdf5
diff --git a/bob/learn/em/data/meansAfterML.hdf5 b/tests/data/meansAfterML.hdf5
similarity index 100%
rename from bob/learn/em/data/meansAfterML.hdf5
rename to tests/data/meansAfterML.hdf5
diff --git a/bob/learn/em/data/new_adapted_mean.hdf5 b/tests/data/new_adapted_mean.hdf5
similarity index 100%
rename from bob/learn/em/data/new_adapted_mean.hdf5
rename to tests/data/new_adapted_mean.hdf5
diff --git a/bob/learn/em/data/samplesFrom2G_f64.hdf5 b/tests/data/samplesFrom2G_f64.hdf5
similarity index 100%
rename from bob/learn/em/data/samplesFrom2G_f64.hdf5
rename to tests/data/samplesFrom2G_f64.hdf5
diff --git a/bob/learn/em/data/stats.hdf5 b/tests/data/stats.hdf5
similarity index 100%
rename from bob/learn/em/data/stats.hdf5
rename to tests/data/stats.hdf5
diff --git a/bob/learn/em/data/variances.hdf5 b/tests/data/variances.hdf5
similarity index 100%
rename from bob/learn/em/data/variances.hdf5
rename to tests/data/variances.hdf5
diff --git a/bob/learn/em/data/variancesAfterKMeans.hdf5 b/tests/data/variancesAfterKMeans.hdf5
similarity index 100%
rename from bob/learn/em/data/variancesAfterKMeans.hdf5
rename to tests/data/variancesAfterKMeans.hdf5
diff --git a/bob/learn/em/data/variancesAfterMAP.hdf5 b/tests/data/variancesAfterMAP.hdf5
similarity index 100%
rename from bob/learn/em/data/variancesAfterMAP.hdf5
rename to tests/data/variancesAfterMAP.hdf5
diff --git a/bob/learn/em/data/variancesAfterML.hdf5 b/tests/data/variancesAfterML.hdf5
similarity index 100%
rename from bob/learn/em/data/variancesAfterML.hdf5
rename to tests/data/variancesAfterML.hdf5
diff --git a/bob/learn/em/data/weights.hdf5 b/tests/data/weights.hdf5
similarity index 100%
rename from bob/learn/em/data/weights.hdf5
rename to tests/data/weights.hdf5
diff --git a/bob/learn/em/data/weightsAfterKMeans.hdf5 b/tests/data/weightsAfterKMeans.hdf5
similarity index 100%
rename from bob/learn/em/data/weightsAfterKMeans.hdf5
rename to tests/data/weightsAfterKMeans.hdf5
diff --git a/bob/learn/em/data/weightsAfterMAP.hdf5 b/tests/data/weightsAfterMAP.hdf5
similarity index 100%
rename from bob/learn/em/data/weightsAfterMAP.hdf5
rename to tests/data/weightsAfterMAP.hdf5
diff --git a/bob/learn/em/data/weightsAfterML.hdf5 b/tests/data/weightsAfterML.hdf5
similarity index 100%
rename from bob/learn/em/data/weightsAfterML.hdf5
rename to tests/data/weightsAfterML.hdf5
diff --git a/bob/learn/em/data/ztnorm_eval_eval.hdf5 b/tests/data/ztnorm_eval_eval.hdf5
similarity index 100%
rename from bob/learn/em/data/ztnorm_eval_eval.hdf5
rename to tests/data/ztnorm_eval_eval.hdf5
diff --git a/bob/learn/em/data/ztnorm_eval_tnorm.hdf5 b/tests/data/ztnorm_eval_tnorm.hdf5
similarity index 100%
rename from bob/learn/em/data/ztnorm_eval_tnorm.hdf5
rename to tests/data/ztnorm_eval_tnorm.hdf5
diff --git a/bob/learn/em/data/ztnorm_result.hdf5 b/tests/data/ztnorm_result.hdf5
similarity index 100%
rename from bob/learn/em/data/ztnorm_result.hdf5
rename to tests/data/ztnorm_result.hdf5
diff --git a/bob/learn/em/data/ztnorm_znorm_eval.hdf5 b/tests/data/ztnorm_znorm_eval.hdf5
similarity index 100%
rename from bob/learn/em/data/ztnorm_znorm_eval.hdf5
rename to tests/data/ztnorm_znorm_eval.hdf5
diff --git a/bob/learn/em/data/ztnorm_znorm_tnorm.hdf5 b/tests/data/ztnorm_znorm_tnorm.hdf5
similarity index 100%
rename from bob/learn/em/data/ztnorm_znorm_tnorm.hdf5
rename to tests/data/ztnorm_znorm_tnorm.hdf5
diff --git a/bob/learn/em/test/test_factor_analysis.py b/tests/test_factor_analysis.py
similarity index 100%
rename from bob/learn/em/test/test_factor_analysis.py
rename to tests/test_factor_analysis.py
diff --git a/bob/learn/em/test/test_gmm.py b/tests/test_gmm.py
similarity index 90%
rename from bob/learn/em/test/test_gmm.py
rename to tests/test_gmm.py
index cb9ebc28cac70d69b71d6ebb2c5dfaaa65bcad21..af77842f8f144296c275be5063b4b74d0f7df383 100644
--- a/bob/learn/em/test/test_gmm.py
+++ b/tests/test_gmm.py
@@ -47,13 +47,13 @@ def loadGMM():
     gmm = GMMMachine(n_gaussians=2)
 
     gmm.weights = load_array(
-        resource_filename("bob.learn.em", "data/gmm.init_weights.hdf5")
+        resource_filename(__name__, "data/gmm.init_weights.hdf5")
     )
     gmm.means = load_array(
-        resource_filename("bob.learn.em", "data/gmm.init_means.hdf5")
+        resource_filename(__name__, "data/gmm.init_means.hdf5")
     )
     gmm.variances = load_array(
-        resource_filename("bob.learn.em", "data/gmm.init_variances.hdf5")
+        resource_filename(__name__, "data/gmm.init_variances.hdf5")
     )
 
     return gmm
@@ -296,10 +296,8 @@ def test_GMMMachine():
 
 def test_GMMMachine_legacy_loading():
     """Tests that old GMMMachine checkpoints are loaded correctly."""
-    reference_file = resource_filename("bob.learn.em", "data/gmm_ML.hdf5")
-    legacy_gmm_file = resource_filename(
-        "bob.learn.em", "data/gmm_ML_legacy.hdf5"
-    )
+    reference_file = resource_filename(__name__, "data/gmm_ML.hdf5")
+    legacy_gmm_file = resource_filename(__name__, "data/gmm_ML_legacy.hdf5")
     gmm = GMMMachine.from_hdf5(legacy_gmm_file)
     assert isinstance(gmm, GMMMachine)
     assert isinstance(gmm.n_gaussians, np.int64), type(gmm.n_gaussians)
@@ -312,7 +310,7 @@ def test_GMMMachine_stats():
     """Tests a GMMMachine (statistics)"""
     arrayset = load_array(
-        resource_filename("bob.learn.em", "data/faithful.torch3_f64.hdf5")
+        resource_filename(__name__, "data/faithful.torch3_f64.hdf5")
     )
     gmm = GMMMachine(n_gaussians=2)
     gmm.weights = np.array([0.5, 0.5], "float64")
@@ -327,7 +325,7 @@
     stats_ref = GMMStats(n_gaussians=2, n_features=2)
     stats_ref.load(
-        HDF5File(resource_filename("bob.learn.em", "data/stats.hdf5"), "r")
+        HDF5File(resource_filename(__name__, "data/stats.hdf5"), "r")
     )
 
     np.testing.assert_equal(stats.t, stats_ref.t)
@@ -341,14 +339,12 @@ def test_GMMMachine_ll_computation():
     """Test a GMMMachine (log-likelihood computation)"""
-    data = load_array(resource_filename("bob.learn.em", "data/data.hdf5"))
+    data = load_array(resource_filename(__name__, "data/data.hdf5"))
     gmm = GMMMachine(n_gaussians=2)
-    gmm.weights = load_array(
-        resource_filename("bob.learn.em", "data/weights.hdf5")
-    )
-    gmm.means = load_array(resource_filename("bob.learn.em", "data/means.hdf5"))
+    gmm.weights = load_array(resource_filename(__name__, "data/weights.hdf5"))
+    gmm.means = load_array(resource_filename(__name__, "data/means.hdf5"))
     gmm.variances = load_array(
-        resource_filename("bob.learn.em", "data/variances.hdf5")
+        resource_filename(__name__, "data/variances.hdf5")
     )
 
     # Compare the log-likelihood with the one obtained using Chris Matlab implementation
@@ -367,12 +363,10 @@
     )  # Doesn't matter if it is random. The average of 1D array (in python) MUST output the same result for the 2D array (in C++)
 
     gmm = GMMMachine(n_gaussians=2)
-    gmm.weights = load_array(
-        resource_filename("bob.learn.em", "data/weights.hdf5")
-    )
-    gmm.means = load_array(resource_filename("bob.learn.em", "data/means.hdf5"))
+    gmm.weights = load_array(resource_filename(__name__, "data/weights.hdf5"))
+    gmm.means = load_array(resource_filename(__name__, "data/means.hdf5"))
     gmm.variances = load_array(
-        resource_filename("bob.learn.em", "data/variances.hdf5")
+        resource_filename(__name__, "data/variances.hdf5")
     )
 
     ll = 0
@@ -813,12 +807,10 @@ def test_map_transformer():
 def test_gmm_ML_1():
     """Trains a GMMMachine with ML_GMMTrainer"""
     ar = load_array(
-        resource_filename("bob.learn.em", "data/faithful.torch3_f64.hdf5")
+        resource_filename(__name__, "data/faithful.torch3_f64.hdf5")
     )
     gmm_ref = GMMMachine.from_hdf5(
-        HDF5File(
-            resource_filename("bob.learn.em", "data/gmm_ML_fitted.hdf5"), "r"
-        )
+        HDF5File(resource_filename(__name__, "data/gmm_ML_fitted.hdf5"), "r")
     )
 
     for transform in (to_numpy, to_dask_array):
@@ -847,20 +839,18 @@ def test_gmm_ML_2():
     # Trains a GMMMachine with ML_GMMTrainer; compares to a reference
-    ar = load_array(
-        resource_filename("bob.learn.em", "data/dataNormalized.hdf5")
-    )
+    ar = load_array(resource_filename(__name__, "data/dataNormalized.hdf5"))
 
     # Test results
     # Load torch3vision reference
     meansML_ref = load_array(
-        resource_filename("bob.learn.em", "data/meansAfterML.hdf5")
+        resource_filename(__name__, "data/meansAfterML.hdf5")
     )
     variancesML_ref = load_array(
-        resource_filename("bob.learn.em", "data/variancesAfterML.hdf5")
+        resource_filename(__name__, "data/variancesAfterML.hdf5")
     )
     weightsML_ref = load_array(
-        resource_filename("bob.learn.em", "data/weightsAfterML.hdf5")
+        resource_filename(__name__, "data/weightsAfterML.hdf5")
     )
 
     for transform in (to_numpy, to_dask_array):
@@ -868,16 +858,14 @@
         # Initialize GMMMachine
         gmm = GMMMachine(n_gaussians=5)
         gmm.means = load_array(
-            resource_filename("bob.learn.em", "data/meansAfterKMeans.hdf5")
+            resource_filename(__name__, "data/meansAfterKMeans.hdf5")
         ).astype("float64")
         gmm.variances = load_array(
-            resource_filename("bob.learn.em", "data/variancesAfterKMeans.hdf5")
+            resource_filename(__name__, "data/variancesAfterKMeans.hdf5")
         ).astype("float64")
         gmm.weights = np.exp(
             load_array(
-                resource_filename(
-                    "bob.learn.em", "data/weightsAfterKMeans.hdf5"
-                )
+                resource_filename(__name__, "data/weightsAfterKMeans.hdf5")
             ).astype("float64")
         )
@@ -904,15 +892,15 @@ def test_gmm_MAP_1():
     # Train a GMMMachine with MAP_GMMTrainer
     ar = load_array(
-        resource_filename("bob.learn.em", "data/faithful.torch3_f64.hdf5")
+        resource_filename(__name__, "data/faithful.torch3_f64.hdf5")
     )
 
     # test with rng
     gmmprior = GMMMachine.from_hdf5(
-        HDF5File(resource_filename("bob.learn.em", "data/gmm_ML.hdf5"), "r")
+        HDF5File(resource_filename(__name__, "data/gmm_ML.hdf5"), "r")
     )
     gmm = GMMMachine.from_hdf5(
-        HDF5File(resource_filename("bob.learn.em", "data/gmm_ML.hdf5"), "r"),
+        HDF5File(resource_filename(__name__, "data/gmm_ML.hdf5"), "r"),
         ubm=gmmprior,
     )
     gmm.update_means = True
@@ -923,19 +911,17 @@
     gmm = gmm.fit(ar)
 
     gmmprior = GMMMachine.from_hdf5(
-        HDF5File(resource_filename("bob.learn.em", "data/gmm_ML.hdf5"), "r")
+        HDF5File(resource_filename(__name__, "data/gmm_ML.hdf5"), "r")
     )
 
     gmm_ref = GMMMachine.from_hdf5(
-        HDF5File(resource_filename("bob.learn.em", "data/gmm_MAP.hdf5"), "r")
+        HDF5File(resource_filename(__name__, "data/gmm_MAP.hdf5"), "r")
     )
 
     for transform in (to_numpy, to_dask_array):
         ar = transform(ar)
         gmm = GMMMachine.from_hdf5(
-            HDF5File(
-                resource_filename("bob.learn.em", "data/gmm_ML.hdf5"), "r"
-            ),
+            HDF5File(resource_filename(__name__, "data/gmm_ML.hdf5"), "r"),
             ubm=gmmprior,
         )
         gmm.update_means = True
@@ -952,13 +938,11 @@ def test_gmm_MAP_2():
     # Train a GMMMachine with MAP_GMMTrainer and compare with matlab reference
-    data = load_array(resource_filename("bob.learn.em", "data/data.hdf5"))
+    data = load_array(resource_filename(__name__, "data/data.hdf5"))
     data = data.reshape((1, -1))  # make a 2D array out of it
-    means = load_array(resource_filename("bob.learn.em", "data/means.hdf5"))
-    variances = load_array(
-        resource_filename("bob.learn.em", "data/variances.hdf5")
-    )
-    weights = load_array(resource_filename("bob.learn.em", "data/weights.hdf5"))
+    means = load_array(resource_filename(__name__, "data/means.hdf5"))
+    variances = load_array(resource_filename(__name__, "data/variances.hdf5"))
+    weights = load_array(resource_filename(__name__, "data/weights.hdf5"))
 
     gmm = GMMMachine(n_gaussians=2)
     gmm.means = means
@@ -980,7 +964,7 @@
     gmm_adapted.weights = weights
 
     new_means = load_array(
-        resource_filename("bob.learn.em", "data/new_adapted_mean.hdf5")
+        resource_filename(__name__, "data/new_adapted_mean.hdf5")
     )
 
     for transform in (to_numpy, to_dask_array):
@@ -993,19 +977,19 @@ def test_gmm_MAP_3():
     # Train a GMMMachine with MAP_GMMTrainer; compares to old reference
-    ar = load_array(resource_filename("bob.learn.em", "data/dataforMAP.hdf5"))
+    ar = load_array(resource_filename(__name__, "data/dataforMAP.hdf5"))
 
     # Initialize GMMMachine
     n_gaussians = 5
     prior_gmm = GMMMachine(n_gaussians)
     prior_gmm.means = load_array(
-        resource_filename("bob.learn.em", "data/meansAfterML.hdf5")
+        resource_filename(__name__, "data/meansAfterML.hdf5")
     )
     prior_gmm.variances = load_array(
-        resource_filename("bob.learn.em", "data/variancesAfterML.hdf5")
+        resource_filename(__name__, "data/variancesAfterML.hdf5")
     )
     prior_gmm.weights = load_array(
-        resource_filename("bob.learn.em", "data/weightsAfterML.hdf5")
+        resource_filename(__name__, "data/weightsAfterML.hdf5")
     )
 
     threshold = 0.001
@@ -1031,13 +1015,13 @@
     # Test results
     # Load torch3vision reference
     meansMAP_ref = load_array(
-        resource_filename("bob.learn.em", "data/meansAfterMAP.hdf5")
+        resource_filename(__name__, "data/meansAfterMAP.hdf5")
     )
     variancesMAP_ref = load_array(
-        resource_filename("bob.learn.em", "data/variancesAfterMAP.hdf5")
+        resource_filename(__name__, "data/variancesAfterMAP.hdf5")
     )
     weightsMAP_ref = load_array(
-        resource_filename("bob.learn.em", "data/weightsAfterMAP.hdf5")
+        resource_filename(__name__, "data/weightsAfterMAP.hdf5")
     )
 
     for transform in (to_numpy, to_dask_array):
@@ -1057,19 +1041,19 @@ def test_gmm_test():
     # Tests a GMMMachine by computing scores against a model and comparing to a reference
-    ar = load_array(resource_filename("bob.learn.em", "data/dataforMAP.hdf5"))
+    ar = load_array(resource_filename(__name__, "data/dataforMAP.hdf5"))
 
     # Initialize GMMMachine
     n_gaussians = 5
     gmm = GMMMachine(n_gaussians)
     gmm.means = load_array(
-        resource_filename("bob.learn.em", "data/meansAfterML.hdf5")
+        resource_filename(__name__, "data/meansAfterML.hdf5")
     )
     gmm.variances = load_array(
-        resource_filename("bob.learn.em", "data/variancesAfterML.hdf5")
"data/variancesAfterML.hdf5") ) gmm.weights = load_array( - resource_filename("bob.learn.em", "data/weightsAfterML.hdf5") + resource_filename(__name__, "data/weightsAfterML.hdf5") ) threshold = 0.001 diff --git a/bob/learn/em/test/test_ivector.py b/tests/test_ivector.py similarity index 92% rename from bob/learn/em/test/test_ivector.py rename to tests/test_ivector.py index e1a30f77064f281abe55ff2b24615cdc67e8402c..8001db039c45c4381312a04b8bd67869eb43f396 100644 --- a/bob/learn/em/test/test_ivector.py +++ b/tests/test_ivector.py @@ -14,7 +14,8 @@ from pkg_resources import resource_filename from bob.learn.em import GMMMachine, GMMStats, IVectorMachine from bob.learn.em.ivector import e_step, m_step -from bob.learn.em.test.test_kmeans import to_numpy + +from .test_kmeans import to_numpy @contextlib.contextmanager @@ -105,10 +106,10 @@ def test_ivector_machine_transformer(): def test_ivector_machine_training(): gs1 = GMMStats.from_hdf5( - resource_filename("bob.learn.em", "data/ivector_gs1.hdf5") + resource_filename(__name__, "data/ivector_gs1.hdf5") ) gs2 = GMMStats.from_hdf5( - resource_filename("bob.learn.em", "data/ivector_gs2.hdf5") + resource_filename(__name__, "data/ivector_gs2.hdf5") ) data = [gs1, gs2] @@ -160,7 +161,7 @@ def test_trainer_nosigma(): data = [ GMMStats.from_hdf5( - resource_filename("bob.learn.em", f"data/ivector_gs{i+1}.hdf5") + resource_filename(__name__, f"data/ivector_gs{i+1}.hdf5") ) for i in range(2) ] @@ -168,7 +169,7 @@ def test_trainer_nosigma(): references = [ _load_references_from_file( resource_filename( - "bob.learn.em", f"data/ivector_ref_nosigma_step{i+1}.hdf5" + __name__, f"data/ivector_ref_nosigma_step{i+1}.hdf5" ) ) for i in range(2) @@ -217,16 +218,14 @@ def test_trainer_update_sigma(): data = [ GMMStats.from_hdf5( - resource_filename("bob.learn.em", f"data/ivector_gs{i+1}.hdf5") + resource_filename(__name__, f"data/ivector_gs{i+1}.hdf5") ) for i in range(2) ] references = [ _load_references_from_file( - resource_filename( - "bob.learn.em", f"data/ivector_ref_step{i+1}.hdf5" - ) + resource_filename(__name__, f"data/ivector_ref_step{i+1}.hdf5") ) for i in range(2) ] @@ -273,20 +272,16 @@ def test_ivector_fit(): ubm.means = np.array([[1.0, 7, 4], [4, 5, 3]]) ubm.variances = np.array([[0.5, 1.0, 1.5], [1.0, 1.5, 2.0]]) - fit_data_file = resource_filename( - "bob.learn.em", "data/ivector_fit_data.hdf5" - ) + fit_data_file = resource_filename(__name__, "data/ivector_fit_data.hdf5") with HDF5File(fit_data_file, "r") as f: fit_data = f["array"][()] - test_data_file = resource_filename( - "bob.learn.em", "data/ivector_test_data.hdf5" - ) + test_data_file = resource_filename(__name__, "data/ivector_test_data.hdf5") with HDF5File(test_data_file, "r") as f: test_data = f["array"][()] reference_result_file = resource_filename( - "bob.learn.em", "data/ivector_results.hdf5" + __name__, "data/ivector_results.hdf5" ) with HDF5File(reference_result_file, "r") as f: reference_result = f["array"][()] diff --git a/bob/learn/em/test/test_kmeans.py b/tests/test_kmeans.py similarity index 100% rename from bob/learn/em/test/test_kmeans.py rename to tests/test_kmeans.py diff --git a/bob/learn/em/test/test_linear.py b/tests/test_linear.py similarity index 100% rename from bob/learn/em/test/test_linear.py rename to tests/test_linear.py diff --git a/bob/learn/em/test/test_linearscoring.py b/tests/test_linearscoring.py similarity index 100% rename from bob/learn/em/test/test_linearscoring.py rename to tests/test_linearscoring.py diff --git a/bob/learn/em/test/test_picklability.py 
b/tests/test_picklability.py similarity index 100% rename from bob/learn/em/test/test_picklability.py rename to tests/test_picklability.py