diff --git a/bob/learn/misc/cpp/IVectorTrainer.cpp b/bob/learn/misc/cpp/IVectorTrainer.cpp
index be843b20e3347a79a24ceac835ce8ad0b2eeec84..4f929af95a62fa232f73f6d529e9ca8e8d849c43 100644
--- a/bob/learn/misc/cpp/IVectorTrainer.cpp
+++ b/bob/learn/misc/cpp/IVectorTrainer.cpp
@@ -15,7 +15,6 @@
 #include <bob.math/linear.h>
 #include <bob.math/linsolve.h>
 #include <boost/shared_ptr.hpp>
-#include <boost/random.hpp>
 
 bob::learn::misc::IVectorTrainer::IVectorTrainer(const bool update_sigma,
   const double convergence_threshold,
diff --git a/bob/learn/misc/cpp/KMeansTrainer.cpp b/bob/learn/misc/cpp/KMeansTrainer.cpp
index ae69ee740a482b9eb6c6c739970496f5ccd74f9f..092c87d48698d46e4a45d1ac03a0c4337b5ed04a 100644
--- a/bob/learn/misc/cpp/KMeansTrainer.cpp
+++ b/bob/learn/misc/cpp/KMeansTrainer.cpp
@@ -8,11 +8,9 @@
 #include <bob.learn.misc/KMeansTrainer.h>
 #include <bob.core/array_copy.h>
-#include <boost/random.hpp>
+#include <bob.core/random.h>
 
-#if BOOST_VERSION >= 104700
-#include <boost/random/discrete_distribution.hpp>
-#endif
+#include <boost/random.hpp>
 
 bob::learn::misc::KMeansTrainer::KMeansTrainer(double convergence_threshold,
   size_t max_iterations, bool compute_likelihood, InitializationMethod i_m):
@@ -71,9 +69,7 @@ void bob::learn::misc::KMeansTrainer::initialize(bob::learn::misc::KMeansMachine
   // assign the i'th mean to a random example within the i'th chunk
   blitz::Range a = blitz::Range::all();
 
-#if BOOST_VERSION >= 104700
   if(m_initialization_method == RANDOM || m_initialization_method == RANDOM_NO_DUPLICATE) // Random initialization
-#endif
   {
     unsigned int n_chunk = n_data / kmeans.getNMeans();
     size_t n_max_trials = (size_t)n_chunk * 5;
@@ -83,11 +79,10 @@ void bob::learn::misc::KMeansTrainer::initialize(bob::learn::misc::KMeansMachine
     for(size_t i=0; i<kmeans.getNMeans(); ++i)
     {
-      boost::uniform_int<> range(i*n_chunk, (i+1)*n_chunk-1);
-      boost::variate_generator<boost::mt19937&, boost::uniform_int<> > die(*m_rng, range);
+      boost::uniform_int<> die(i*n_chunk, (i+1)*n_chunk-1);
       // get random index within chunk
-      unsigned int index = die();
+      unsigned int index = die(*m_rng);
       // get the example at that index
       blitz::Array<double, 1> mean = ar(index,a);
@@ -110,7 +105,7 @@ void bob::learn::misc::KMeansTrainer::initialize(bob::learn::misc::KMeansMachine
           break;
         else
         {
-          index = die();
+          index = die(*m_rng);
          mean = ar(index,a);
          ++count;
        }
@@ -127,14 +122,12 @@ void bob::learn::misc::KMeansTrainer::initialize(bob::learn::misc::KMeansMachine
       kmeans.setMean(i, mean);
     }
   }
-#if BOOST_VERSION >= 104700
   else // K-Means++
   {
     // 1.a. Selects one sample randomly
-    boost::uniform_int<> range(0, n_data-1);
-    boost::variate_generator<boost::mt19937&, boost::uniform_int<> > die(*m_rng, range);
+    boost::uniform_int<> die(0, n_data-1);
     // Gets the example at a random index
-    blitz::Array<double,1> mean = ar(die(),a);
+    blitz::Array<double,1> mean = ar(die(*m_rng),a);
     kmeans.setMean(0, mean);
     // 1.b. Loops, computes probability distribution and select samples accordingly
@@ -160,12 +153,11 @@ void bob::learn::misc::KMeansTrainer::initialize(bob::learn::misc::KMeansMachine
       // Takes a sample according to the weights distribution
       // Blitz iterators is fine as the weights array should be C-style contiguous
       bob::core::array::assertCContiguous(weights);
-      boost::random::discrete_distribution<> die2(weights.begin(), weights.end());
+      bob::core::random::discrete_distribution<> die2(weights.begin(), weights.end());
       blitz::Array<double,1> new_mean = ar(die2(*m_rng),a);
       kmeans.setMean(m, new_mean);
     }
   }
-#endif
 
   // Resize the accumulator
   m_zeroethOrderStats.resize(kmeans.getNMeans());
   m_firstOrderStats.resize(kmeans.getNMeans(), kmeans.getNInputs());
diff --git a/bob/learn/misc/cpp/PLDATrainer.cpp b/bob/learn/misc/cpp/PLDATrainer.cpp
index 2c687f30e2a2fc8a02a1581895c386f0818cb96f..725dd45b509d88926f48bd2afce5c13a1ce8f72d 100644
--- a/bob/learn/misc/cpp/PLDATrainer.cpp
+++ b/bob/learn/misc/cpp/PLDATrainer.cpp
@@ -14,7 +14,6 @@
 #include <bob.math/inv.h>
 #include <bob.math/svd.h>
 #include <algorithm>
-#include <boost/random.hpp>
 #include <vector>
 #include <limits>
diff --git a/bob/learn/misc/include/bob.learn.misc/IVectorTrainer.h b/bob/learn/misc/include/bob.learn.misc/IVectorTrainer.h
index 91f28ac90fb87ceb62bde3e11651285a70e92ee4..f3e82254d01e24c67d472280fabd2130752bff3e 100644
--- a/bob/learn/misc/include/bob.learn.misc/IVectorTrainer.h
+++ b/bob/learn/misc/include/bob.learn.misc/IVectorTrainer.h
@@ -13,7 +13,6 @@
 #include <bob.learn.misc/IVectorMachine.h>
 #include <bob.learn.misc/GMMStats.h>
 #include <boost/shared_ptr.hpp>
-#include <boost/random.hpp>
 #include <vector>
 
 namespace bob { namespace learn { namespace misc {
diff --git a/bob/learn/misc/old/ivector_trainer.cc b/bob/learn/misc/old/ivector_trainer.cc
index 8b964a832799ef492d6201b66d0bbfde4d7e4269..72328bd8f55cb9aeea1f5924f88c98623bbc33fb 100644
--- a/bob/learn/misc/old/ivector_trainer.cc
+++ b/bob/learn/misc/old/ivector_trainer.cc
@@ -83,7 +83,7 @@ static void py_set_AccSnormij(bob::learn::misc::IVectorTrainer& trainer,
 
 // include the random API of bob.core
-#include <bob.core/random.h>
+#include <bob.core/random_api.h>
 static boost::python::object ITB_getRng(EMTrainerIVectorBase& self){
   // create new object
   PyObject* o = PyBoostMt19937_Type.tp_alloc(&PyBoostMt19937_Type,0);
diff --git a/bob/learn/misc/old/jfa_trainer.cc b/bob/learn/misc/old/jfa_trainer.cc
index a5966e27b5c32718863253aed8e5695c55ce88d7..542c33ab8f987658997ab7c927bf07d4c8a56d31 100644
--- a/bob/learn/misc/old/jfa_trainer.cc
+++ b/bob/learn/misc/old/jfa_trainer.cc
@@ -311,7 +311,7 @@ static void jfa_set_accDA2(bob::learn::misc::JFATrainer& trainer,
 
 // include the random API of bob.core
-#include <bob.core/random.h>
+#include <bob.core/random_api.h>
 static boost::python::object isv_getRng(bob::learn::misc::ISVTrainer& self){
   // create new object
   PyObject* o = PyBoostMt19937_Type.tp_alloc(&PyBoostMt19937_Type,0);
diff --git a/bob/learn/misc/old/kmeans_trainer.cc b/bob/learn/misc/old/kmeans_trainer.cc
index 4ef008bb01702da3555c8aa0f22b48fb6ae0fcc2..0b90db77d5bcfbea5d927da0220bae8d87762c03 100644
--- a/bob/learn/misc/old/kmeans_trainer.cc
+++ b/bob/learn/misc/old/kmeans_trainer.cc
@@ -57,7 +57,7 @@ static void py_mStep(EMTrainerKMeansBase& trainer,
 }
 
 // include the random API of bob.core
-#include <bob.core/random.h>
+#include <bob.core/random_api.h>
 static boost::python::object KMTB_getRng(EMTrainerKMeansBase& self){
   // create new object
   PyObject* o = PyBoostMt19937_Type.tp_alloc(&PyBoostMt19937_Type,0);
diff --git a/bob/learn/misc/old/main.cc b/bob/learn/misc/old/main.cc
index c8a483428957898d347542c6bd082d95789b3ce2..16f7f9ba565063a449f54451489bb4ba88282e53 100644
--- a/bob/learn/misc/old/main.cc
+++ b/bob/learn/misc/old/main.cc
@@ -14,7 +14,7 @@
 #include <bob.blitz/capi.h>
 #include <bob.blitz/cleanup.h>
 #include <bob.io.base/api.h>
-#include <bob.core/random.h>
+#include <bob.core/random_api.h>
 
 #include "ndarray.h"
diff --git a/bob/learn/misc/old/plda_trainer.cc b/bob/learn/misc/old/plda_trainer.cc
index a9a9de3e24def7637fbf93e25c86253989955d01..c68444496c8940d640d7844e1967c9c0dca72f5f 100644
--- a/bob/learn/misc/old/plda_trainer.cc
+++ b/bob/learn/misc/old/plda_trainer.cc
@@ -93,7 +93,7 @@ static object get_z_second_order(bob::learn::misc::PLDATrainer& m) {
 
 // include the random API of bob.core
-#include <bob.core/random.h>
+#include <bob.core/random_api.h>
 static boost::python::object TB_getRng(EMTrainerPLDA& self){
   // create new object
   PyObject* o = PyBoostMt19937_Type.tp_alloc(&PyBoostMt19937_Type,0);
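
Note on the sampling pattern adopted above: the patch drops boost::variate_generator and instead passes the engine to the distribution at each draw. The sketch below is illustrative only and not part of the patch; it uses plain Boost types (the seed, ranges and weights are placeholder values), and it assumes bob::core::random::discrete_distribution mirrors the boost::random::discrete_distribution interface shown here.

// build with: g++ example.cc -o example  (requires Boost headers, >= 1.47 for discrete_distribution)
#include <boost/random.hpp>
#include <boost/random/discrete_distribution.hpp>
#include <iostream>

int main() {
  boost::mt19937 rng(42); // placeholder seed

  // Old pattern (removed by the patch): bind engine and distribution
  // together in a variate_generator and call it with no arguments.
  boost::uniform_int<> range(0, 9);
  boost::variate_generator<boost::mt19937&, boost::uniform_int<> > die_old(rng, range);
  int a = die_old();

  // New pattern (introduced by the patch): keep only the distribution
  // object and pass the engine explicitly at each draw.
  boost::uniform_int<> die_new(0, 9);
  int b = die_new(rng);

  // Weighted draw, analogous to the K-Means++ branch: an index is
  // sampled with probability proportional to its weight.
  double weights[] = {0.1, 0.7, 0.2};
  boost::random::discrete_distribution<> die2(weights, weights + 3);
  int c = die2(rng);

  std::cout << a << " " << b << " " << c << std::endl;
  return 0;
}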