From 0313361732f5551f274fef309840e432c475cbc8 Mon Sep 17 00:00:00 2001
From: Andre Anjos <andre.dos.anjos@gmail.com>
Date: Mon, 26 May 2014 18:43:09 +0200
Subject: [PATCH] xbob -> bob
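
Rename the package namespace from xbob to bob. Python imports, C/C++ include
directories, preprocessor symbols (XBOB_* -> BOB_*) and the exported C-API
pointer table (PyXbobLearnMLP_* -> PyBobLearnMLP_*) all drop the old "x"
prefix, and the include tree moves from xbob/learn/mlp/include/xbob.learn.mlp
to bob/learn/mlp/include/bob.learn.mlp. Downstream code has to follow the
rename; a minimal sketch of the update (the Machine shape below is
illustrative only, borrowed from the test suite in this diff):

    # before this patch
    # from xbob.learn.mlp import Machine, get_config

    # after this patch
    from bob.learn.mlp import Machine, get_config

    print(get_config())      # same call the updated Travis script runs
    m = Machine((2, 3, 1))   # illustrative 2-input, 3-hidden, 1-output MLP
    print(len(m.weights))    # one weight matrix per layer, as in the tests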

---
 .travis.yml                                   |   4 +-
 MANIFEST.in                                   |   2 +-
 README.rst                                    |  24 ++--
 {xbob => bob}/__init__.py                     |   0
 {xbob => bob}/learn/__init__.py               |   0
 {xbob => bob}/learn/mlp/__init__.py           |   0
 {xbob => bob}/learn/mlp/backprop.cpp          |  22 ++--
 {xbob => bob}/learn/mlp/cost.cpp              |  22 ++--
 {xbob => bob}/learn/mlp/cxx/backprop.cpp      |   2 +-
 {xbob => bob}/learn/mlp/cxx/cross_entropy.cpp |   2 +-
 {xbob => bob}/learn/mlp/cxx/machine.cpp       |   2 +-
 {xbob => bob}/learn/mlp/cxx/roll.cpp          |   2 +-
 {xbob => bob}/learn/mlp/cxx/rprop.cpp         |   2 +-
 {xbob => bob}/learn/mlp/cxx/shuffler.cpp      |   2 +-
 {xbob => bob}/learn/mlp/cxx/square_error.cpp  |   2 +-
 {xbob => bob}/learn/mlp/cxx/trainer.cpp       |   2 +-
 .../learn/mlp/include/bob.learn.mlp}/api.h    | 120 +++++++++---------
 .../mlp/include/bob.learn.mlp}/backprop.h     |   0
 bob/learn/mlp/include/bob.learn.mlp/config.h  |  14 ++
 .../learn/mlp/include/bob.learn.mlp}/cost.h   |   0
 .../include/bob.learn.mlp}/cross_entropy.h    |   0
 .../mlp/include/bob.learn.mlp}/machine.h      |   0
 .../learn/mlp/include/bob.learn.mlp}/roll.h   |   0
 .../learn/mlp/include/bob.learn.mlp}/rprop.h  |   0
 .../mlp/include/bob.learn.mlp}/shuffler.h     |   0
 .../mlp/include/bob.learn.mlp}/square_error.h |   0
 .../mlp/include/bob.learn.mlp}/trainer.h      |   0
 {xbob => bob}/learn/mlp/machine.cpp           |  24 ++--
 {xbob => bob}/learn/mlp/main.cpp              | 102 +++++++--------
 {xbob => bob}/learn/mlp/roll.cpp              |  10 +-
 {xbob => bob}/learn/mlp/rprop.cpp             |  22 ++--
 {xbob => bob}/learn/mlp/shuffler.cpp          |  14 +-
 {xbob => bob}/learn/mlp/test_backprop.py      |   2 +-
 {xbob => bob}/learn/mlp/test_cost.py          |   2 +-
 {xbob => bob}/learn/mlp/test_machine.py       |  16 +--
 {xbob => bob}/learn/mlp/test_roll.py          |   0
 {xbob => bob}/learn/mlp/test_rprop.py         |   2 +-
 {xbob => bob}/learn/mlp/test_shuffler.py      |  12 +-
 {xbob => bob}/learn/mlp/test_utils.py         |   0
 {xbob => bob}/learn/mlp/trainer.cpp           |  20 +--
 {xbob => bob}/learn/mlp/utils.h               |   8 +-
 {xbob => bob}/learn/mlp/version.cpp           |  58 ++++-----
 buildout.cfg                                  |  28 ++--
 doc/c_cpp_api.rst                             |  48 +++----
 doc/conf.py                                   |  12 +-
 doc/guide.rst                                 |  36 +++---
 doc/py_api.rst                                |   4 +-
 setup.py                                      |  74 +++++------
 .../learn/mlp/include/xbob.learn.mlp/config.h |  14 --
 49 files changed, 366 insertions(+), 366 deletions(-)
 rename {xbob => bob}/__init__.py (100%)
 rename {xbob => bob}/learn/__init__.py (100%)
 rename {xbob => bob}/learn/mlp/__init__.py (100%)
 rename {xbob => bob}/learn/mlp/backprop.cpp (97%)
 rename {xbob => bob}/learn/mlp/cost.cpp (97%)
 rename {xbob => bob}/learn/mlp/cxx/backprop.cpp (99%)
 rename {xbob => bob}/learn/mlp/cxx/cross_entropy.cpp (96%)
 rename {xbob => bob}/learn/mlp/cxx/machine.cpp (99%)
 rename {xbob => bob}/learn/mlp/cxx/roll.cpp (98%)
 rename {xbob => bob}/learn/mlp/cxx/rprop.cpp (99%)
 rename {xbob => bob}/learn/mlp/cxx/shuffler.cpp (99%)
 rename {xbob => bob}/learn/mlp/cxx/square_error.cpp (95%)
 rename {xbob => bob}/learn/mlp/cxx/trainer.cpp (99%)
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/api.h (68%)
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/backprop.h (100%)
 create mode 100644 bob/learn/mlp/include/bob.learn.mlp/config.h
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/cost.h (100%)
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/cross_entropy.h (100%)
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/machine.h (100%)
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/roll.h (100%)
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/rprop.h (100%)
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/shuffler.h (100%)
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/square_error.h (100%)
 rename {xbob/learn/mlp/include/xbob.learn.mlp => bob/learn/mlp/include/bob.learn.mlp}/trainer.h (100%)
 rename {xbob => bob}/learn/mlp/machine.cpp (98%)
 rename {xbob => bob}/learn/mlp/main.cpp (76%)
 rename {xbob => bob}/learn/mlp/roll.cpp (98%)
 rename {xbob => bob}/learn/mlp/rprop.cpp (98%)
 rename {xbob => bob}/learn/mlp/shuffler.cpp (98%)
 rename {xbob => bob}/learn/mlp/test_backprop.py (98%)
 rename {xbob => bob}/learn/mlp/test_cost.py (99%)
 rename {xbob => bob}/learn/mlp/test_machine.py (95%)
 rename {xbob => bob}/learn/mlp/test_roll.py (100%)
 rename {xbob => bob}/learn/mlp/test_rprop.py (99%)
 rename {xbob => bob}/learn/mlp/test_shuffler.py (96%)
 rename {xbob => bob}/learn/mlp/test_utils.py (100%)
 rename {xbob => bob}/learn/mlp/trainer.cpp (98%)
 rename {xbob => bob}/learn/mlp/utils.h (95%)
 rename {xbob => bob}/learn/mlp/version.cpp (71%)
 delete mode 100644 xbob/learn/mlp/include/xbob.learn.mlp/config.h

diff --git a/.travis.yml b/.travis.yml
index 8cc1dc5..1f9f3d6 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -25,8 +25,8 @@ install:
   - "python bootstrap.py"
   - "CFLAGS=-coverage ./bin/buildout"
 script:
-  - "./bin/python -c 'from xbob.learn.mlp import get_config; print(get_config())'"
-  - "./bin/coverage run --source=xbob.learn.mlp ./bin/nosetests -sv"
+  - "./bin/python -c 'from bob.learn.mlp import get_config; print(get_config())'"
+  - "./bin/coverage run --source=bob.learn.mlp ./bin/nosetests -sv"
   - "./bin/sphinx-build -b doctest doc sphinx"
   - "./bin/sphinx-build -b html doc sphinx"
 after_success:
diff --git a/MANIFEST.in b/MANIFEST.in
index 09ed020..4b6cf9d 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,3 @@
 include LICENSE README.rst bootstrap.py buildout.cfg
 recursive-include doc conf.py *.rst
-recursive-include xbob *.cpp *.h
+recursive-include bob *.cpp *.h
diff --git a/README.rst b/README.rst
index d35afe8..8c541ee 100644
--- a/README.rst
+++ b/README.rst
@@ -2,16 +2,16 @@
 .. Andre Anjos <andre.anjos@idiap.ch>
 .. Thu 24 Apr 17:24:10 2014 CEST
 
-.. image:: https://travis-ci.org/bioidiap/xbob.learn.mlp.svg?branch=master
-   :target: https://travis-ci.org/bioidiap/xbob.learn.mlp
-.. image:: https://coveralls.io/repos/bioidiap/xbob.learn.mlp/badge.png
-   :target: https://coveralls.io/r/bioidiap/xbob.learn.mlp
-.. image:: http://img.shields.io/github/tag/bioidiap/xbob.learn.mlp.png
-   :target: https://github.com/bioidiap/xbob.learn.mlp
-.. image:: http://img.shields.io/pypi/v/xbob.learn.mlp.png
-   :target: https://pypi.python.org/pypi/xbob.learn.mlp
-.. image:: http://img.shields.io/pypi/dm/xbob.learn.mlp.png
-   :target: https://pypi.python.org/pypi/xbob.learn.mlp
+.. image:: https://travis-ci.org/bioidiap/bob.learn.mlp.svg?branch=master
+   :target: https://travis-ci.org/bioidiap/bob.learn.mlp
+.. image:: https://coveralls.io/repos/bioidiap/bob.learn.mlp/badge.png
+   :target: https://coveralls.io/r/bioidiap/bob.learn.mlp
+.. image:: http://img.shields.io/github/tag/bioidiap/bob.learn.mlp.png
+   :target: https://github.com/bioidiap/bob.learn.mlp
+.. image:: http://img.shields.io/pypi/v/bob.learn.mlp.png
+   :target: https://pypi.python.org/pypi/bob.learn.mlp
+.. image:: http://img.shields.io/pypi/dm/bob.learn.mlp.png
+   :target: https://pypi.python.org/pypi/bob.learn.mlp
 
 ===============================================================
  Python bindings for Bob's Multi-Layer Perceptron and Trainers
@@ -41,7 +41,7 @@ Testing
 
 You can run a set of tests using the nose test runner::
 
-  $ nosetests -sv xbob.learn.mlp
+  $ nosetests -sv bob.learn.mlp
 
 .. warning::
 
@@ -58,7 +58,7 @@ You can run our documentation tests using sphinx itself::
 
 You can test overall test coverage with::
 
-  $ nosetests --with-coverage --cover-package=xbob.learn.mlp
+  $ nosetests --with-coverage --cover-package=bob.learn.mlp
 
 The ``coverage`` egg must be installed for this to work properly.
 
diff --git a/xbob/__init__.py b/bob/__init__.py
similarity index 100%
rename from xbob/__init__.py
rename to bob/__init__.py
diff --git a/xbob/learn/__init__.py b/bob/learn/__init__.py
similarity index 100%
rename from xbob/learn/__init__.py
rename to bob/learn/__init__.py
diff --git a/xbob/learn/mlp/__init__.py b/bob/learn/mlp/__init__.py
similarity index 100%
rename from xbob/learn/mlp/__init__.py
rename to bob/learn/mlp/__init__.py
diff --git a/xbob/learn/mlp/backprop.cpp b/bob/learn/mlp/backprop.cpp
similarity index 97%
rename from xbob/learn/mlp/backprop.cpp
rename to bob/learn/mlp/backprop.cpp
index d9462de..3580be3 100644
--- a/xbob/learn/mlp/backprop.cpp
+++ b/bob/learn/mlp/backprop.cpp
@@ -7,10 +7,10 @@
  * Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
  */
 
-#define XBOB_LEARN_MLP_MODULE
-#include <xbob.blitz/cppapi.h>
-#include <xbob.blitz/cleanup.h>
-#include <xbob.learn.mlp/api.h>
+#define BOB_LEARN_MLP_MODULE
+#include <bob.blitz/cppapi.h>
+#include <bob.blitz/cleanup.h>
+#include <bob.learn.mlp/api.h>
 #include <structmember.h>
 
 #include "utils.h"
@@ -19,7 +19,7 @@
  * Implementation of BackProp trainer *
  **************************************/
 
-PyDoc_STRVAR(s_trainer_str, XBOB_EXT_MODULE_PREFIX ".BackProp");
+PyDoc_STRVAR(s_trainer_str, BOB_EXT_MODULE_PREFIX ".BackProp");
 
 PyDoc_STRVAR(s_trainer_doc,
 "BackProp(batch_size, cost, [trainer, [train_biases]]) -> new BackProp\n\
@@ -46,10 +46,10 @@ batch_size, int\n\
    \n\
       This setting affects the convergence.\n\
 \n\
-cost, :py:class:`xbob.learn.mlp.Cost`\n\
+cost, :py:class:`bob.learn.mlp.Cost`\n\
    An object that can calculate the cost at every iteration.\n\
 \n\
-machine, :py:class:`xbob.learn.mlp.Machine`\n\
+machine, :py:class:`bob.learn.mlp.Machine`\n\
    This parameter that will be used as a basis for this trainer's\n\
    internal properties (cache sizes, for instance).\n\
 \n\
@@ -57,7 +57,7 @@ train_biases, bool\n\
    A boolean indicating if we should train the biases weights (set\n\
    it to ``True``) or not (set it to ``False``).\n\
 \n\
-other, :py:class:`xbob.learn.mlp.Trainer`\n\
+other, :py:class:`bob.learn.mlp.Trainer`\n\
    Another trainer from which this new copy will get its properties\n\
    from. If you use this constructor than a new (deep) copy of the\n\
    trainer is created.\n\
@@ -395,7 +395,7 @@ thrown.\n\
    \n\
    The machine is **not** initialized randomly at each call to this\n\
    method. It is your task to call\n\
-   :py:meth:`xbob.learn.mlp.Machine.randomize` once at the machine\n\
+   :py:meth:`bob.learn.mlp.Machine.randomize` once on the machine\n\
    you want to train and then call this method as many times as you\n\
    think is necessary. This design allows for a *stopping criteria*\n\
    to be encoded outside the scope of this trainer and for this method\n\
@@ -405,9 +405,9 @@ thrown.\n\
 \n\
 Keyword arguments:\n\
 \n\
-machine, :py:class:`xbob.learn.mlp.Machine`\n\
+machine, :py:class:`bob.learn.mlp.Machine`\n\
    The machine that will be trained. You must have called\n\
-   :py:meth:`xbob.learn.mlp.Trainer.initialize` which a similarly\n\
+   :py:meth:`bob.learn.mlp.Trainer.initialize` with a similarly\n\
    configured machine before being able to call this method, or an\n\
    exception may be thrown.\n\
 \n\
diff --git a/xbob/learn/mlp/cost.cpp b/bob/learn/mlp/cost.cpp
similarity index 97%
rename from xbob/learn/mlp/cost.cpp
rename to bob/learn/mlp/cost.cpp
index 722dabb..51fc9aa 100644
--- a/xbob/learn/mlp/cost.cpp
+++ b/bob/learn/mlp/cost.cpp
@@ -7,11 +7,11 @@
  * Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
  */
 
-#define XBOB_LEARN_MLP_MODULE
-#include <xbob.blitz/cppapi.h>
-#include <xbob.blitz/cleanup.h>
-#include <xbob.learn.mlp/api.h>
-#include <xbob.learn.activation/api.h>
+#define BOB_LEARN_MLP_MODULE
+#include <bob.blitz/cppapi.h>
+#include <bob.blitz/cleanup.h>
+#include <bob.learn.mlp/api.h>
+#include <bob.learn.activation/api.h>
 #include <structmember.h>
 #include <boost/function.hpp>
 #include <boost/bind.hpp>
@@ -20,7 +20,7 @@
  * Implementation of Cost base class *
  *************************************/
 
-PyDoc_STRVAR(s_cost_str, XBOB_EXT_MODULE_PREFIX ".Cost");
+PyDoc_STRVAR(s_cost_str, BOB_EXT_MODULE_PREFIX ".Cost");
 
 PyDoc_STRVAR(s_cost_doc,
 "A base class for evaluating the performance cost.\n\
@@ -80,7 +80,7 @@ PyObject* PyBobLearnCost_Repr(PyBobLearnCostObject* self) {
   /**
    * Expected output:
    *
-   * <xbob.learn.linear.Cost [...]>
+   * <bob.learn.linear.Cost [...]>
    */
 
   auto retval = PyUnicode_FromFormat("<%s [act: %s]>",
@@ -503,7 +503,7 @@ PyTypeObject PyBobLearnCost_Type = {
     PyBobLearnCost_new,                       /* tp_new */
 };
 
-PyDoc_STRVAR(s_squareerror_str, XBOB_EXT_MODULE_PREFIX ".SquareError");
+PyDoc_STRVAR(s_squareerror_str, BOB_EXT_MODULE_PREFIX ".SquareError");
 
 PyDoc_STRVAR(s_squareerror_doc,
 "SquareError(actfun) -> new SquareError functor\n\
@@ -604,7 +604,7 @@ PyTypeObject PyBobLearnSquareError_Type = {
     (initproc)PyBobLearnSquareError_init,     /* tp_init */
 };
 
-PyDoc_STRVAR(s_crossentropyloss_str, XBOB_EXT_MODULE_PREFIX ".CrossEntropyLoss");
+PyDoc_STRVAR(s_crossentropyloss_str, BOB_EXT_MODULE_PREFIX ".CrossEntropyLoss");
 
 PyDoc_STRVAR(s_crossentropyloss_doc,
 "CrossEntropyLoss(actfun) -> new CrossEntropyLoss functor\n\
@@ -623,7 +623,7 @@ Keyword arguments:\n\
 \n\
 actfun\n\
   The activation function object used at the last layer. If you\n\
-  set this to :py:class:`xbob.learn.activation.Logistic`, a\n\
+  set this to :py:class:`bob.learn.activation.Logistic`, a\n\
   mathematical simplification is possible in which\n\
   ``backprop_error()`` can benefit increasing the numerical\n\
   stability of the training process. The simplification goes\n\
@@ -689,7 +689,7 @@ PyDoc_STRVAR(s_logistic_activation_doc,
 "o.logistic_activation() -> bool\n\
 \n\
 Tells if this functor is set to operate together with a\n\
-:py:class:`xbob.learn.activation.Logistic` activation function.\n\
+:py:class:`bob.learn.activation.Logistic` activation function.\n\
 ");
 
 static PyObject* PyBobLearnCrossEntropyLoss_getLogisticActivation
diff --git a/xbob/learn/mlp/cxx/backprop.cpp b/bob/learn/mlp/cxx/backprop.cpp
similarity index 99%
rename from xbob/learn/mlp/cxx/backprop.cpp
rename to bob/learn/mlp/cxx/backprop.cpp
index d881769..d9f0290 100644
--- a/xbob/learn/mlp/cxx/backprop.cpp
+++ b/bob/learn/mlp/cxx/backprop.cpp
@@ -12,7 +12,7 @@
 #include <bob/core/check.h>
 #include <bob/math/linear.h>
 
-#include <xbob.learn.mlp/backprop.h>
+#include <bob.learn.mlp/backprop.h>
 
 bob::learn::mlp::BackProp::BackProp(size_t batch_size,
     boost::shared_ptr<bob::learn::mlp::Cost> cost):
diff --git a/xbob/learn/mlp/cxx/cross_entropy.cpp b/bob/learn/mlp/cxx/cross_entropy.cpp
similarity index 96%
rename from xbob/learn/mlp/cxx/cross_entropy.cpp
rename to bob/learn/mlp/cxx/cross_entropy.cpp
index aa3c544..ef9127c 100644
--- a/xbob/learn/mlp/cxx/cross_entropy.cpp
+++ b/bob/learn/mlp/cxx/cross_entropy.cpp
@@ -7,7 +7,7 @@
  * Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
  */
 
-#include <xbob.learn.mlp/cross_entropy.h>
+#include <bob.learn.mlp/cross_entropy.h>
 
 namespace bob { namespace learn { namespace mlp {
 
diff --git a/xbob/learn/mlp/cxx/machine.cpp b/bob/learn/mlp/cxx/machine.cpp
similarity index 99%
rename from xbob/learn/mlp/cxx/machine.cpp
rename to bob/learn/mlp/cxx/machine.cpp
index 27ce9ef..6ccb42a 100644
--- a/xbob/learn/mlp/cxx/machine.cpp
+++ b/bob/learn/mlp/cxx/machine.cpp
@@ -17,7 +17,7 @@
 #include <bob/core/assert.h>
 #include <bob/math/linear.h>
 
-#include <xbob.learn.mlp/machine.h>
+#include <bob.learn.mlp/machine.h>
 
 bob::learn::mlp::Machine::Machine (size_t input, size_t output):
   m_input_sub(input),
diff --git a/xbob/learn/mlp/cxx/roll.cpp b/bob/learn/mlp/cxx/roll.cpp
similarity index 98%
rename from xbob/learn/mlp/cxx/roll.cpp
rename to bob/learn/mlp/cxx/roll.cpp
index d76de0a..c4b8aa9 100644
--- a/xbob/learn/mlp/cxx/roll.cpp
+++ b/bob/learn/mlp/cxx/roll.cpp
@@ -5,7 +5,7 @@
  * Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
  */
 
-#include <xbob.learn.mlp/roll.h>
+#include <bob.learn.mlp/roll.h>
 
 int bob::learn::mlp::detail::getNbParameters(const bob::learn::mlp::Machine& machine)
 {
diff --git a/xbob/learn/mlp/cxx/rprop.cpp b/bob/learn/mlp/cxx/rprop.cpp
similarity index 99%
rename from xbob/learn/mlp/cxx/rprop.cpp
rename to bob/learn/mlp/cxx/rprop.cpp
index 09ff3fc..361cdc0 100644
--- a/xbob/learn/mlp/cxx/rprop.cpp
+++ b/bob/learn/mlp/cxx/rprop.cpp
@@ -13,7 +13,7 @@
 #include <bob/core/array_copy.h>
 #include <bob/math/linear.h>
 
-#include <xbob.learn.mlp/rprop.h>
+#include <bob.learn.mlp/rprop.h>
 
 bob::learn::mlp::RProp::RProp(size_t batch_size,
     boost::shared_ptr<bob::learn::mlp::Cost> cost):
diff --git a/xbob/learn/mlp/cxx/shuffler.cpp b/bob/learn/mlp/cxx/shuffler.cpp
similarity index 99%
rename from xbob/learn/mlp/cxx/shuffler.cpp
rename to bob/learn/mlp/cxx/shuffler.cpp
index 63cccad..7ea5435 100644
--- a/xbob/learn/mlp/cxx/shuffler.cpp
+++ b/bob/learn/mlp/cxx/shuffler.cpp
@@ -14,7 +14,7 @@
 #include <bob/core/assert.h>
 #include <bob/core/array_copy.h>
 
-#include <xbob.learn.mlp/shuffler.h>
+#include <bob.learn.mlp/shuffler.h>
 
 bob::learn::mlp::DataShuffler::DataShuffler
 (const std::vector<blitz::Array<double,2> >& data,
diff --git a/xbob/learn/mlp/cxx/square_error.cpp b/bob/learn/mlp/cxx/square_error.cpp
similarity index 95%
rename from xbob/learn/mlp/cxx/square_error.cpp
rename to bob/learn/mlp/cxx/square_error.cpp
index def3864..6fcf37d 100644
--- a/xbob/learn/mlp/cxx/square_error.cpp
+++ b/bob/learn/mlp/cxx/square_error.cpp
@@ -9,7 +9,7 @@
 
 #include <cmath>
 
-#include <xbob.learn.mlp/square_error.h>
+#include <bob.learn.mlp/square_error.h>
 
 namespace bob { namespace learn { namespace mlp {
 
diff --git a/xbob/learn/mlp/cxx/trainer.cpp b/bob/learn/mlp/cxx/trainer.cpp
similarity index 99%
rename from xbob/learn/mlp/cxx/trainer.cpp
rename to bob/learn/mlp/cxx/trainer.cpp
index 902986c..f789e79 100644
--- a/xbob/learn/mlp/cxx/trainer.cpp
+++ b/bob/learn/mlp/cxx/trainer.cpp
@@ -11,7 +11,7 @@
 #include <bob/core/check.h>
 #include <bob/math/linear.h>
 
-#include <xbob.learn.mlp/trainer.h>
+#include <bob.learn.mlp/trainer.h>
 
 bob::learn::mlp::Trainer::Trainer(size_t batch_size,
     boost::shared_ptr<bob::learn::mlp::Cost> cost):
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/api.h b/bob/learn/mlp/include/bob.learn.mlp/api.h
similarity index 68%
rename from xbob/learn/mlp/include/xbob.learn.mlp/api.h
rename to bob/learn/mlp/include/bob.learn.mlp/api.h
index b27a123..05dba5a 100644
--- a/xbob/learn/mlp/include/xbob.learn.mlp/api.h
+++ b/bob/learn/mlp/include/bob.learn.mlp/api.h
@@ -3,12 +3,12 @@
  * @date Thu 24 Apr 17:32:07 2014 CEST
  */
 
-#ifndef XBOB_LEARN_MLP_H
-#define XBOB_LEARN_MLP_H
+#ifndef BOB_LEARN_MLP_H
+#define BOB_LEARN_MLP_H
 
 #include <Python.h>
 #include <boost/shared_ptr.hpp>
-#include <xbob.learn.mlp/config.h>
+#include <bob.learn.mlp/config.h>
 
 #include "machine.h"
 #include "cost.h"
@@ -19,8 +19,8 @@
 #include "backprop.h"
 #include "rprop.h"
 
-#define XBOB_LEARN_MLP_MODULE_PREFIX xbob.learn.mlp
-#define XBOB_LEARN_MLP_MODULE_NAME _library
+#define BOB_LEARN_MLP_MODULE_PREFIX bob.learn.mlp
+#define BOB_LEARN_MLP_MODULE_NAME _library
 
 /*******************
  * C API functions *
@@ -28,20 +28,20 @@
 
 /* Enum defining entries in the function table */
 enum _PyBobLearnMLP_ENUM {
-  PyXbobLearnMLP_APIVersion_NUM = 0,
-  // Bindings for xbob.learn.mlp.Machine
+  PyBobLearnMLP_APIVersion_NUM = 0,
+  // Bindings for bob.learn.mlp.Machine
   PyBobLearnMLPMachine_Type_NUM,
   PyBobLearnMLPMachine_Check_NUM,
-  // Bindings for xbob.learn.mlp.Cost and variants
+  // Bindings for bob.learn.mlp.Cost and variants
   PyBobLearnCost_Type_NUM,
   PyBobLearnCost_Check_NUM,
   PyBobLearnCost_NewFromCost_NUM,
   PyBobLearnSquareError_Type_NUM,
   PyBobLearnCrossEntropyLoss_Type_NUM,
-  // Bindings for xbob.learn.mlp.DataShuffler
+  // Bindings for bob.learn.mlp.DataShuffler
   PyBobLearnDataShuffler_Type_NUM,
   PyBobLearnDataShuffler_Check_NUM,
-  // Bindings for xbob.learn.mlp.Trainer
+  // Bindings for bob.learn.mlp.Trainer
   PyBobLearnMLPTrainer_Type_NUM,
   PyBobLearnMLPTrainer_Check_NUM,
   PyBobLearnMLPBackProp_Type_NUM,
@@ -49,17 +49,17 @@ enum _PyBobLearnMLP_ENUM {
   PyBobLearnMLPRProp_Type_NUM,
   PyBobLearnMLPRProp_Check_NUM,
   // Total number of C API pointers
-  PyXbobLearnMLP_API_pointers
+  PyBobLearnMLP_API_pointers
 };
 
 /**************
  * Versioning *
  **************/
 
-#define PyXbobLearnMLP_APIVersion_TYPE int
+#define PyBobLearnMLP_APIVersion_TYPE int
 
 /***************************************
- * Bindings for xbob.learn.mlp.Machine *
+ * Bindings for bob.learn.mlp.Machine *
  ***************************************/
 
 typedef struct {
@@ -110,7 +110,7 @@ typedef struct {
 #define PyBobLearnDataShuffler_Check_PROTO (PyObject* o)
 
 /***************************************
- * Bindings for xbob.learn.mlp.Trainer *
+ * Bindings for bob.learn.mlp.Trainer *
  ***************************************/
 
 typedef struct {
@@ -143,18 +143,18 @@ typedef struct {
 #define PyBobLearnMLPRProp_Check_RET int
 #define PyBobLearnMLPRProp_Check_PROTO (PyObject* o)
 
-#ifdef XBOB_LEARN_MLP_MODULE
+#ifdef BOB_LEARN_MLP_MODULE
 
-  /* This section is used when compiling `xbob.learn.mlp' itself */
+  /* This section is used when compiling `bob.learn.mlp' itself */
 
   /**************
    * Versioning *
    **************/
 
-  extern int PyXbobLearnMLP_APIVersion;
+  extern int PyBobLearnMLP_APIVersion;
 
   /***************************************
-   * Bindings for xbob.learn.mlp.Machine *
+   * Bindings for bob.learn.mlp.Machine *
    ***************************************/
 
   extern PyBobLearnMLPMachine_Type_TYPE PyBobLearnMLPMachine_Type;
@@ -162,7 +162,7 @@ typedef struct {
   PyBobLearnMLPMachine_Check_RET PyBobLearnMLPMachine_Check PyBobLearnMLPMachine_Check_PROTO;
 
   /************************************
-   * Bindings for xbob.learn.mlp.Cost *
+   * Bindings for bob.learn.mlp.Cost *
    ************************************/
 
   extern PyBobLearnCost_Type_TYPE PyBobLearnCost_Type;
@@ -176,7 +176,7 @@ typedef struct {
   PyBobLearnCost_NewFromCost_RET PyBobLearnCost_NewFromCost PyBobLearnCost_NewFromCost_PROTO;
 
   /********************************************
-   * Bindings for xbob.learn.mlp.DataShuffler *
+   * Bindings for bob.learn.mlp.DataShuffler *
    ********************************************/
 
   extern PyBobLearnDataShuffler_Type_TYPE PyBobLearnDataShuffler_Type;
@@ -184,7 +184,7 @@ typedef struct {
   PyBobLearnDataShuffler_Check_RET PyBobLearnDataShuffler_Check PyBobLearnDataShuffler_Check_PROTO;
 
   /***************************************
-   * Bindings for xbob.learn.mlp.Trainer *
+   * Bindings for bob.learn.mlp.Trainer *
    ***************************************/
 
   extern PyBobLearnMLPTrainer_Type_TYPE PyBobLearnMLPTrainer_Type;
@@ -201,7 +201,7 @@ typedef struct {
 
 #else
 
-  /* This section is used in modules that use `xbob.learn.mlp's' C-API */
+  /* This section is used in modules that use `bob.learn.mlp's' C-API */
 
 /************************************************************************
  * Macros to avoid symbol collision and allow for separate compilation. *
@@ -210,18 +210,18 @@ typedef struct {
  ************************************************************************/
 
 #  if defined(PY_ARRAY_UNIQUE_SYMBOL)
-#    define XBOB_LEARN_MLP_MAKE_API_NAME_INNER(a) XBOB_LEARN_MLP_ ## a
-#    define XBOB_LEARN_MLP_MAKE_API_NAME(a) XBOB_LEARN_MLP_MAKE_API_NAME_INNER(a)
-#    define PyXbobLearnMLP_API XBOB_LEARN_MLP_MAKE_API_NAME(PY_ARRAY_UNIQUE_SYMBOL)
+#    define BOB_LEARN_MLP_MAKE_API_NAME_INNER(a) BOB_LEARN_MLP_ ## a
+#    define BOB_LEARN_MLP_MAKE_API_NAME(a) BOB_LEARN_MLP_MAKE_API_NAME_INNER(a)
+#    define PyBobLearnMLP_API BOB_LEARN_MLP_MAKE_API_NAME(PY_ARRAY_UNIQUE_SYMBOL)
 #  endif
 
 #  if defined(NO_IMPORT_ARRAY)
-  extern void **PyXbobLearnMLP_API;
+  extern void **PyBobLearnMLP_API;
 #  else
 #    if defined(PY_ARRAY_UNIQUE_SYMBOL)
-  void **PyXbobLearnMLP_API;
+  void **PyBobLearnMLP_API;
 #    else
-  static void **PyXbobLearnMLP_API=NULL;
+  static void **PyBobLearnMLP_API=NULL;
 #    endif
 #  endif
 
@@ -229,53 +229,53 @@ typedef struct {
    * Versioning *
    **************/
 
-# define PyXbobLearnMLP_APIVersion (*(PyXbobLearnMLP_APIVersion_TYPE *)PyXbobLearnMLP_API[PyXbobLearnMLP_APIVersion_NUM])
+# define PyBobLearnMLP_APIVersion (*(PyBobLearnMLP_APIVersion_TYPE *)PyBobLearnMLP_API[PyBobLearnMLP_APIVersion_NUM])
 
   /***************************************
-   * Bindings for xbob.learn.mlp.Machine *
+   * Bindings for bob.learn.mlp.Machine *
    ***************************************/
 
-# define PyBobLearnMLPMachine_Type (*(PyBobLearnMLPMachine_Type_TYPE *)PyXbobLearnMLP_API[PyBobLearnMLPMachine_Type_NUM])
+# define PyBobLearnMLPMachine_Type (*(PyBobLearnMLPMachine_Type_TYPE *)PyBobLearnMLP_API[PyBobLearnMLPMachine_Type_NUM])
 
-# define PyBobLearnMLPMachine_Check (*(PyBobLearnMLPMachine_Check_RET (*)PyBobLearnMLPMachine_Check_PROTO) PyXbobLearnMLP_API[PyBobLearnMLPMachine_Check_NUM])
+# define PyBobLearnMLPMachine_Check (*(PyBobLearnMLPMachine_Check_RET (*)PyBobLearnMLPMachine_Check_PROTO) PyBobLearnMLP_API[PyBobLearnMLPMachine_Check_NUM])
 
   /************************************
-   * Bindings for xbob.learn.mlp.Cost *
+   * Bindings for bob.learn.mlp.Cost *
    ************************************/
 
-# define PyBobLearnCost_Type (*(PyBobLearnCost_Type_TYPE *)PyXbobLearnMLP_API[PyBobLearnCost_Type_NUM])
+# define PyBobLearnCost_Type (*(PyBobLearnCost_Type_TYPE *)PyBobLearnMLP_API[PyBobLearnCost_Type_NUM])
 
-# define PyBobLearnCost_Check (*(PyBobLearnCost_Check_RET (*)PyBobLearnCost_Check_PROTO) PyXbobLearnMLP_API[PyBobLearnCost_Check_NUM])
+# define PyBobLearnCost_Check (*(PyBobLearnCost_Check_RET (*)PyBobLearnCost_Check_PROTO) PyBobLearnMLP_API[PyBobLearnCost_Check_NUM])
 
-# define PyBobLearnCost_NewFromCost (*(PyBobLearnCost_NewFromCost_RET (*)PyBobLearnCost_NewFromCost_PROTO) PyXbobLearnMLP_API[PyBobLearnCost_NewFromCost_NUM])
+# define PyBobLearnCost_NewFromCost (*(PyBobLearnCost_NewFromCost_RET (*)PyBobLearnCost_NewFromCost_PROTO) PyBobLearnMLP_API[PyBobLearnCost_NewFromCost_NUM])
 
-# define PyBobLearnSquareError_Type (*(PyBobLearnSquareError_Type_TYPE *)PyXbobLearnMLP_API[PyBobLearnSquareError_Type_NUM])
+# define PyBobLearnSquareError_Type (*(PyBobLearnSquareError_Type_TYPE *)PyBobLearnMLP_API[PyBobLearnSquareError_Type_NUM])
 
-# define PyBobLearnCrossEntropyLoss_Type (*(PyBobLearnCrossEntropyLoss_Type_TYPE *)PyXbobLearnMLP_API[PyBobLearnCrossEntropyLoss_Type_NUM])
+# define PyBobLearnCrossEntropyLoss_Type (*(PyBobLearnCrossEntropyLoss_Type_TYPE *)PyBobLearnMLP_API[PyBobLearnCrossEntropyLoss_Type_NUM])
 
   /********************************************
-   * Bindings for xbob.learn.mlp.DataShuffler *
+   * Bindings for bob.learn.mlp.DataShuffler *
    ********************************************/
 
-# define PyBobLearnDataShuffler_Type (*(PyBobLearnDataShuffler_Type_TYPE *)PyXbobLearnMLP_API[PyBobLearnDataShuffler_Type_NUM])
+# define PyBobLearnDataShuffler_Type (*(PyBobLearnDataShuffler_Type_TYPE *)PyBobLearnMLP_API[PyBobLearnDataShuffler_Type_NUM])
 
-# define PyBobLearnDataShuffler_Check (*(PyBobLearnDataShuffler_Check_RET (*)PyBobLearnDataShuffler_Check_PROTO) PyXbobLearnMLP_API[PyBobLearnDataShuffler_Check_NUM])
+# define PyBobLearnDataShuffler_Check (*(PyBobLearnDataShuffler_Check_RET (*)PyBobLearnDataShuffler_Check_PROTO) PyBobLearnMLP_API[PyBobLearnDataShuffler_Check_NUM])
 
   /***************************************
-   * Bindings for xbob.learn.mlp.Trainer *
+   * Bindings for bob.learn.mlp.Trainer *
    ***************************************/
 
-# define PyBobLearnMLPTrainer_Type (*(PyBobLearnMLPTrainer_Type_TYPE *)PyXbobLearnMLP_API[PyBobLearnMLPTrainer_Type_NUM])
+# define PyBobLearnMLPTrainer_Type (*(PyBobLearnMLPTrainer_Type_TYPE *)PyBobLearnMLP_API[PyBobLearnMLPTrainer_Type_NUM])
 
-# define PyBobLearnMLPTrainer_Check (*(PyBobLearnMLPTrainer_Check_RET (*)PyBobLearnMLPTrainer_Check_PROTO) PyXbobLearnMLP_API[PyBobLearnMLPTrainer_Check_NUM])
+# define PyBobLearnMLPTrainer_Check (*(PyBobLearnMLPTrainer_Check_RET (*)PyBobLearnMLPTrainer_Check_PROTO) PyBobLearnMLP_API[PyBobLearnMLPTrainer_Check_NUM])
 
-# define PyBobLearnMLPBackProp_Type (*(PyBobLearnMLPBackProp_Type_TYPE *)PyXbobLearnMLP_API[PyBobLearnMLPBackProp_Type_NUM])
+# define PyBobLearnMLPBackProp_Type (*(PyBobLearnMLPBackProp_Type_TYPE *)PyBobLearnMLP_API[PyBobLearnMLPBackProp_Type_NUM])
 
-# define PyBobLearnMLPBackProp_Check (*(PyBobLearnMLPBackProp_Check_RET (*)PyBobLearnMLPBackProp_Check_PROTO) PyXbobLearnMLP_API[PyBobLearnMLPBackProp_Check_NUM])
+# define PyBobLearnMLPBackProp_Check (*(PyBobLearnMLPBackProp_Check_RET (*)PyBobLearnMLPBackProp_Check_PROTO) PyBobLearnMLP_API[PyBobLearnMLPBackProp_Check_NUM])
 
-# define PyBobLearnMLPRProp_Type (*(PyBobLearnMLPRProp_Type_TYPE *)PyXbobLearnMLP_API[PyBobLearnMLPRProp_Type_NUM])
+# define PyBobLearnMLPRProp_Type (*(PyBobLearnMLPRProp_Type_TYPE *)PyBobLearnMLP_API[PyBobLearnMLPRProp_Type_NUM])
 
-# define PyBobLearnMLPRProp_Check (*(PyBobLearnMLPRProp_Check_RET (*)PyBobLearnMLPRProp_Check_PROTO) PyXbobLearnMLP_API[PyBobLearnMLPRProp_Check_NUM])
+# define PyBobLearnMLPRProp_Check (*(PyBobLearnMLPRProp_Check_RET (*)PyBobLearnMLPRProp_Check_PROTO) PyBobLearnMLP_API[PyBobLearnMLPRProp_Check_NUM])
 
 # if !defined(NO_IMPORT_ARRAY)
 
@@ -283,12 +283,12 @@ typedef struct {
    * Returns -1 on error, 0 on success. PyCapsule_Import will set an exception
    * if there's an error.
    */
-  static int import_xbob_learn_mlp(void) {
+  static int import_bob_learn_mlp(void) {
 
     PyObject *c_api_object;
     PyObject *module;
 
-    module = PyImport_ImportModule(BOOST_PP_STRINGIZE(XBOB_LEARN_MLP_MODULE_PREFIX) "." BOOST_PP_STRINGIZE(XBOB_LEARN_MLP_MODULE_NAME));
+    module = PyImport_ImportModule(BOOST_PP_STRINGIZE(BOB_LEARN_MLP_MODULE_PREFIX) "." BOOST_PP_STRINGIZE(BOB_LEARN_MLP_MODULE_NAME));
 
     if (module == NULL) return -1;
 
@@ -301,35 +301,35 @@ typedef struct {
 
 #   if PY_VERSION_HEX >= 0x02070000
     if (PyCapsule_CheckExact(c_api_object)) {
-      PyXbobLearnMLP_API = (void **)PyCapsule_GetPointer(c_api_object,
+      PyBobLearnMLP_API = (void **)PyCapsule_GetPointer(c_api_object,
           PyCapsule_GetName(c_api_object));
     }
 #   else
     if (PyCObject_Check(c_api_object)) {
-      XbobLearnMLP_API = (void **)PyCObject_AsVoidPtr(c_api_object);
+      BobLearnMLP_API = (void **)PyCObject_AsVoidPtr(c_api_object);
     }
 #   endif
 
     Py_DECREF(c_api_object);
     Py_DECREF(module);
 
-    if (!XbobLearnMLP_API) {
+    if (!BobLearnMLP_API) {
       PyErr_Format(PyExc_ImportError,
 #   if PY_VERSION_HEX >= 0x02070000
           "cannot find C/C++ API capsule at `%s.%s._C_API'",
 #   else
           "cannot find C/C++ API cobject at `%s.%s._C_API'",
 #   endif
-          BOOST_PP_STRINGIZE(XBOB_LEARN_MLP_MODULE_PREFIX),
-          BOOST_PP_STRINGIZE(XBOB_LEARN_MLP_MODULE_NAME));
+          BOOST_PP_STRINGIZE(BOB_LEARN_MLP_MODULE_PREFIX),
+          BOOST_PP_STRINGIZE(BOB_LEARN_MLP_MODULE_NAME));
       return -1;
     }
 
     /* Checks that the imported version matches the compiled version */
-    int imported_version = *(int*)PyXbobLearnMLP_API[PyXbobLearnMLP_APIVersion_NUM];
+    int imported_version = *(int*)PyBobLearnMLP_API[PyBobLearnMLP_APIVersion_NUM];
 
-    if (XBOB_LEARN_MLP_API_VERSION != imported_version) {
-      PyErr_Format(PyExc_ImportError, "%s.%s import error: you compiled against API version 0x%04x, but are now importing an API with version 0x%04x which is not compatible - check your Python runtime environment for errors", BOOST_PP_STRINGIZE(XBOB_LEARN_MLP_MODULE_PREFIX), BOOST_PP_STRINGIZE(XBOB_LEARN_MLP_MODULE_NAME), XBOB_LEARN_MLP_API_VERSION, imported_version);
+    if (BOB_LEARN_MLP_API_VERSION != imported_version) {
+      PyErr_Format(PyExc_ImportError, "%s.%s import error: you compiled against API version 0x%04x, but are now importing an API with version 0x%04x which is not compatible - check your Python runtime environment for errors", BOOST_PP_STRINGIZE(BOB_LEARN_MLP_MODULE_PREFIX), BOOST_PP_STRINGIZE(BOB_LEARN_MLP_MODULE_NAME), BOB_LEARN_MLP_API_VERSION, imported_version);
       return -1;
     }
 
@@ -340,6 +340,6 @@ typedef struct {
 
 # endif //!defined(NO_IMPORT_ARRAY)
 
-#endif /* XBOB_LEARN_MLP_MODULE */
+#endif /* BOB_LEARN_MLP_MODULE */
 
-#endif /* XBOB_LEARN_MLP_H */
+#endif /* BOB_LEARN_MLP_H */
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/backprop.h b/bob/learn/mlp/include/bob.learn.mlp/backprop.h
similarity index 100%
rename from xbob/learn/mlp/include/xbob.learn.mlp/backprop.h
rename to bob/learn/mlp/include/bob.learn.mlp/backprop.h
diff --git a/bob/learn/mlp/include/bob.learn.mlp/config.h b/bob/learn/mlp/include/bob.learn.mlp/config.h
new file mode 100644
index 0000000..1744dde
--- /dev/null
+++ b/bob/learn/mlp/include/bob.learn.mlp/config.h
@@ -0,0 +1,14 @@
+/**
+ * @author Andre Anjos <andre.anjos@idiap.ch>
+ * @date Thu 24 Apr 17:31:59 2014 CEST
+ *
+ * @brief General directives for all modules in bob.learn.mlp
+ */
+
+#ifndef BOB_LEARN_MLP_CONFIG_H
+#define BOB_LEARN_MLP_CONFIG_H
+
+/* Macros that define versions and important names */
+#define BOB_LEARN_MLP_API_VERSION 0x0200
+
+#endif /* BOB_LEARN_MLP_CONFIG_H */
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/cost.h b/bob/learn/mlp/include/bob.learn.mlp/cost.h
similarity index 100%
rename from xbob/learn/mlp/include/xbob.learn.mlp/cost.h
rename to bob/learn/mlp/include/bob.learn.mlp/cost.h
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/cross_entropy.h b/bob/learn/mlp/include/bob.learn.mlp/cross_entropy.h
similarity index 100%
rename from xbob/learn/mlp/include/xbob.learn.mlp/cross_entropy.h
rename to bob/learn/mlp/include/bob.learn.mlp/cross_entropy.h
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/machine.h b/bob/learn/mlp/include/bob.learn.mlp/machine.h
similarity index 100%
rename from xbob/learn/mlp/include/xbob.learn.mlp/machine.h
rename to bob/learn/mlp/include/bob.learn.mlp/machine.h
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/roll.h b/bob/learn/mlp/include/bob.learn.mlp/roll.h
similarity index 100%
rename from xbob/learn/mlp/include/xbob.learn.mlp/roll.h
rename to bob/learn/mlp/include/bob.learn.mlp/roll.h
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/rprop.h b/bob/learn/mlp/include/bob.learn.mlp/rprop.h
similarity index 100%
rename from xbob/learn/mlp/include/xbob.learn.mlp/rprop.h
rename to bob/learn/mlp/include/bob.learn.mlp/rprop.h
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/shuffler.h b/bob/learn/mlp/include/bob.learn.mlp/shuffler.h
similarity index 100%
rename from xbob/learn/mlp/include/xbob.learn.mlp/shuffler.h
rename to bob/learn/mlp/include/bob.learn.mlp/shuffler.h
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/square_error.h b/bob/learn/mlp/include/bob.learn.mlp/square_error.h
similarity index 100%
rename from xbob/learn/mlp/include/xbob.learn.mlp/square_error.h
rename to bob/learn/mlp/include/bob.learn.mlp/square_error.h
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/trainer.h b/bob/learn/mlp/include/bob.learn.mlp/trainer.h
similarity index 100%
rename from xbob/learn/mlp/include/xbob.learn.mlp/trainer.h
rename to bob/learn/mlp/include/bob.learn.mlp/trainer.h
diff --git a/xbob/learn/mlp/machine.cpp b/bob/learn/mlp/machine.cpp
similarity index 98%
rename from xbob/learn/mlp/machine.cpp
rename to bob/learn/mlp/machine.cpp
index abdac46..1ad2f5a 100644
--- a/xbob/learn/mlp/machine.cpp
+++ b/bob/learn/mlp/machine.cpp
@@ -7,20 +7,20 @@
  * Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
  */
 
-#define XBOB_LEARN_MLP_MODULE
-#include <xbob.blitz/cppapi.h>
-#include <xbob.blitz/cleanup.h>
-#include <xbob.io.base/api.h>
-#include <xbob.learn.activation/api.h>
-#include <xbob.learn.mlp/api.h>
-#include <xbob.core/random.h>
+#define BOB_LEARN_MLP_MODULE
+#include <bob.blitz/cppapi.h>
+#include <bob.blitz/cleanup.h>
+#include <bob.io.base/api.h>
+#include <bob.learn.activation/api.h>
+#include <bob.learn.mlp/api.h>
+#include <bob.core/random.h>
 #include <structmember.h>
 
 /****************************************
  * Implementation of Machine base class *
  ****************************************/
 
-PyDoc_STRVAR(s_machine_str, XBOB_EXT_MODULE_PREFIX ".Machine");
+PyDoc_STRVAR(s_machine_str, BOB_EXT_MODULE_PREFIX ".Machine");
 
 PyDoc_STRVAR(s_machine_doc,
 "Machine(shape)\n\
@@ -42,7 +42,7 @@ MLPs normally are multi-layered systems, with 1 or more hidden\n\
 layers. As a special case, this implementation also supports\n\
 connecting the input directly to the output by means of a single\n\
 weight matrix. This is equivalent of a\n\
-:py:class:`xbob.learn.linear.Machine`, with the advantage it can\n\
+:py:class:`bob.learn.linear.Machine`, with the advantage it can\n\
 be trained by trainers defined in this package.\n\
 \n\
 An MLP can be constructed in different ways. In the first form,\n\
@@ -698,7 +698,7 @@ PyObject* PyBobLearnMLPMachine_Repr(PyBobLearnMLPMachineObject* self) {
   /**
    * Expected output:
    *
-   * <xbob.learn.linear.MLP float64@(3, 5, 2) [hidden: f(z) = tanh(z), out: f(z) = * tanh(z)]>
+   * <bob.learn.linear.MLP float64@(3, 5, 2) [hidden: f(z) = tanh(z), out: f(z) = * tanh(z)]>
    */
 
   auto weights = make_safe(PyBobLearnMLPMachine_getWeights(self, 0));
@@ -872,7 +872,7 @@ PyDoc_STRVAR(s_load_str, "load");
 PyDoc_STRVAR(s_load_doc,
 "o.load(f) -> None\n\
 \n\
-Loads itself from a :py:class:`xbob.io.HDF5File`\n\
+Loads itself from a :py:class:`bob.io.HDF5File`\n\
 \n\
 ");
 
@@ -906,7 +906,7 @@ PyDoc_STRVAR(s_save_str, "save");
 PyDoc_STRVAR(s_save_doc,
 "o.save(f) -> None\n\
 \n\
-Saves itself at a :py:class:`xbob.io.HDF5File`\n\
+Saves itself at a :py:class:`bob.io.HDF5File`\n\
 \n\
 ");
 
diff --git a/xbob/learn/mlp/main.cpp b/bob/learn/mlp/main.cpp
similarity index 76%
rename from xbob/learn/mlp/main.cpp
rename to bob/learn/mlp/main.cpp
index 20f94b0..ffc0216 100644
--- a/xbob/learn/mlp/main.cpp
+++ b/bob/learn/mlp/main.cpp
@@ -5,17 +5,17 @@
  * @brief Bindings to bob::learn::mlp
  */
 
-#define XBOB_LEARN_MLP_MODULE
-#include <xbob.learn.mlp/api.h>
+#define BOB_LEARN_MLP_MODULE
+#include <bob.learn.mlp/api.h>
 
 #ifdef NO_IMPORT_ARRAY
 #undef NO_IMPORT_ARRAY
 #endif
-#include <xbob.blitz/capi.h>
-#include <xbob.blitz/cleanup.h>
-#include <xbob.io.base/api.h>
-#include <xbob.learn.activation/api.h>
-#include <xbob.core/random.h>
+#include <bob.blitz/capi.h>
+#include <bob.blitz/cleanup.h>
+#include <bob.io.base/api.h>
+#include <bob.learn.activation/api.h>
+#include <bob.core/random.h>
 
 PyDoc_STRVAR(s_unroll_str, "unroll");
 PyDoc_STRVAR(s_unroll_doc,
@@ -31,7 +31,7 @@ generic optimization procedures for the task of training MLPs.\n\
 \n\
 Keyword parameters:\n\
 \n\
-machine, :py:class:`xbob.learn.mlp.Machine`\n\
+machine, :py:class:`bob.learn.mlp.Machine`\n\
   An MLP that will have its weights and biases unrolled into a 1D array\n\
 \n\
 weights, sequence of 2D 64-bit float arrays\n\
@@ -45,7 +45,7 @@ weights, sequence of 2D 64-bit float arrays\n\
      \n\
      Other checks are disabled as this is considered an *expert* API.\n\
      If you plan to unroll the weights and biases on a\n\
-     :py:class:`xbob.learn.mlp.Machine`, notice that in a given\n\
+     :py:class:`bob.learn.mlp.Machine`, notice that in a given\n\
      ``weights`` sequence, the number of outputs in layer ``k``\n\
      must match the number of inputs on layer ``k+1`` and the\n\
      number of biases on layer ``k``. In practice, you must assert\n\
@@ -91,7 +91,7 @@ generic optimization procedures for the task of training MLPs.\n\
 \n\
 Keyword parameters:\n\
 \n\
-machine, :py:class:`xbob.learn.mlp.Machine`\n\
+machine, :py:class:`bob.learn.mlp.Machine`\n\
   An MLP that will have its weights and biases rolled from a 1D array\n\
 \n\
 weights, sequence of 2D 64-bit float arrays\n\
@@ -105,7 +105,7 @@ weights, sequence of 2D 64-bit float arrays\n\
      \n\
      Other checks are disabled as this is considered an *expert* API.\n\
      If you plan to roll the weights and biases on a\n\
-     :py:class:`xbob.learn.mlp.Machine`, notice that in a given\n\
+     :py:class:`bob.learn.mlp.Machine`, notice that in a given\n\
      ``weights`` sequence, the number of outputs in layer ``k``\n\
      must match the number of inputs on layer ``k+1`` and the\n\
      number of biases on layer ``k``. In practice, you must assert\n\
@@ -147,7 +147,7 @@ Returns the total number of parameters in an MLP.\n\
 \n\
 Keyword parameters:\n\
 \n\
-machine, :py:class:`xbob.learn.mlp.Machine`\n\
+machine, :py:class:`bob.learn.mlp.Machine`\n\
   Using the first call API, counts the total number of parameters in\n\
   an MLP.\n\
 \n\
@@ -162,7 +162,7 @@ weights, sequence of 2D 64-bit float arrays\n\
      \n\
      Other checks are disabled as this is considered an *expert* API.\n\
      If you plan to unroll the weights and biases on a\n\
-     :py:class:`xbob.learn.mlp.Machine`, notice that in a given\n\
+     :py:class:`bob.learn.mlp.Machine`, notice that in a given\n\
      ``weights`` sequence the number of outputs in layer ``k``\n\
      must match the number of inputs on layer ``k+1`` and the\n\
      number of bias on layer ``k``. In practice, you must assert\n\
@@ -206,12 +206,12 @@ static PyMethodDef module_methods[] = {
 
 PyDoc_STRVAR(module_docstr, "bob's multi-layer perceptron machine and trainers");
 
-int PyXbobLearnMLP_APIVersion = XBOB_LEARN_MLP_API_VERSION;
+int PyBobLearnMLP_APIVersion = BOB_LEARN_MLP_API_VERSION;
 
 #if PY_VERSION_HEX >= 0x03000000
 static PyModuleDef module_definition = {
   PyModuleDef_HEAD_INIT,
-  XBOB_EXT_MODULE_NAME,
+  BOB_EXT_MODULE_NAME,
   module_docstr,
   -1,
   module_methods,
@@ -248,14 +248,14 @@ static PyObject* create_module (void) {
 # if PY_VERSION_HEX >= 0x03000000
   PyObject* m = PyModule_Create(&module_definition);
 # else
-  PyObject* m = Py_InitModule3(XBOB_EXT_MODULE_NAME, module_methods, module_docstr);
+  PyObject* m = Py_InitModule3(BOB_EXT_MODULE_NAME, module_methods, module_docstr);
 # endif
   if (!m) return 0;
   auto m_ = make_safe(m);
 
   /* register some constants */
-  if (PyModule_AddIntConstant(m, "__api_version__", XBOB_LEARN_MLP_API_VERSION) < 0) return 0;
-  if (PyModule_AddStringConstant(m, "__version__", XBOB_EXT_MODULE_VERSION) < 0) return 0;
+  if (PyModule_AddIntConstant(m, "__api_version__", BOB_LEARN_MLP_API_VERSION) < 0) return 0;
+  if (PyModule_AddStringConstant(m, "__version__", BOB_EXT_MODULE_VERSION) < 0) return 0;
 
   /* register the types to python */
   Py_INCREF(&PyBobLearnMLPMachine_Type);
@@ -282,7 +282,7 @@ static PyObject* create_module (void) {
   Py_INCREF(&PyBobLearnMLPRProp_Type);
   if (PyModule_AddObject(m, "RProp", (PyObject *)&PyBobLearnMLPRProp_Type) < 0) return 0;
 
-  static void* PyXbobLearnMLP_API[PyXbobLearnMLP_API_pointers];
+  static void* PyBobLearnMLP_API[PyBobLearnMLP_API_pointers];
 
   /* exhaustive list of C APIs */
 
@@ -290,91 +290,91 @@ static PyObject* create_module (void) {
    * Versioning *
    **************/
 
-  PyXbobLearnMLP_API[PyXbobLearnMLP_APIVersion_NUM] = (void *)&PyXbobLearnMLP_APIVersion;
+  PyBobLearnMLP_API[PyBobLearnMLP_APIVersion_NUM] = (void *)&PyBobLearnMLP_APIVersion;
 
   /***************************************
-   * Bindings for xbob.learn.mlp.Machine *
+   * Bindings for bob.learn.mlp.Machine *
    ***************************************/
 
-  PyXbobLearnMLP_API[PyBobLearnMLPMachine_Type_NUM] = (void *)&PyBobLearnMLPMachine_Type;
+  PyBobLearnMLP_API[PyBobLearnMLPMachine_Type_NUM] = (void *)&PyBobLearnMLPMachine_Type;
 
-  PyXbobLearnMLP_API[PyBobLearnMLPMachine_Check_NUM] = (void *)&PyBobLearnMLPMachine_Check;
+  PyBobLearnMLP_API[PyBobLearnMLPMachine_Check_NUM] = (void *)&PyBobLearnMLPMachine_Check;
 
   /************************************
-   * Bindings for xbob.learn.mlp.Cost *
+   * Bindings for bob.learn.mlp.Cost *
    ************************************/
 
-  PyXbobLearnMLP_API[PyBobLearnCost_Type_NUM] = (void *)&PyBobLearnCost_Type;
+  PyBobLearnMLP_API[PyBobLearnCost_Type_NUM] = (void *)&PyBobLearnCost_Type;
 
-  PyXbobLearnMLP_API[PyBobLearnCost_Check_NUM] = (void *)&PyBobLearnCost_Check;
+  PyBobLearnMLP_API[PyBobLearnCost_Check_NUM] = (void *)&PyBobLearnCost_Check;
 
-  PyXbobLearnMLP_API[PyBobLearnCost_NewFromCost_NUM] = (void *)&PyBobLearnCost_NewFromCost;
+  PyBobLearnMLP_API[PyBobLearnCost_NewFromCost_NUM] = (void *)&PyBobLearnCost_NewFromCost;
 
-  PyXbobLearnMLP_API[PyBobLearnSquareError_Type_NUM] = (void *)&PyBobLearnSquareError_Type;
+  PyBobLearnMLP_API[PyBobLearnSquareError_Type_NUM] = (void *)&PyBobLearnSquareError_Type;
 
-  PyXbobLearnMLP_API[PyBobLearnCrossEntropyLoss_Type_NUM] = (void *)&PyBobLearnCrossEntropyLoss_Type;
+  PyBobLearnMLP_API[PyBobLearnCrossEntropyLoss_Type_NUM] = (void *)&PyBobLearnCrossEntropyLoss_Type;
 
   /********************************************
-   * Bindings for xbob.learn.mlp.DataShuffler *
+   * Bindings for bob.learn.mlp.DataShuffler *
    ********************************************/
 
-  PyXbobLearnMLP_API[PyBobLearnDataShuffler_Type_NUM] = (void *)&PyBobLearnDataShuffler_Type;
+  PyBobLearnMLP_API[PyBobLearnDataShuffler_Type_NUM] = (void *)&PyBobLearnDataShuffler_Type;
 
-  PyXbobLearnMLP_API[PyBobLearnDataShuffler_Check_NUM] = (void *)&PyBobLearnDataShuffler_Check;
+  PyBobLearnMLP_API[PyBobLearnDataShuffler_Check_NUM] = (void *)&PyBobLearnDataShuffler_Check;
 
   /***************************************
-   * Bindings for xbob.learn.mlp.Trainer *
+   * Bindings for bob.learn.mlp.Trainer *
    ***************************************/
 
-  PyXbobLearnMLP_API[PyBobLearnMLPTrainer_Type_NUM] = (void *)&PyBobLearnMLPTrainer_Type;
+  PyBobLearnMLP_API[PyBobLearnMLPTrainer_Type_NUM] = (void *)&PyBobLearnMLPTrainer_Type;
 
-  PyXbobLearnMLP_API[PyBobLearnMLPTrainer_Check_NUM] = (void *)&PyBobLearnMLPTrainer_Check;
+  PyBobLearnMLP_API[PyBobLearnMLPTrainer_Check_NUM] = (void *)&PyBobLearnMLPTrainer_Check;
 
-  PyXbobLearnMLP_API[PyBobLearnMLPBackProp_Type_NUM] = (void *)&PyBobLearnMLPBackProp_Type;
+  PyBobLearnMLP_API[PyBobLearnMLPBackProp_Type_NUM] = (void *)&PyBobLearnMLPBackProp_Type;
 
-  PyXbobLearnMLP_API[PyBobLearnMLPBackProp_Check_NUM] = (void *)&PyBobLearnMLPBackProp_Check;
+  PyBobLearnMLP_API[PyBobLearnMLPBackProp_Check_NUM] = (void *)&PyBobLearnMLPBackProp_Check;
 
-  PyXbobLearnMLP_API[PyBobLearnMLPRProp_Type_NUM] = (void *)&PyBobLearnMLPRProp_Type;
+  PyBobLearnMLP_API[PyBobLearnMLPRProp_Type_NUM] = (void *)&PyBobLearnMLPRProp_Type;
 
-  PyXbobLearnMLP_API[PyBobLearnMLPRProp_Check_NUM] = (void *)&PyBobLearnMLPRProp_Check;
+  PyBobLearnMLP_API[PyBobLearnMLPRProp_Check_NUM] = (void *)&PyBobLearnMLPRProp_Check;
 
 #if PY_VERSION_HEX >= 0x02070000
 
   /* defines the PyCapsule */
 
-  PyObject* c_api_object = PyCapsule_New((void *)PyXbobLearnMLP_API,
-      XBOB_EXT_MODULE_PREFIX "." XBOB_EXT_MODULE_NAME "._C_API", 0);
+  PyObject* c_api_object = PyCapsule_New((void *)PyBobLearnMLP_API,
+      BOB_EXT_MODULE_PREFIX "." BOB_EXT_MODULE_NAME "._C_API", 0);
 
 #else
 
-  PyObject* c_api_object = PyCObject_FromVoidPtr((void *)PyXbobLearnMLP_API, 0);
+  PyObject* c_api_object = PyCObject_FromVoidPtr((void *)PyBobLearnMLP_API, 0);
 
 #endif
 
   if (c_api_object) PyModule_AddObject(m, "_C_API", c_api_object);
 
   /* imports dependencies */
-  if (import_xbob_blitz() < 0) {
+  if (import_bob_blitz() < 0) {
     PyErr_Print();
-    PyErr_Format(PyExc_ImportError, "cannot import `%s'", XBOB_EXT_MODULE_NAME);
+    PyErr_Format(PyExc_ImportError, "cannot import `%s'", BOB_EXT_MODULE_NAME);
     return 0;
   }
 
-  if (import_xbob_io_base() < 0) {
+  if (import_bob_io_base() < 0) {
     PyErr_Print();
-    PyErr_Format(PyExc_ImportError, "cannot import `%s'", XBOB_EXT_MODULE_NAME);
+    PyErr_Format(PyExc_ImportError, "cannot import `%s'", BOB_EXT_MODULE_NAME);
     return 0;
   }
 
-  if (import_xbob_learn_activation() < 0) {
+  if (import_bob_learn_activation() < 0) {
     PyErr_Print();
-    PyErr_Format(PyExc_ImportError, "cannot import `%s'", XBOB_EXT_MODULE_NAME);
+    PyErr_Format(PyExc_ImportError, "cannot import `%s'", BOB_EXT_MODULE_NAME);
     return 0;
   }
 
-  if (import_xbob_core_random() < 0) {
+  if (import_bob_core_random() < 0) {
     PyErr_Print();
-    PyErr_Format(PyExc_ImportError, "cannot import `%s'", XBOB_EXT_MODULE_NAME);
+    PyErr_Format(PyExc_ImportError, "cannot import `%s'", BOB_EXT_MODULE_NAME);
     return 0;
   }
 
@@ -383,7 +383,7 @@ static PyObject* create_module (void) {
 
 }
 
-PyMODINIT_FUNC XBOB_EXT_ENTRY_NAME (void) {
+PyMODINIT_FUNC BOB_EXT_ENTRY_NAME (void) {
 # if PY_VERSION_HEX >= 0x03000000
   return
 # endif
diff --git a/xbob/learn/mlp/roll.cpp b/bob/learn/mlp/roll.cpp
similarity index 98%
rename from xbob/learn/mlp/roll.cpp
rename to bob/learn/mlp/roll.cpp
index 7155c5d..387974d 100644
--- a/xbob/learn/mlp/roll.cpp
+++ b/bob/learn/mlp/roll.cpp
@@ -6,11 +6,11 @@
  */
 
 
-#define XBOB_LEARN_MLP_MODULE
-#include <xbob.learn.mlp/api.h>
-#include <xbob.learn.mlp/roll.h>
-#include <xbob.blitz/capi.h>
-#include <xbob.blitz/cleanup.h>
+#define BOB_LEARN_MLP_MODULE
+#include <bob.learn.mlp/api.h>
+#include <bob.learn.mlp/roll.h>
+#include <bob.blitz/capi.h>
+#include <bob.blitz/cleanup.h>
 
 #include "utils.h"
 
diff --git a/xbob/learn/mlp/rprop.cpp b/bob/learn/mlp/rprop.cpp
similarity index 98%
rename from xbob/learn/mlp/rprop.cpp
rename to bob/learn/mlp/rprop.cpp
index 4c79cda..7a86291 100644
--- a/xbob/learn/mlp/rprop.cpp
+++ b/bob/learn/mlp/rprop.cpp
@@ -7,10 +7,10 @@
  * Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
  */
 
-#define XBOB_LEARN_MLP_MODULE
-#include <xbob.blitz/cppapi.h>
-#include <xbob.blitz/cleanup.h>
-#include <xbob.learn.mlp/api.h>
+#define BOB_LEARN_MLP_MODULE
+#include <bob.blitz/cppapi.h>
+#include <bob.blitz/cleanup.h>
+#include <bob.learn.mlp/api.h>
 #include <structmember.h>
 
 #include "utils.h"
@@ -19,7 +19,7 @@
  * Implementation of RProp trainer *
  **************************************/
 
-PyDoc_STRVAR(s_trainer_str, XBOB_EXT_MODULE_PREFIX ".RProp");
+PyDoc_STRVAR(s_trainer_str, BOB_EXT_MODULE_PREFIX ".RProp");
 
 PyDoc_STRVAR(s_trainer_doc,
 "RProp(batch_size, cost, [trainer, [train_biases]]) -> new RProp\n\
@@ -51,10 +51,10 @@ batch_size, int\n\
    \n\
       This setting affects the convergence.\n\
 \n\
-cost, :py:class:`xbob.learn.mlp.Cost`\n\
+cost, :py:class:`bob.learn.mlp.Cost`\n\
    An object that can calculate the cost at every iteration.\n\
 \n\
-machine, :py:class:`xbob.learn.mlp.Machine`\n\
+machine, :py:class:`bob.learn.mlp.Machine`\n\
    This parameter that will be used as a basis for this trainer's\n\
    internal properties (cache sizes, for instance).\n\
 \n\
@@ -62,7 +62,7 @@ train_biases, bool\n\
    A boolean indicating if we should train the biases weights (set\n\
    it to ``True``) or not (set it to ``False``).\n\
 \n\
-other, :py:class:`xbob.learn.mlp.Trainer`\n\
+other, :py:class:`bob.learn.mlp.Trainer`\n\
    Another trainer from which this new copy will get its properties\n\
    from. If you use this constructor than a new (deep) copy of the\n\
    trainer is created.\n\
@@ -581,7 +581,7 @@ exception is thrown.\n\
 \n\
    The machine is not initialized randomly at each call to this\n\
    method. It is your task to call\n\
-   :py:meth:`xbob.learn.mlp.Machine.randomize` once at the machine\n\
+   :py:meth:`bob.learn.mlp.Machine.randomize` once on the machine\n\
    you want to train and then call this method as many times as you\n\
    think are necessary. This design allows for a training criteria\n\
    to be encoded outside the scope of this trainer and to this type\n\
@@ -589,9 +589,9 @@ exception is thrown.\n\
 \n\
 Keyword arguments:\n\
 \n\
-machine, :py:class:`xbob.learn.mlp.Machine`\n\
+machine, :py:class:`bob.learn.mlp.Machine`\n\
    The machine that will be trained. You must have called\n\
-   :py:meth:`xbob.learn.mlp.Trainer.initialize` which a similarly\n\
+   :py:meth:`bob.learn.mlp.Trainer.initialize` with a similarly\n\
    configured machine before being able to call this method, or an\n\
    exception may be thrown.\n\
 \n\
diff --git a/xbob/learn/mlp/shuffler.cpp b/bob/learn/mlp/shuffler.cpp
similarity index 98%
rename from xbob/learn/mlp/shuffler.cpp
rename to bob/learn/mlp/shuffler.cpp
index 4e08a23..fe175c0 100644
--- a/xbob/learn/mlp/shuffler.cpp
+++ b/bob/learn/mlp/shuffler.cpp
@@ -7,18 +7,18 @@
  * Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
  */
 
-#define XBOB_LEARN_MLP_MODULE
-#include <xbob.blitz/cppapi.h>
-#include <xbob.blitz/cleanup.h>
-#include <xbob.learn.mlp/api.h>
-#include <xbob.core/random.h>
+#define BOB_LEARN_MLP_MODULE
+#include <bob.blitz/cppapi.h>
+#include <bob.blitz/cleanup.h>
+#include <bob.learn.mlp/api.h>
+#include <bob.core/random.h>
 #include <structmember.h>
 
 /*********************************************
  * Implementation of DataShuffler base class *
  *********************************************/
 
-PyDoc_STRVAR(s_shuffler_str, XBOB_EXT_MODULE_PREFIX ".DataShuffler");
+PyDoc_STRVAR(s_shuffler_str, BOB_EXT_MODULE_PREFIX ".DataShuffler");
 
 PyDoc_STRVAR(s_shuffler_doc,
 "DataShuffler(data, target) -> New DataShuffler\n\
@@ -175,7 +175,7 @@ provided. It is an error not to provide one of ``data``,\n\
 ``target`` or ``n``.\n\
 \n\
 If a random generator ``rng`` is provided, it must of the type\n\
-:py:class:`xbob.core.random.mt19937`. In this case, the shuffler\n\
+:py:class:`bob.core.random.mt19937`. In this case, the shuffler\n\
 is going to use this generator instead of its internal one. This\n\
 mechanism is useful for repeating draws in case of tests.\n\
 \n\
diff --git a/xbob/learn/mlp/test_backprop.py b/bob/learn/mlp/test_backprop.py
similarity index 98%
rename from xbob/learn/mlp/test_backprop.py
rename to bob/learn/mlp/test_backprop.py
index 80d9dbe..c988afc 100644
--- a/xbob/learn/mlp/test_backprop.py
+++ b/bob/learn/mlp/test_backprop.py
@@ -9,7 +9,7 @@
 """
 
 import numpy
-from xbob.learn.activation import HyperbolicTangent, Logistic, Identity
+from bob.learn.activation import HyperbolicTangent, Logistic, Identity
 
 from . import Machine, Trainer, CrossEntropyLoss, SquareError, BackProp
 
diff --git a/xbob/learn/mlp/test_cost.py b/bob/learn/mlp/test_cost.py
similarity index 99%
rename from xbob/learn/mlp/test_cost.py
rename to bob/learn/mlp/test_cost.py
index a6c9637..87eaaa4 100644
--- a/xbob/learn/mlp/test_cost.py
+++ b/bob/learn/mlp/test_cost.py
@@ -12,7 +12,7 @@ import math
 from . import SquareError, CrossEntropyLoss
 from .test_utils import estimate_gradient
 
-from xbob.learn.activation import Logistic, Identity
+from bob.learn.activation import Logistic, Identity
 
 def is_close(x, y, eps=1e-10):
   return (abs(x - y) < eps)
diff --git a/xbob/learn/mlp/test_machine.py b/bob/learn/mlp/test_machine.py
similarity index 95%
rename from xbob/learn/mlp/test_machine.py
rename to bob/learn/mlp/test_machine.py
index 743546f..f2657ab 100644
--- a/xbob/learn/mlp/test_machine.py
+++ b/bob/learn/mlp/test_machine.py
@@ -15,10 +15,10 @@ import nose.tools
 from . import Machine
 from .test_utils import Machine as PythonMachine
 
-import xbob.io.base
-from xbob.io.base.test_utils import temporary_filename
-from xbob.learn.activation import Logistic, HyperbolicTangent
-from xbob.core.random import mt19937
+import bob.io.base
+from bob.io.base.test_utils import temporary_filename
+from bob.learn.activation import Logistic, HyperbolicTangent
+from bob.core.random import mt19937
 
 def test_2in_1out():
 
@@ -224,8 +224,8 @@ def test_persistence():
 
   # creates a file that will be used in the next test!
   machine_file = temporary_filename()
-  m.save(xbob.io.base.HDF5File(machine_file, 'w'))
-  m2 = Machine(xbob.io.base.HDF5File(machine_file))
+  m.save(bob.io.base.HDF5File(machine_file, 'w'))
+  m2 = Machine(bob.io.base.HDF5File(machine_file))
 
   assert m.is_similar_to(m2)
   nose.tools.eq_(m, m2)
@@ -285,14 +285,14 @@ def test_randomness_different():
 def test_randomness_same():
 
   m1 = Machine((2,3,2))
-  rng = xbob.core.random.mt19937(0) #fixed seed
+  rng = bob.core.random.mt19937(0) #fixed seed
   m1.randomize(rng=rng)
 
   for k in range(3):
     time.sleep(0.1)
 
     m2 = Machine((2,3,2))
-    rng = xbob.core.random.mt19937(0) #fixed seed
+    rng = bob.core.random.mt19937(0) #fixed seed
     m2.randomize(rng=rng)
 
     for w1, w2 in zip(m1.weights, m2.weights):
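
The renamed test above exercises HDF5 persistence and seeded randomization of a
``Machine``; the same calls, condensed into a stand-alone sketch (the shape and the
temporary file name are illustrative):

    import os
    import bob.io.base
    import bob.core.random
    from bob.io.base.test_utils import temporary_filename
    from bob.learn.mlp import Machine

    m = Machine((2, 3, 2))
    m.randomize(rng=bob.core.random.mt19937(0))  # fixed seed -> repeatable weights

    path = temporary_filename()
    m.save(bob.io.base.HDF5File(path, 'w'))   # write the machine to HDF5
    m2 = Machine(bob.io.base.HDF5File(path))  # reconstruct it from the file
    os.unlink(path)

    assert m.is_similar_to(m2)
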
diff --git a/xbob/learn/mlp/test_roll.py b/bob/learn/mlp/test_roll.py
similarity index 100%
rename from xbob/learn/mlp/test_roll.py
rename to bob/learn/mlp/test_roll.py
diff --git a/xbob/learn/mlp/test_rprop.py b/bob/learn/mlp/test_rprop.py
similarity index 99%
rename from xbob/learn/mlp/test_rprop.py
rename to bob/learn/mlp/test_rprop.py
index cb8a23e..839ee40 100644
--- a/xbob/learn/mlp/test_rprop.py
+++ b/bob/learn/mlp/test_rprop.py
@@ -9,7 +9,7 @@
 """
 
 import numpy
-from xbob.learn.activation import HyperbolicTangent, Logistic, Identity
+from bob.learn.activation import HyperbolicTangent, Logistic, Identity
 
 from . import Machine, Trainer, CrossEntropyLoss, SquareError, RProp
 
diff --git a/xbob/learn/mlp/test_shuffler.py b/bob/learn/mlp/test_shuffler.py
similarity index 96%
rename from xbob/learn/mlp/test_shuffler.py
rename to bob/learn/mlp/test_shuffler.py
index 918d4a4..dc1e99b 100644
--- a/xbob/learn/mlp/test_shuffler.py
+++ b/bob/learn/mlp/test_shuffler.py
@@ -14,7 +14,7 @@ import nose.tools
 
 from . import DataShuffler
 
-import xbob.core.random
+import bob.core.random
 
 # Some data structures used for the tests
 fixture = dict()
@@ -149,8 +149,8 @@ def test_seeding():
       [fixture['target1'], fixture['target2'], fixture['target3']])
 
   # Use the same seed for 2 different random number generators
-  rng1 = xbob.core.random.mt19937(32)
-  rng2 = xbob.core.random.mt19937(32)
+  rng1 = bob.core.random.mt19937(32)
+  rng2 = bob.core.random.mt19937(32)
 
   [data1, target1] = shuffle1(N, rng=rng1)
   [data2, target2] = shuffle2(N, rng=rng2)
@@ -189,17 +189,17 @@ def test_normalization():
 
 def test_normalization_big():
 
-  rng = xbob.core.random.mt19937()
+  rng = bob.core.random.mt19937()
 
   set1 = []
-  draw25 = xbob.core.random.normal(mean=2.0, sigma=5.0, dtype=float)
+  draw25 = bob.core.random.normal(mean=2.0, sigma=5.0, dtype=float)
   for i in range(10000):
     set1.append(numpy.array([draw25(rng)], dtype='float64'))
   set1 = numpy.array(set1)
   target1 = numpy.array([1], dtype='float64')
 
   set2 = []
-  draw32 = xbob.core.random.normal(mean=3.0, sigma=2.0, dtype=float)
+  draw32 = bob.core.random.normal(mean=3.0, sigma=2.0, dtype=float)
   for i in range(10000):
     set2.append(numpy.array([draw32(rng)], dtype='float64'))
   set2 = numpy.array(set2)
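
The normalization test above builds each set by sampling callable
``bob.core.random.normal`` distributions with a shared ``mt19937`` generator. The
sampling pattern, condensed (the distribution parameters mirror the test; the
tolerance is illustrative):

    import numpy
    import bob.core.random

    rng = bob.core.random.mt19937()

    # distribution objects are callable: passing the rng draws one sample
    draw25 = bob.core.random.normal(mean=2.0, sigma=5.0, dtype=float)
    set1 = numpy.array([[draw25(rng)] for _ in range(10000)], dtype='float64')

    draw32 = bob.core.random.normal(mean=3.0, sigma=2.0, dtype=float)
    set2 = numpy.array([[draw32(rng)] for _ in range(10000)], dtype='float64')

    # the empirical means should approach the configured ones
    assert abs(set1.mean() - 2.0) < 0.5
    assert abs(set2.mean() - 3.0) < 0.5
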
diff --git a/xbob/learn/mlp/test_utils.py b/bob/learn/mlp/test_utils.py
similarity index 100%
rename from xbob/learn/mlp/test_utils.py
rename to bob/learn/mlp/test_utils.py
diff --git a/xbob/learn/mlp/trainer.cpp b/bob/learn/mlp/trainer.cpp
similarity index 98%
rename from xbob/learn/mlp/trainer.cpp
rename to bob/learn/mlp/trainer.cpp
index f5c8fe4..8d577b3 100644
--- a/xbob/learn/mlp/trainer.cpp
+++ b/bob/learn/mlp/trainer.cpp
@@ -7,10 +7,10 @@
  * Copyright (C) 2011-2014 Idiap Research Institute, Martigny, Switzerland
  */
 
-#define XBOB_LEARN_MLP_MODULE
-#include <xbob.blitz/cppapi.h>
-#include <xbob.blitz/cleanup.h>
-#include <xbob.learn.mlp/api.h>
+#define BOB_LEARN_MLP_MODULE
+#include <bob.blitz/cppapi.h>
+#include <bob.blitz/cleanup.h>
+#include <bob.learn.mlp/api.h>
 #include <structmember.h>
 
 #include "utils.h"
@@ -19,7 +19,7 @@
  * Implementation of base Trainer class *
  ****************************************/
 
-PyDoc_STRVAR(s_trainer_str, XBOB_EXT_MODULE_PREFIX ".Trainer");
+PyDoc_STRVAR(s_trainer_str, BOB_EXT_MODULE_PREFIX ".Trainer");
 
 PyDoc_STRVAR(s_trainer_doc,
 "Trainer(batch_size, cost, [trainer, [train_biases]]) -> new Trainer\n\
@@ -48,10 +48,10 @@ batch_size, int\n\
    \n\
       This setting affects the convergence.\n\
 \n\
-cost, :py:class:`xbob.learn.mlp.Cost`\n\
+cost, :py:class:`bob.learn.mlp.Cost`\n\
    An object that can calculate the cost at every iteration.\n\
 \n\
-machine, :py:class:`xbob.learn.mlp.Machine`\n\
+machine, :py:class:`bob.learn.mlp.Machine`\n\
    This parameter will be used as a basis for this trainer's\n\
    internal properties (cache sizes, for instance).\n\
 \n\
@@ -59,7 +59,7 @@ train_biases, bool\n\
    A boolean indicating if we should train the bias weights (set\n\
    it to ``True``) or not (set it to ``False``).\n\
 \n\
-other, :py:class:`xbob.learn.mlp.Trainer`\n\
+other, :py:class:`bob.learn.mlp.Trainer`\n\
    Another trainer from which this new copy will get its properties.\n\
    If you use this constructor, a new (deep) copy of the\n\
    trainer is created.\n\
@@ -218,8 +218,8 @@ static int PyBobLearnMLPTrainer_setBatchSize
 
 PyDoc_STRVAR(s_cost_object_str, "cost_object");
 PyDoc_STRVAR(s_cost_object_doc,
-"An object, derived from :py:class:`xbob.learn.mlp.Cost` (e.g.\n\
-:py:class:`xbob.learn.mlp.SquareError` or \n\
+"An object, derived from :py:class:`bob.learn.mlp.Cost` (e.g.\n\
+:py:class:`bob.learn.mlp.SquareError` or \n\
 :py:class:`bob.learn.mlp.CrossEntropyLoss`), that is used to evaluate\n\
 the cost (a.k.a. *loss*) and the derivatives given the input, the\n\
 target and the MLP structure.");
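
The constructor documented above takes a batch size, a cost object and, optionally,
a machine and the ``train_biases`` flag. A hedged sketch of constructing the base
``Trainer`` directly (in practice one would normally use ``BackProp`` or ``RProp``,
which derive from it; the shape and batch size are illustrative):

    from bob.learn.mlp import Machine, Trainer, SquareError

    machine = Machine((4, 3, 1))
    cost = SquareError(machine.output_activation)

    # batch_size, cost, machine, train_biases -- as listed in the docstring above
    trainer = Trainer(1, cost, machine, train_biases=False)
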
diff --git a/xbob/learn/mlp/utils.h b/bob/learn/mlp/utils.h
similarity index 95%
rename from xbob/learn/mlp/utils.h
rename to bob/learn/mlp/utils.h
index 1497b61..8dc9f4a 100644
--- a/xbob/learn/mlp/utils.h
+++ b/bob/learn/mlp/utils.h
@@ -10,10 +10,10 @@
 #ifndef BOB_LEARN_MLP_UTILS_H
 #define BOB_LEARN_MLP_UTILS_H
 
-#define XBOB_LEARN_MLP_MODULE
-#include <xbob.blitz/cppapi.h>
-#include <xbob.blitz/cleanup.h>
-#include <xbob.learn.mlp/api.h>
+#define BOB_LEARN_MLP_MODULE
+#include <bob.blitz/cppapi.h>
+#include <bob.blitz/cleanup.h>
+#include <bob.learn.mlp/api.h>
 
 /**
  * Converts a vector of blitz::Array<double,N> into a python iterable over
diff --git a/xbob/learn/mlp/version.cpp b/bob/learn/mlp/version.cpp
similarity index 71%
rename from xbob/learn/mlp/version.cpp
rename to bob/learn/mlp/version.cpp
index e909045..66a7297 100644
--- a/xbob/learn/mlp/version.cpp
+++ b/bob/learn/mlp/version.cpp
@@ -19,12 +19,12 @@
 #ifdef NO_IMPORT_ARRAY
 #undef NO_IMPORT_ARRAY
 #endif
-#include <xbob.blitz/capi.h>
-#include <xbob.blitz/cleanup.h>
-#include <xbob.io.base/config.h>
-#include <xbob.learn.activation/config.h>
-#include <xbob.learn.mlp/config.h>
-#include <xbob.core/config.h>
+#include <bob.blitz/capi.h>
+#include <bob.blitz/cleanup.h>
+#include <bob.io.base/config.h>
+#include <bob.learn.activation/config.h>
+#include <bob.learn.mlp/config.h>
+#include <bob.core/config.h>
 
 static int dict_set(PyObject* d, const char* key, const char* value) {
   PyObject* v = Py_BuildValue("s", value);
@@ -100,31 +100,31 @@ static PyObject* numpy_version() {
 }
 
 /**
- * xbob.blitz c/c++ api version
+ * bob.blitz c/c++ api version
  */
-static PyObject* xbob_blitz_version() {
-  return Py_BuildValue("{ss}", "api", BOOST_PP_STRINGIZE(XBOB_BLITZ_API_VERSION));
+static PyObject* bob_blitz_version() {
+  return Py_BuildValue("{ss}", "api", BOOST_PP_STRINGIZE(BOB_BLITZ_API_VERSION));
 }
 
 /**
- * xbob.io.base c/c++ api version
+ * bob.io.base c/c++ api version
  */
-static PyObject* xbob_io_base_version() {
-  return Py_BuildValue("{ss}", "api", BOOST_PP_STRINGIZE(XBOB_IO_BASE_API_VERSION));
+static PyObject* bob_io_base_version() {
+  return Py_BuildValue("{ss}", "api", BOOST_PP_STRINGIZE(BOB_IO_BASE_API_VERSION));
 }
 
 /**
- * xbob.core c/c++ api version
+ * bob.core c/c++ api version
  */
-static PyObject* xbob_core_version() {
-  return Py_BuildValue("{ss}", "api", BOOST_PP_STRINGIZE(XBOB_CORE_API_VERSION));
+static PyObject* bob_core_version() {
+  return Py_BuildValue("{ss}", "api", BOOST_PP_STRINGIZE(BOB_CORE_API_VERSION));
 }
 
 /**
- * xbob.learn.activation c/c++ api version
+ * bob.learn.activation c/c++ api version
  */
-static PyObject* xbob_learn_activation_version() {
-  return Py_BuildValue("{ss}", "api", BOOST_PP_STRINGIZE(XBOB_LEARN_ACTIVATION_API_VERSION));
+static PyObject* bob_learn_activation_version() {
+  return Py_BuildValue("{ss}", "api", BOOST_PP_STRINGIZE(BOB_LEARN_ACTIVATION_API_VERSION));
 }
 
 static PyObject* build_version_dictionary() {
@@ -138,10 +138,10 @@ static PyObject* build_version_dictionary() {
   if (!dict_steal(retval, "Compiler", compiler_version())) return 0;
   if (!dict_steal(retval, "Python", python_version())) return 0;
   if (!dict_steal(retval, "NumPy", numpy_version())) return 0;
-  if (!dict_steal(retval, "xbob.blitz", xbob_blitz_version())) return 0;
-  if (!dict_steal(retval, "xbob.io.base", xbob_io_base_version())) return 0;
-  if (!dict_steal(retval, "xbob.core", xbob_core_version())) return 0;
-  if (!dict_steal(retval, "xbob.learn.activation", xbob_learn_activation_version())) return 0;
+  if (!dict_steal(retval, "bob.blitz", bob_blitz_version())) return 0;
+  if (!dict_steal(retval, "bob.io.base", bob_io_base_version())) return 0;
+  if (!dict_steal(retval, "bob.core", bob_core_version())) return 0;
+  if (!dict_steal(retval, "bob.learn.activation", bob_learn_activation_version())) return 0;
   if (!dict_steal(retval, "Bob", bob_version())) return 0;
 
   Py_INCREF(retval);
@@ -159,7 +159,7 @@ PyDoc_STRVAR(module_docstr,
 #if PY_VERSION_HEX >= 0x03000000
 static PyModuleDef module_definition = {
   PyModuleDef_HEAD_INIT,
-  XBOB_EXT_MODULE_NAME,
+  BOB_EXT_MODULE_NAME,
   module_docstr,
   -1,
   module_methods,
@@ -172,15 +172,15 @@ static PyObject* create_module (void) {
 # if PY_VERSION_HEX >= 0x03000000
   PyObject* m = PyModule_Create(&module_definition);
 # else
-  PyObject* m = Py_InitModule3(XBOB_EXT_MODULE_NAME, module_methods, module_docstr);
+  PyObject* m = Py_InitModule3(BOB_EXT_MODULE_NAME, module_methods, module_docstr);
 # endif
   if (!m) return 0;
   auto m_ = make_safe(m); ///< protects against early returns
 
   /* register version numbers and constants */
-  if (PyModule_AddIntConstant(m, "api", XBOB_LEARN_MLP_API_VERSION) < 0)
+  if (PyModule_AddIntConstant(m, "api", BOB_LEARN_MLP_API_VERSION) < 0)
     return 0;
-  if (PyModule_AddStringConstant(m, "module", XBOB_EXT_MODULE_VERSION) < 0)
+  if (PyModule_AddStringConstant(m, "module", BOB_EXT_MODULE_VERSION) < 0)
     return 0;
 
   PyObject* externals = build_version_dictionary();
@@ -188,9 +188,9 @@ static PyObject* create_module (void) {
   if (PyModule_AddObject(m, "externals", externals) < 0) return 0;
 
   /* imports dependencies */
-  if (import_xbob_blitz() < 0) {
+  if (import_bob_blitz() < 0) {
     PyErr_Print();
-    PyErr_Format(PyExc_ImportError, "cannot import `%s'", XBOB_EXT_MODULE_NAME);
+    PyErr_Format(PyExc_ImportError, "cannot import `%s'", BOB_EXT_MODULE_NAME);
     return 0;
   }
 
@@ -199,7 +199,7 @@ static PyObject* create_module (void) {
 
 }
 
-PyMODINIT_FUNC XBOB_EXT_ENTRY_NAME (void) {
+PyMODINIT_FUNC BOB_EXT_ENTRY_NAME (void) {
 # if PY_VERSION_HEX >= 0x03000000
   return
 # endif
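
Besides the per-dependency version functions above, the module registers an integer
``api`` constant, a ``module`` version string and an ``externals`` dictionary.
Assuming the extension builds as ``bob.learn.mlp.version`` (as declared in the
``setup.py`` further below), these can be inspected from Python:

    import bob.learn.mlp.version as version

    print(version.api)     # C/C++ API version (BOB_LEARN_MLP_API_VERSION)
    print(version.module)  # package release string (BOB_EXT_MODULE_VERSION)

    # per-dependency versions collected by build_version_dictionary()
    for name, info in version.externals.items():
        print('%s: %s' % (name, info))
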
diff --git a/buildout.cfg b/buildout.cfg
index 052cf16..99543b5 100644
--- a/buildout.cfg
+++ b/buildout.cfg
@@ -4,29 +4,29 @@
 
 [buildout]
 parts = scripts
-eggs = xbob.learn.mlp
-extensions = xbob.buildout
+eggs = bob.learn.mlp
+extensions = bob.buildout
              mr.developer
 auto-checkout = *
-develop = src/xbob.extension
-          src/xbob.blitz
-          src/xbob.io.base
-          src/xbob.learn.activation
-          src/xbob.core
+develop = src/bob.extension
+          src/bob.blitz
+          src/bob.io.base
+          src/bob.learn.activation
+          src/bob.core
           .
 
-; options for xbob.buildout extension
+; options for bob.buildout extension
 debug = true
 verbose = true
 prefixes = /idiap/group/torch5spro/releases/preview/install/linux-x86_64-release
            /Users/andre/work/bob/b/dbg/
 
 [sources]
-xbob.extension = git https://github.com/bioidiap/xbob.extension branch=prototype
-xbob.blitz = git https://github.com/bioidiap/xbob.blitz
-xbob.io.base = git https://github.com/bioidiap/xbob.io.base
-xbob.learn.activation = git https://github.com/bioidiap/xbob.learn.activation
-xbob.core = git https://github.com/bioidiap/xbob.core
+bob.extension = git https://github.com/bioidiap/bob.extension
+bob.blitz = git https://github.com/bioidiap/bob.blitz
+bob.io.base = git https://github.com/bioidiap/bob.io.base
+bob.learn.activation = git https://github.com/bioidiap/bob.learn.activation
+bob.core = git https://github.com/bioidiap/bob.core
 
 [scripts]
-recipe = xbob.buildout:scripts
+recipe = bob.buildout:scripts
diff --git a/doc/c_cpp_api.rst b/doc/c_cpp_api.rst
index 0e6074c..5525465 100644
--- a/doc/c_cpp_api.rst
+++ b/doc/c_cpp_api.rst
@@ -6,11 +6,11 @@
  C++ API
 =========
 
-The C++ API of ``xbob.learn.mlp`` allows users to leverage from automatic
-converters for classes in :py:class:`xbob.learn.mlp`.  To use the C API,
-clients should first, include the header file ``<xbob.learn.mlp.h>`` on
+The C++ API of ``bob.learn.mlp`` allows users to benefit from automatic
+converters for classes in :py:class:`bob.learn.mlp`.  To use the C API,
+clients should first include the header file ``<bob.learn.mlp.h>`` in
 their compilation units and then make sure to call once
-``import_xbob_learn_mlp()`` at their module instantiation, as explained at
+``import_bob_learn_mlp()`` at their module instantiation, as explained in
 the `Python manual
 <http://docs.python.org/2/extending/extending.html#using-capsules>`_.
 
@@ -19,7 +19,7 @@ the import function:
 
 .. code-block:: c++
 
-   #include <xbob.learn.mlp/api.h>
+   #include <bob.learn.mlp/api.h>
 
    PyMODINIT_FUNC initclient(void) {
 
@@ -27,10 +27,10 @@ the import function:
 
      if (!m) return 0;
 
-     if (import_xbob_blitz() < 0) return 0;
-     if (import_xbob_io() < 0) return 0;
-     if (import_xbob_learn_activation() < 0) return 0;
-     if (import_xbob_learn_mlp() < 0) return 0;
+     if (import_bob_blitz() < 0) return 0;
+     if (import_bob_io() < 0) return 0;
+     if (import_bob_learn_activation() < 0) return 0;
+     if (import_bob_learn_mlp() < 0) return 0;
 
      return m;
 
@@ -39,14 +39,14 @@ the import function:
 .. note::
 
   The include directory can be discovered using
-  :py:func:`xbob.learn.mlp.get_include`.
+  :py:func:`bob.learn.mlp.get_include`.
 
 Machine
 -------
 
 .. cpp:type:: PyBobLearnMLPMachineObject
 
-   The pythonic object representation for a :py:class:`xbob.learn.mlp.Machine`
+   The pythonic object representation for a :py:class:`bob.learn.mlp.Machine`
    object.
 
    .. code-block:: cpp
@@ -56,7 +56,7 @@ Machine
         bob::learn::mlp::Machine* cxx;
       } PyBobLearnMLPMachineObject;
 
-   .. cpp:member:: xbob::learn::mlp::Machine* cxx
+   .. cpp:member:: bob::learn::mlp::Machine* cxx
 
      A pointer to the machine implementation in C++.
 
@@ -73,7 +73,7 @@ Cost
 
 .. cpp:type:: PyBobLearnCostObject
 
-   The pythonic object representation for a :py:class:`xbob.learn.mlp.Cost`
+   The pythonic object representation for a :py:class:`bob.learn.mlp.Cost`
   object.  It is the base class of all derived cost types available in
    |project|.
 
@@ -84,7 +84,7 @@ Cost
         boost::shared_ptr<bob::learn::mlp::Cost> cxx;
       } PyBobLearnCostObject;
 
-   .. cpp:member:: boost::shared_ptr<xbob::learn::mlp::Cost> cxx
+   .. cpp:member:: boost::shared_ptr<bob::learn::mlp::Cost> cxx
 
       A pointer to the cost object implemented in C++. The cost object is an
       abstract interface. You cannot instantiate a cost from scratch, but only
@@ -105,8 +105,8 @@ Cost
 
    These are the cost object specializations you can use from Python:
 
-   * :py:class:`xbob.learn.mlp.SquareError`
-   * :py:class:`xbob.learn.mlp.CrossEntropyLoss`
+   * :py:class:`bob.learn.mlp.SquareError`
+   * :py:class:`bob.learn.mlp.CrossEntropyLoss`
 
   For each of those specializations, corresponding C object types exist.
 
@@ -123,7 +123,7 @@ Data Shuffler
 
 .. cpp:type:: PyBobLearnDataShufflerObject
 
-   The pythonic representation for a :py:class:`xbob.learn.mlp.DataShuffler`
+   The pythonic representation for a :py:class:`bob.learn.mlp.DataShuffler`
    object.
 
    .. code-block:: cpp
@@ -133,7 +133,7 @@ Data Shuffler
         bob::learn::mlp::DataShuffler* cxx;
      } PyBobLearnDataShufflerObject;
 
-   .. cpp:member:: xbob::learn::mlp::DataShuffler* cxx
+   .. cpp:member:: bob::learn::mlp::DataShuffler* cxx
 
       A pointer to the data shuffler object implemented in C++.
 
@@ -150,7 +150,7 @@ Trainers
 
 .. cpp:type:: PyBobLearnMLPTrainerObject
 
-   The pythonic representation for a :py:class:`xbob.learn.mlp.Trainer` object.
+   The pythonic representation for a :py:class:`bob.learn.mlp.Trainer` object.
    All back-propagation-based trainers should inherit from this type as it
    implements most of the basic functionality needed by such a learning
    technique.
@@ -162,7 +162,7 @@ Trainers
         bob::learn::mlp::Trainer* cxx;
      } PyBobLearnMLPTrainerObject;
 
-   .. cpp:member:: xbob::learn::mlp::Trainer* cxx
+   .. cpp:member:: bob::learn::mlp::Trainer* cxx
 
       A pointer to the base trainer object implemented in C++.
 
@@ -176,7 +176,7 @@ Trainers
 
 .. cpp:type:: PyBobLearnBackPropObject
 
-   The pythonic representation for a :py:class:`xbob.learn.mlp.BackProp` object.
+   The pythonic representation for a :py:class:`bob.learn.mlp.BackProp` object.
    All back-propagation-based trainers should inherit from this type as it
    implements most of the basic functionality needed by such a learning
    technique.
@@ -193,7 +193,7 @@ Trainers
       The parent abstract class pointer. Use ``parent.cxx`` to access the
       abstract C++ base interface.
 
-   .. cpp:member:: xbob::learn::mlp::BackProp* cxx
+   .. cpp:member:: bob::learn::mlp::BackProp* cxx
 
       A pointer to the derived trainer object implemented in C++.
 
@@ -207,7 +207,7 @@ Trainers
 
 .. cpp:type:: PyBobLearnRPropObject
 
-   The pythonic representation for a :py:class:`xbob.learn.mlp.RProp` object.
+   The pythonic representation for a :py:class:`bob.learn.mlp.RProp` object.
    All back-propagation-based trainers should inherit from this type as it
    implements most of the basic functionality needed by such a learning
    technique.
@@ -224,7 +224,7 @@ Trainers
       The parent abstract class pointer. Use ``parent.cxx`` to access the
       abstract C++ base interface.
 
-   .. cpp:member:: xbob::learn::mlp::RProp* cxx
+   .. cpp:member:: bob::learn::mlp::RProp* cxx
 
       A pointer to the derived trainer object implemented in C++.
 
diff --git a/doc/conf.py b/doc/conf.py
index bc2d217..9bf8077 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -58,12 +58,12 @@ source_suffix = '.rst'
 master_doc = 'index'
 
 # General information about the project.
-project = u'xbob.learn.mlp'
+project = u'bob.learn.mlp'
 import time
 copyright = u'%s, Idiap Research Institute' % time.strftime('%Y')
 
 # Grab the setup entry
-distribution = pkg_resources.require('xbob.learn.mlp')[0]
+distribution = pkg_resources.require('bob.learn.mlp')[0]
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -129,7 +129,7 @@ if sphinx.__version__ >= "1.0":
 #html_title = None
 
 # A shorter title for the navigation bar.  Default is the same as html_title.
-#html_short_title = 'xbob_learn_mlp'
+#html_short_title = 'bob_learn_mlp'
 
 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
@@ -187,7 +187,7 @@ html_favicon = 'img/favicon.ico'
 #html_file_suffix = None
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'xbob_learn_mlp_doc'
+htmlhelp_basename = 'bob_learn_mlp_doc'
 
 
 # -- Options for LaTeX output --------------------------------------------------
@@ -201,7 +201,7 @@ latex_font_size = '10pt'
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'xbob_learn_mlp.tex', u'Bob Multi-Layer Perceptrons',
+  ('index', 'bob_learn_mlp.tex', u'Bob Multi-Layer Perceptrons',
    u'Biometrics Group, Idiap Research Institute', 'manual'),
 ]
 
@@ -241,7 +241,7 @@ rst_epilog = """
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    ('index', 'xbob_learn_mlp', u'Bob Multi-Layer Perceptron Documentation', [u'Idiap Research Institute'], 1)
+    ('index', 'bob_learn_mlp', u'Bob Multi-Layer Perceptron Documentation', [u'Idiap Research Institute'], 1)
 ]
 
 # Default processing flags for sphinx
diff --git a/doc/guide.rst b/doc/guide.rst
index dcf7fbb..ecf5672 100644
--- a/doc/guide.rst
+++ b/doc/guide.rst
@@ -7,7 +7,7 @@
 .. testsetup:: *
 
    import numpy
-   import xbob.learn.mlp
+   import bob.learn.mlp
    import tempfile
    import os
 
@@ -26,7 +26,7 @@ feed-forward structure. You can create a new MLP using one of the trainers
 described below. We start this tutorial by showing how to actually use an
 MLP.
 
-To instantiate a new (uninitialized) :py:class:`xbob.learn.mlp.Machine` pass a
+To instantiate a new (uninitialized) :py:class:`bob.learn.mlp.Machine`, pass a
 shape descriptor as a :py:func:`tuple`. The shape parameter should contain the
 input size as the first parameter and the output size as the last parameter.
 The parameters in between define the number of neurons in the hidden layers of
@@ -37,7 +37,7 @@ neurons in the second hidden layer and 2 outputs.  Here is an example:
 
 .. doctest::
 
-  >>> mlp = xbob.learn.mlp.Machine((3, 3, 2, 1))
+  >>> mlp = bob.learn.mlp.Machine((3, 3, 2, 1))
 
 As it is, the network is uninitialized. For the sake of demonstrating how to use
 MLPs, let's set the weights and biases manually (we would normally use a trainer
@@ -77,30 +77,30 @@ At this point, a few things should be noted:
    to many (or many to 1). You can use the NumPy_ ``reshape()`` array method
   for this purpose as shown above.
 2. Biases should **always** be 1D arrays.
-3. By default, MLPs use the :py:class:`xbob.learn.activation.HyperbolicTangent`
+3. By default, MLPs use the :py:class:`bob.learn.activation.HyperbolicTangent`
   as the activation function. There are currently 4 other activation functions
    available in |project|:
 
-   * The identity function: :py:class:`xbob.learn.activation.Identity`;
+   * The identity function: :py:class:`bob.learn.activation.Identity`;
    * The sigmoid function (also known as the `logistic function
     <http://mathworld.wolfram.com/SigmoidFunction.html>`_):
-     :py:class:`xbob.learn.activation.Logistic`;
+     :py:class:`bob.learn.activation.Logistic`;
    * A scaled version of the hyperbolic tangent function:
-     :py:class:`xbob.learn.activation.MultipliedHyperbolicTangent`; and
+     :py:class:`bob.learn.activation.MultipliedHyperbolicTangent`; and
    * A scaled version of the identity activation:
-     :py:class:`xbob.learn.activation.Linear`
+     :py:class:`bob.learn.activation.Linear`
 
 Let's try changing all of the activation functions to a simpler one, just for
 this example:
 
 .. doctest::
 
-  >>> mlp.hidden_activation = xbob.learn.activation.Identity()
-  >>> mlp.output_activation = xbob.learn.activation.Identity()
+  >>> mlp.hidden_activation = bob.learn.activation.Identity()
+  >>> mlp.output_activation = bob.learn.activation.Identity()
 
 Once the network weights and biases are set, we can feed forward an example
 through this machine. This is done using the ``()`` operator, like for a
-:py:class:`xbob.learn.Linear.Machine`:
+:py:class:`bob.learn.linear.Machine`:
 
 .. doctest::
 
@@ -120,38 +120,38 @@ available MLP trainers in two different 2D `NumPy`_ arrays, one for the input
    >>> t0 = numpy.array([[.0]]) # target
 
 The class used to train an MLP [1]_ with backpropagation [2]_ is
-:py:class:`xbob.learn.MLP.BackProp`. An example is shown below.
+:py:class:`bob.learn.mlp.BackProp`. An example is shown below.
 
 
 .. doctest::
    :options: +NORMALIZE_WHITESPACE
 
-   >>> trainer = xbob.learn.mlp.BackProp(1, xbob.learn.mlp.SquareError(mlp.output_activation), mlp, train_biases=False) #  Creates a BackProp trainer with a batch size of 1
+   >>> trainer = bob.learn.mlp.BackProp(1, bob.learn.mlp.SquareError(mlp.output_activation), mlp, train_biases=False) #  Creates a BackProp trainer with a batch size of 1
    >>> trainer.train(mlp, d0, t0) # Performs the Back Propagation
 
 .. note::
 
   The second parameter of the trainer defines the cost function to be used for
   the training. You can use two different types of pre-programmed costs in
-  |project|: :py:class:`xbob.learn.mlp.SquareError`, like before, or
-  :py:class:`xbob.learn.mlp.CrossEntropyLoss` (normally in association with
+  |project|: :py:class:`bob.learn.mlp.SquareError`, like before, or
+  :py:class:`bob.learn.mlp.CrossEntropyLoss` (normally in association with
   :py:class:`bob.learn.activation.Logistic`). You can implement your own
  cost/loss functions. To do so, however, you must use our C/C++ API
  and then bind it to Python in your own package.
 
 Backpropagation [2]_ requires a learning rate to be set. In the previous
 example, the default value ``0.1`` has been used. It can be changed using
-the :py:attr:`xbob.learn.mlp.BackProp.learning_rate` attribute.
+the :py:attr:`bob.learn.mlp.BackProp.learning_rate` attribute.
 
 An alternative training technique, referred to as **resilient propagation**
 (R-Prop) [3]_, dynamically computes an optimal learning rate. The
-corresponding class is :py:class:`xbob.learn.mlp.RProp`, and the overall
+corresponding class is :py:class:`bob.learn.mlp.RProp`, and the overall
 training procedure remains identical.
 
 .. doctest::
    :options: +NORMALIZE_WHITESPACE
 
-   >>> trainer = xbob.learn.mlp.RProp(1, xbob.learn.mlp.SquareError(mlp.output_activation), mlp, train_biases=False)
+   >>> trainer = bob.learn.mlp.RProp(1, bob.learn.mlp.SquareError(mlp.output_activation), mlp, train_biases=False)
    >>> trainer.train(mlp, d0, t0)
 
 .. note::
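
Put together, the renamed guide walks through creating a machine, choosing its
activations and running one of the trainers. A compact end-to-end sketch of that
flow (shape, data values and iteration count are illustrative):

    import numpy
    import bob.core.random
    import bob.learn.mlp

    # 2 inputs, one hidden layer with 3 neurons, 1 output
    mlp = bob.learn.mlp.Machine((2, 3, 1))
    mlp.randomize(rng=bob.core.random.mt19937(0))  # repeatable initialization

    d0 = numpy.array([[.3, .7]])  # one example (2D: examples x features)
    t0 = numpy.array([[.0]])      # its target

    cost = bob.learn.mlp.SquareError(mlp.output_activation)
    trainer = bob.learn.mlp.BackProp(1, cost, mlp, train_biases=False)

    for _ in range(10):
        trainer.train(mlp, d0, t0)  # one back-propagation step per call

    print(mlp(d0))  # forward pass through the (partially) trained network
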
diff --git a/doc/py_api.rst b/doc/py_api.rst
index 2b9cb69..6f44711 100644
--- a/doc/py_api.rst
+++ b/doc/py_api.rst
@@ -7,8 +7,8 @@
 ============
 
 This section includes information for using the pure Python API of
-``xbob.learn.mlp``.
+``bob.learn.mlp``.
 
 
-.. automodule:: xbob.learn.mlp
+.. automodule:: bob.learn.mlp
 
diff --git a/setup.py b/setup.py
index bc32e78..47d276b 100644
--- a/setup.py
+++ b/setup.py
@@ -4,21 +4,21 @@
 # Mon 16 Apr 08:18:08 2012 CEST
 
 from setuptools import setup, find_packages, dist
-dist.Distribution(dict(setup_requires=['xbob.blitz', 'xbob.io.base', 'xbob.learn.activation', 'xbob.core']))
-from xbob.blitz.extension import Extension
-import xbob.io.base
-import xbob.core
-import xbob.learn.activation
+dist.Distribution(dict(setup_requires=['bob.blitz', 'bob.io.base', 'bob.learn.activation', 'bob.core']))
+from bob.blitz.extension import Extension
+import bob.io.base
+import bob.core
+import bob.learn.activation
 
 import os
 package_dir = os.path.dirname(os.path.realpath(__file__))
-package_dir = os.path.join(package_dir, 'xbob', 'learn', 'mlp', 'include')
+package_dir = os.path.join(package_dir, 'bob', 'learn', 'mlp', 'include')
 include_dirs = [
     package_dir,
-    xbob.blitz.get_include(),
-    xbob.io.base.get_include(),
-    xbob.learn.activation.get_include(),
-    xbob.core.get_include(),
+    bob.blitz.get_include(),
+    bob.io.base.get_include(),
+    bob.learn.activation.get_include(),
+    bob.core.get_include(),
     ]
 
 packages = ['bob-io >= 2.0.0a2', 'bob-machine >= 2.0.0a2']
@@ -26,10 +26,10 @@ version = '2.0.0a0'
 
 setup(
 
-    name='xbob.learn.mlp',
+    name='bob.learn.mlp',
     version=version,
     description='Bindings for bob.machine\'s Multi-layer Perceptron and Trainers',
-    url='http://github.com/bioidiap/xbob.learn.mlp',
+    url='http://github.com/bioidiap/bob.learn.mlp',
     license='BSD',
     author='Andre Anjos',
     author_email='andre.anjos@idiap.ch',
@@ -41,44 +41,44 @@ setup(
 
     install_requires=[
       'setuptools',
-      'xbob.blitz',
-      'xbob.io.base',
-      'xbob.learn.activation',
-      'xbob.core',
+      'bob.blitz',
+      'bob.io.base',
+      'bob.learn.activation',
+      'bob.core',
     ],
 
     namespace_packages=[
-      "xbob",
-      "xbob.learn",
+      "bob",
+      "bob.learn",
       ],
 
     ext_modules = [
-      Extension("xbob.learn.mlp.version",
+      Extension("bob.learn.mlp.version",
         [
-          "xbob/learn/mlp/version.cpp",
+          "bob/learn/mlp/version.cpp",
           ],
         packages = packages,
         include_dirs = include_dirs,
         version = version,
         ),
-      Extension("xbob.learn.mlp._library",
+      Extension("bob.learn.mlp._library",
         [
-          "xbob/learn/mlp/roll.cpp",
-          "xbob/learn/mlp/rprop.cpp",
-          "xbob/learn/mlp/backprop.cpp",
-          "xbob/learn/mlp/trainer.cpp",
-          "xbob/learn/mlp/shuffler.cpp",
-          "xbob/learn/mlp/cost.cpp",
-          "xbob/learn/mlp/machine.cpp",
-          "xbob/learn/mlp/main.cpp",
-          "xbob/learn/mlp/cxx/roll.cpp",
-          "xbob/learn/mlp/cxx/machine.cpp",
-          "xbob/learn/mlp/cxx/cross_entropy.cpp",
-          "xbob/learn/mlp/cxx/square_error.cpp",
-          "xbob/learn/mlp/cxx/shuffler.cpp",
-          "xbob/learn/mlp/cxx/trainer.cpp",
-          "xbob/learn/mlp/cxx/backprop.cpp",
-          "xbob/learn/mlp/cxx/rprop.cpp",
+          "bob/learn/mlp/roll.cpp",
+          "bob/learn/mlp/rprop.cpp",
+          "bob/learn/mlp/backprop.cpp",
+          "bob/learn/mlp/trainer.cpp",
+          "bob/learn/mlp/shuffler.cpp",
+          "bob/learn/mlp/cost.cpp",
+          "bob/learn/mlp/machine.cpp",
+          "bob/learn/mlp/main.cpp",
+          "bob/learn/mlp/cxx/roll.cpp",
+          "bob/learn/mlp/cxx/machine.cpp",
+          "bob/learn/mlp/cxx/cross_entropy.cpp",
+          "bob/learn/mlp/cxx/square_error.cpp",
+          "bob/learn/mlp/cxx/shuffler.cpp",
+          "bob/learn/mlp/cxx/trainer.cpp",
+          "bob/learn/mlp/cxx/backprop.cpp",
+          "bob/learn/mlp/cxx/rprop.cpp",
           ],
         packages = packages,
         include_dirs = include_dirs,
diff --git a/xbob/learn/mlp/include/xbob.learn.mlp/config.h b/xbob/learn/mlp/include/xbob.learn.mlp/config.h
deleted file mode 100644
index e492d20..0000000
--- a/xbob/learn/mlp/include/xbob.learn.mlp/config.h
+++ /dev/null
@@ -1,14 +0,0 @@
-/**
- * @author Andre Anjos <andre.anjos@idiap.ch>
- * @date Thu 24 Apr 17:31:59 2014 CEST
- *
- * @brief General directives for all modules in xbob.learn.mlp
- */
-
-#ifndef XBOB_LEARN_MLP_CONFIG_H
-#define XBOB_LEARN_MLP_CONFIG_H
-
-/* Macros that define versions and important names */
-#define XBOB_LEARN_MLP_API_VERSION 0x0200
-
-#endif /* XBOB_LEARN_MLP_CONFIG_H */
-- 
GitLab