diff --git a/xbob/learn/activation/activation.cpp b/xbob/learn/activation/activation.cpp
index 718748d56ee578fa4e730e33afc10b97b9aee95d..51ca12398d2d8b271ce1b29445cdf98d077f9734 100644
--- a/xbob/learn/activation/activation.cpp
+++ b/xbob/learn/activation/activation.cpp
@@ -70,14 +70,14 @@ PyObject* PyBobLearnActivation_NewFromActivation
 static void PyBobLearnActivation_delete (PyBobLearnActivationObject* self) {
 
   self->cxx.reset();
-  self->ob_type->tp_free((PyObject*)self);
+  Py_TYPE(self)->tp_free((PyObject*)self);
 
 }
 
 static int PyBobLearnActivation_init(PyBobLearnActivationObject* self,
     PyObject*, PyObject*) {
 
-  PyErr_Format(PyExc_NotImplementedError, "cannot initialize object of base type `%s' - use one of the inherited classes", self->ob_type->tp_name);
+  PyErr_Format(PyExc_NotImplementedError, "cannot initialize object of base type `%s' - use one of the derived classes", Py_TYPE(self)->tp_name);
   return -1;
 
 }
@@ -151,12 +151,12 @@ static PyObject* PyBobLearnActivation_call1(PyBobLearnActivationObject* self,
     auto z_converted_ = make_safe(z_converted);
 
     if (z_converted->type_num != NPY_FLOAT64) {
-      PyErr_Format(PyExc_TypeError, "`%s' function only supports 64-bit float arrays for input array `z'", self->ob_type->tp_name);
+      PyErr_Format(PyExc_TypeError, "`%s' function only supports 64-bit float arrays for input array `z'", Py_TYPE(self)->tp_name);
       return 0;
     }
 
     if (z_converted->ndim < 1 || z_converted->ndim > 4) {
-      PyErr_Format(PyExc_TypeError, "`%s' function only accepts 1, 2, 3 or 4-dimensional arrays (not %" PY_FORMAT_SIZE_T "dD arrays)", self->ob_type->tp_name, z_converted->ndim);
+      PyErr_Format(PyExc_TypeError, "`%s' function only accepts 1, 2, 3 or 4-dimensional arrays (not %" PY_FORMAT_SIZE_T "dD arrays)", Py_TYPE(self)->tp_name, z_converted->ndim);
       return 0;
     }
 
@@ -170,7 +170,7 @@ static PyObject* PyBobLearnActivation_call1(PyBobLearnActivationObject* self,
         z_converted, reinterpret_cast<PyBlitzArrayObject*>(res));
 
     if (!ok) {
-      PyErr_Format(PyExc_RuntimeError, "unexpected error occurred applying `%s' to input array (DEBUG ME)", self->ob_type->tp_name);
+      PyErr_Format(PyExc_RuntimeError, "unexpected error occurred applying `%s' to input array (DEBUG ME)", Py_TYPE(self)->tp_name);
       return 0;
     }
 
@@ -189,7 +189,7 @@ static PyObject* PyBobLearnActivation_call1(PyBobLearnActivationObject* self,
 
   }
 
-  PyErr_Format(PyExc_TypeError, "`%s' is not capable to process input objects of type `%s'", self->ob_type->tp_name, z->ob_type->tp_name);
+  PyErr_Format(PyExc_TypeError, "`%s' cannot process input objects of type `%s'", Py_TYPE(self)->tp_name, Py_TYPE(z)->tp_name);
   return 0;
 
 }
@@ -215,17 +215,17 @@ static PyObject* PyBobLearnActivation_call2(PyBobLearnActivationObject* self,
   auto res_ = make_safe(res);
 
   if (z->type_num != NPY_FLOAT64) {
-    PyErr_Format(PyExc_TypeError, "`%s' function only supports 64-bit float arrays for input array `z'", self->ob_type->tp_name);
+    PyErr_Format(PyExc_TypeError, "`%s' function only supports 64-bit float arrays for input array `z'", Py_TYPE(self)->tp_name);
     return 0;
   }
 
   if (res->type_num != NPY_FLOAT64) {
-    PyErr_Format(PyExc_TypeError, "`%s' function only supports 64-bit float arrays for output array `res'", self->ob_type->tp_name);
+    PyErr_Format(PyExc_TypeError, "`%s' function only supports 64-bit float arrays for output array `res'", Py_TYPE(self)->tp_name);
     return 0;
   }
 
   if (z->ndim < 1 || z->ndim > 4) {
-    PyErr_Format(PyExc_TypeError, "`%s' function only accepts 1, 2, 3 or 4-dimensional arrays (not %" PY_FORMAT_SIZE_T "dD arrays)", self->ob_type->tp_name, z->ndim);
+    PyErr_Format(PyExc_TypeError, "`%s' function only accepts 1, 2, 3 or 4-dimensional arrays (not %" PY_FORMAT_SIZE_T "dD arrays)", Py_TYPE(self)->tp_name, z->ndim);
     return 0;
   }
 
@@ -247,7 +247,7 @@ static PyObject* PyBobLearnActivation_call2(PyBobLearnActivationObject* self,
   int ok = apply(boost::bind(method, self->cxx, _1), z, res);
 
   if (!ok) {
-    PyErr_Format(PyExc_RuntimeError, "unexpected error occurred applying C++ `%s' to input array (DEBUG ME)", self->ob_type->tp_name);
+    PyErr_Format(PyExc_RuntimeError, "unexpected error occurred applying C++ `%s' to input array (DEBUG ME)", Py_TYPE(self)->tp_name);
     return 0;
   }
 
@@ -437,7 +437,7 @@ static PyObject* PyBobLearnActivation_Load(PyBobLearnActivationObject* self,
     PyObject* f) {
 
   if (!PyBobIoHDF5File_Check(f)) {
-    PyErr_Format(PyExc_TypeError, "`%s' cannot load itself from `%s', only from an HDF5 file", self->ob_type->tp_name, f->ob_type->tp_name);
+    PyErr_Format(PyExc_TypeError, "`%s' cannot load itself from `%s', only from an HDF5 file", Py_TYPE(self)->tp_name, Py_TYPE(f)->tp_name);
     return 0;
   }
 
@@ -471,7 +471,7 @@ static PyObject* PyBobLearnActivation_Save
 (PyBobLearnActivationObject* self, PyObject* f) {
 
   if (!PyBobIoHDF5File_Check(f)) {
-    PyErr_Format(PyExc_TypeError, "`%s' cannot write itself to `%s', only to an HDF5 file", self->ob_type->tp_name, f->ob_type->tp_name);
+    PyErr_Format(PyExc_TypeError, "`%s' cannot write itself to `%s', only to an HDF5 file", Py_TYPE(self)->tp_name, Py_TYPE(f)->tp_name);
     return 0;
   }
 
@@ -541,7 +541,7 @@ static PyObject* PyBobLearnActivation_RichCompare (PyBobLearnActivationObject* s
 
   if (!PyBobLearnActivation_Check(other)) {
     PyErr_Format(PyExc_TypeError, "cannot compare `%s' with `%s'",
-        self->ob_type->tp_name, other->ob_type->tp_name);
+        Py_TYPE(self)->tp_name, Py_TYPE(other)->tp_name);
     return 0;
   }
 
@@ -568,8 +568,7 @@ static PyObject* PyBobLearnActivation_Str (PyBobLearnActivationObject* self) {
 }
 
 PyTypeObject PyBobLearnActivation_Type = {
-    PyObject_HEAD_INIT(0)
-    0,                                              /* ob_size */
+    PyVarObject_HEAD_INIT(0, 0)
     s_activation_str,                               /* tp_name */
     sizeof(PyBobLearnActivationObject),             /* tp_basicsize */
     0,                                              /* tp_itemsize */
diff --git a/xbob/learn/activation/identity.cpp b/xbob/learn/activation/identity.cpp
index 0463d42b3bc8d27fd19a7c915f66d367d298a188..87bfd0739fa6d5eb1ccdd418c48bbeb8428cbeaa 100644
--- a/xbob/learn/activation/identity.cpp
+++ b/xbob/learn/activation/identity.cpp
@@ -50,13 +50,12 @@ static void PyBobLearnIdentityActivation_delete
 
   self->parent.cxx.reset();
   self->cxx.reset();
-  self->parent.ob_type->tp_free((PyObject*)self);
+  Py_TYPE(&self->parent)->tp_free((PyObject*)self);
 
 }
 
 PyTypeObject PyBobLearnIdentityActivation_Type = {
-    PyObject_HEAD_INIT(0)
-    0,                                                  /*ob_size*/
+    PyVarObject_HEAD_INIT(0, 0)
     s_identityactivation_str,                           /*tp_name*/
     sizeof(PyBobLearnIdentityActivationObject),         /*tp_basicsize*/
     0,                                                  /*tp_itemsize*/
diff --git a/xbob/learn/activation/linear.cpp b/xbob/learn/activation/linear.cpp
index bb338052e4dc75afb19e0c5b136a5487bfb9f2ce..942570db2091c15ff4ed21001e8fd37eb15f3d6a 100644
--- a/xbob/learn/activation/linear.cpp
+++ b/xbob/learn/activation/linear.cpp
@@ -55,7 +55,7 @@ static void PyBobLearnLinearActivation_delete
 
   self->parent.cxx.reset();
   self->cxx.reset();
-  self->parent.ob_type->tp_free((PyObject*)self);
+  Py_TYPE(&self->parent)->tp_free((PyObject*)self);
 
 }
 
@@ -83,8 +83,7 @@ static PyGetSetDef PyBobLearnLinearActivation_getseters[] = {
 };
 
 PyTypeObject PyBobLearnLinearActivation_Type = {
-    PyObject_HEAD_INIT(0)
-    0,                                                  /*ob_size*/
+    PyVarObject_HEAD_INIT(0, 0)
     s_linearactivation_str,                             /*tp_name*/
     sizeof(PyBobLearnLinearActivationObject),           /*tp_basicsize*/
     0,                                                  /*tp_itemsize*/
diff --git a/xbob/learn/activation/logistic.cpp b/xbob/learn/activation/logistic.cpp
index ba5af66f730fa8b90a6cace9c6beb7e0eb8c12c9..b59e83fa379125723038bad1b9ed26b8a17d7cec 100644
--- a/xbob/learn/activation/logistic.cpp
+++ b/xbob/learn/activation/logistic.cpp
@@ -49,13 +49,12 @@ static void PyBobLearnLogisticActivation_delete
 
   self->parent.cxx.reset();
   self->cxx.reset();
-  self->parent.ob_type->tp_free((PyObject*)self);
+  Py_TYPE(&self->parent)->tp_free((PyObject*)self);
 
 }
 
 PyTypeObject PyBobLearnLogisticActivation_Type = {
-    PyObject_HEAD_INIT(0)
-    0,                                                  /*ob_size*/
+    PyVarObject_HEAD_INIT(0, 0)
     s_logisticactivation_str,                           /*tp_name*/
     sizeof(PyBobLearnLogisticActivationObject),         /*tp_basicsize*/
     0,                                                  /*tp_itemsize*/
diff --git a/xbob/learn/activation/main.cpp b/xbob/learn/activation/main.cpp
index 7e25978753261f6f166ed0fc69f6bc3606ab59f8..a2d929c63fc331ad46e24c94a17411aff9f133c1 100644
--- a/xbob/learn/activation/main.cpp
+++ b/xbob/learn/activation/main.cpp
@@ -14,38 +14,81 @@
 #include <xbob.blitz/capi.h>
 #include <xbob.io/api.h>
 
-static PyMethodDef library_methods[] = {
+static PyMethodDef module_methods[] = {
     {0}  /* Sentinel */
 };
 
-PyDoc_STRVAR(library_docstr, "classes for activation functors");
+PyDoc_STRVAR(module_docstr, "classes for activation functors");
+
+#if PY_VERSION_HEX >= 0x03000000
+static PyModuleDef module_definition = {
+  PyModuleDef_HEAD_INIT,
+  XBOB_EXT_MODULE_NAME,
+  module_docstr,
+  -1,
+  module_methods,
+  0, 0, 0, 0
+};
+#endif
 
 int PyXbobLearnActivation_APIVersion = XBOB_LEARN_ACTIVATION_API_VERSION;
 
 PyMODINIT_FUNC XBOB_EXT_ENTRY_NAME (void) {
 
   PyBobLearnActivation_Type.tp_new = PyType_GenericNew;
-  if (PyType_Ready(&PyBobLearnActivation_Type) < 0) return;
+  if (PyType_Ready(&PyBobLearnActivation_Type) < 0) return
+# if PY_VERSION_HEX >= 0x03000000
+    0
+# endif
+    ;
 
   PyBobLearnIdentityActivation_Type.tp_base = &PyBobLearnActivation_Type;
-  if (PyType_Ready(&PyBobLearnIdentityActivation_Type) < 0) return;
+  if (PyType_Ready(&PyBobLearnIdentityActivation_Type) < 0) return
+# if PY_VERSION_HEX >= 0x03000000
+    0
+# endif
+    ;
 
   PyBobLearnLinearActivation_Type.tp_base = &PyBobLearnActivation_Type;
-  if (PyType_Ready(&PyBobLearnLinearActivation_Type) < 0) return;
+  if (PyType_Ready(&PyBobLearnLinearActivation_Type) < 0) return
+# if PY_VERSION_HEX >= 0x03000000
+    0
+# endif
+    ;
 
   PyBobLearnLogisticActivation_Type.tp_base = &PyBobLearnActivation_Type;
-  if (PyType_Ready(&PyBobLearnLogisticActivation_Type) < 0) return;
+  if (PyType_Ready(&PyBobLearnLogisticActivation_Type) < 0) return
+# if PY_VERSION_HEX >= 0x03000000
+    0
+# endif
+    ;
 
   PyBobLearnHyperbolicTangentActivation_Type.tp_base =
     &PyBobLearnActivation_Type;
-  if (PyType_Ready(&PyBobLearnHyperbolicTangentActivation_Type) < 0) return;
+  if (PyType_Ready(&PyBobLearnHyperbolicTangentActivation_Type) < 0) return
+# if PY_VERSION_HEX >= 0x03000000
+    0
+# endif
+    ;
 
   PyBobLearnMultipliedHyperbolicTangentActivation_Type.tp_base =
     &PyBobLearnActivation_Type;
   if (PyType_Ready(&PyBobLearnMultipliedHyperbolicTangentActivation_Type) < 0)
-    return;
-
-  PyObject* m = Py_InitModule3(XBOB_EXT_MODULE_NAME, library_methods, library_docstr);
+    return
+# if PY_VERSION_HEX >= 0x03000000
+      0
+# endif
+      ;
+
+
+# if PY_VERSION_HEX >= 0x03000000
+  PyObject* m = PyModule_Create(&module_definition);
+  if (!m) return 0;
+# else
+  PyObject* m = Py_InitModule3(XBOB_EXT_MODULE_NAME,
+      module_methods, module_docstr);
+  if (!m) return;
+# endif
 
   /* register some constants */
   PyModule_AddIntConstant(m, "__api_version__",
@@ -145,4 +188,8 @@ PyMODINIT_FUNC XBOB_EXT_ENTRY_NAME (void) {
   /* imports xbob.io C-API */
   import_xbob_io();
 
+# if PY_VERSION_HEX >= 0x03000000
+  return m;
+# endif
+
 }
diff --git a/xbob/learn/activation/mult_tanh.cpp b/xbob/learn/activation/mult_tanh.cpp
index 95e4acc10bfa02a34bbb108f30f124cd682dc52f..0bad6120ba7a528f8ae3f5b8c7690c4474bcff81 100644
--- a/xbob/learn/activation/mult_tanh.cpp
+++ b/xbob/learn/activation/mult_tanh.cpp
@@ -60,7 +60,7 @@ static void PyBobLearnMultipliedHyperbolicTangentActivation_delete
 
   self->parent.cxx.reset();
   self->cxx.reset();
-  self->parent.ob_type->tp_free((PyObject*)self);
+  Py_TYPE(&self->parent)->tp_free((PyObject*)self);
 
 }
 
@@ -110,8 +110,7 @@ static PyGetSetDef PyBobLearnMultipliedHyperbolicTangentActivation_getseters[] =
 };
 
 PyTypeObject PyBobLearnMultipliedHyperbolicTangentActivation_Type = {
-    PyObject_HEAD_INIT(0)
-    0,                                                  /*ob_size*/
+    PyVarObject_HEAD_INIT(0, 0)
     s_multtanhactivation_str,                           /*tp_name*/
     sizeof(PyBobLearnMultipliedHyperbolicTangentActivationObject),       /*tp_basicsize*/
     0,                                                  /*tp_itemsize*/
diff --git a/xbob/learn/activation/tanh.cpp b/xbob/learn/activation/tanh.cpp
index 866a713e98d4fd457adbde647b74fae9e6fd9997..db7239befe7eac62cf6dedcf44cd6538e9fde67d 100644
--- a/xbob/learn/activation/tanh.cpp
+++ b/xbob/learn/activation/tanh.cpp
@@ -51,13 +51,12 @@ static void PyBobLearnHyperbolicTangentActivation_delete
 
   self->parent.cxx.reset();
   self->cxx.reset();
-  self->parent.ob_type->tp_free((PyObject*)self);
+  Py_TYPE(&self->parent)->tp_free((PyObject*)self);
 
 }
 
 PyTypeObject PyBobLearnHyperbolicTangentActivation_Type = {
-    PyObject_HEAD_INIT(0)
-    0,                                                   /*ob_size*/
+    PyVarObject_HEAD_INIT(0, 0)
     s_hyperbolictangentactivation_str,                   /*tp_name*/
     sizeof(PyBobLearnHyperbolicTangentActivationObject), /*tp_basicsize*/
     0,                                                   /*tp_itemsize*/