Commit 4782082b authored by Tiago de Freitas Pereira's avatar Tiago de Freitas Pereira
Browse files

Documentation issues

parent d16ffdb2
......@@ -539,7 +539,7 @@ static PyObject* PyBobLearnEMGMMMachine_resize(PyBobLearnEMGMMMachineObject* sel
/*** log_likelihood ***/
static auto log_likelihood = bob::extension::FunctionDoc(
"log_likelihood",
"Output the log likelihood of the sample, x, i.e. log(p(x|GMM)). Inputs are checked.",
"Output the log likelihood of the sample, x, i.e. :math:`log(p(x|GMM))`. Inputs are checked.",
".. note:: The :py:meth:`__call__` function is an alias for this.",
true
)
......@@ -567,7 +567,7 @@ static PyObject* PyBobLearnEMGMMMachine_loglikelihood(PyBobLearnEMGMMMachineObje
/*** log_likelihood_ ***/
static auto log_likelihood_ = bob::extension::FunctionDoc(
"log_likelihood_",
"Output the log likelihood of the sample, x, i.e. log(p(x|GMM)). Inputs are NOT checked.",
"Output the log likelihood of the sample, x, i.e. :math:`log(p(x|GMM))`. Inputs are NOT checked.",
"",
true
)
......
......@@ -16,8 +16,8 @@
static auto ISVBase_doc = bob::extension::ClassDoc(
BOB_EXT_MODULE_PREFIX ".ISVBase",
"A ISVBase instance can be seen as a container for U and D when performing Joint Factor Analysis (JFA)."
"References: [Vogt2008,McCool2013]",
"A ISVBase instance can be seen as a container for U and D when performing Joint Factor Analysis (JFA).\n\n"
"References: [Vogt2008]_ [McCool2013]_",
""
).add_constructor(
bob::extension::FunctionDoc(
......@@ -26,12 +26,12 @@ static auto ISVBase_doc = bob::extension::ClassDoc(
"",
true
)
.add_prototype("gmm,ru","")
.add_prototype("ubm,ru","")
.add_prototype("other","")
.add_prototype("hdf5","")
.add_prototype("","")
.add_parameter("gmm", ":py:class:`bob.learn.em.GMMMachine`", "The Universal Background Model.")
.add_parameter("ubm", ":py:class:`bob.learn.em.GMMMachine`", "The Universal Background Model.")
.add_parameter("ru", "int", "Size of U (Within client variation matrix). In the end the U matrix will have (number_of_gaussians * feature_dimension x ru)")
.add_parameter("other", ":py:class:`bob.learn.em.ISVBase`", "A ISVBase object to be copied.")
.add_parameter("hdf5", ":py:class:`bob.io.base.HDF5File`", "An HDF5 file open for reading")
......
......@@ -15,8 +15,8 @@
static auto ISVMachine_doc = bob::extension::ClassDoc(
BOB_EXT_MODULE_PREFIX ".ISVMachine",
"A ISVMachine. An attached :py:class:`bob.learn.em.ISVBase` should be provided for Joint Factor Analysis. The :py:class:`bob.learn.em.ISVMachine` carries information about the speaker factors y and z, whereas a :py:class:`bob.learn.em.JFABase` carries information about the matrices U, V and D."
"References: [Vogt2008,McCool2013]",
"A ISVMachine. An attached :py:class:`bob.learn.em.ISVBase` should be provided for Joint Factor Analysis. The :py:class:`bob.learn.em.ISVMachine` carries information about the speaker factors :math:`y` and :math:`z`, whereas a :py:class:`bob.learn.em.JFABase` carries information about the matrices :math:`U` and :math:`D`.\n\n"
"References: [Vogt2008]_ [McCool2013]_",
""
).add_constructor(
bob::extension::FunctionDoc(
......@@ -29,7 +29,7 @@ static auto ISVMachine_doc = bob::extension::ClassDoc(
.add_prototype("other","")
.add_prototype("hdf5","")
.add_parameter("isv", ":py:class:`bob.learn.em.ISVBase`", "The ISVBase associated with this machine")
.add_parameter("isv_base", ":py:class:`bob.learn.em.ISVBase`", "The ISVBase associated with this machine")
.add_parameter("other", ":py:class:`bob.learn.em.ISVMachine`", "A ISVMachine object to be copied.")
.add_parameter("hdf5", ":py:class:`bob.io.base.HDF5File`", "An HDF5 file open for reading")
......@@ -187,7 +187,7 @@ PyObject* PyBobLearnEMISVMachine_getSupervectorLength(PyBobLearnEMISVMachineObje
static auto Z = bob::extension::VariableDoc(
"z",
"array_like <float, 1D>",
"Returns the z speaker factor. Eq (31) from [McCool2013]",
"Returns the :math:`z` speaker factor. Eq (31) from [McCool2013]_",
""
);
PyObject* PyBobLearnEMISVMachine_getZ(PyBobLearnEMISVMachineObject* self, void*){
......@@ -215,8 +215,8 @@ int PyBobLearnEMISVMachine_setZ(PyBobLearnEMISVMachineObject* self, PyObject* va
static auto X = bob::extension::VariableDoc(
"x",
"array_like <float, 1D>",
"Returns the X session factor. Eq (29) from [McCool2013]",
"The latent variable x (last one computed). This is a feature provided for convenience, but this attribute is not 'part' of the machine. The session latent variable x is indeed not class-specific, but depends on the sample considered. Furthermore, it is not saved into the machine or used when comparing machines."
"Returns the :math:`X` session factor. Eq (29) from [McCool2013]_",
"The latent variable x (last one computed). This is a feature provided for convenience, but this attribute is not 'part' of the machine. The session latent variable :math:`x` is indeed not class-specific, but depends on the sample considered. Furthermore, it is not saved into the machine or used when comparing machines."
);
PyObject* PyBobLearnEMISVMachine_getX(PyBobLearnEMISVMachineObject* self, void*){
BOB_TRY
......@@ -404,7 +404,7 @@ static PyObject* PyBobLearnEMISVMachine_IsSimilarTo(PyBobLearnEMISVMachineObject
static auto estimate_x = bob::extension::FunctionDoc(
"estimate_x",
"Estimates the session offset x (LPT assumption) given GMM statistics.",
"Estimates x from the GMM statistics considering the LPT assumption, that is the latent session variable x is approximated using the UBM",
"Estimates :math:`x` from the GMM statistics considering the LPT assumption, that is the latent session variable :math:`x` is approximated using the UBM",
true
)
.add_prototype("stats,input")
......
......@@ -15,8 +15,8 @@
static auto IVectorMachine_doc = bob::extension::ClassDoc(
BOB_EXT_MODULE_PREFIX ".IVectorMachine",
"An IVectorMachine consists of a Total Variability subspace \f$T\f$ and allows the extraction of IVector"
"References: [Dehak2010]",
"An IVectorMachine consists of a Total Variability subspace :math:`T` and allows the extraction of IVector.\n\n"
"References: [Dehak2010]_",
""
).add_constructor(
bob::extension::FunctionDoc(
......@@ -202,7 +202,7 @@ PyObject* PyBobLearnEMIVectorMachine_getSupervectorLength(PyBobLearnEMIVectorMac
static auto T = bob::extension::VariableDoc(
"t",
"array_like <float, 2D>",
"Returns the Total Variability matrix",
"Returns the Total Variability matrix, :math:`T`",
""
);
PyObject* PyBobLearnEMIVectorMachine_getT(PyBobLearnEMIVectorMachineObject* self, void*){
......@@ -289,7 +289,7 @@ int PyBobLearnEMIVectorMachine_setVarianceThreshold(PyBobLearnEMIVectorMachineOb
static auto ubm = bob::extension::VariableDoc(
"ubm",
":py:class:`bob.learn.em.GMMMachine`",
"Returns the UBM (Universal Background Model",
"Returns the UBM (Universal Background Model)",
""
);
PyObject* PyBobLearnEMIVectorMachine_getUBM(PyBobLearnEMIVectorMachineObject* self, void*){
......
......@@ -15,8 +15,8 @@
static auto JFABase_doc = bob::extension::ClassDoc(
BOB_EXT_MODULE_PREFIX ".JFABase",
"A JFABase instance can be seen as a container for U, V and D when performing Joint Factor Analysis (JFA)."
"References: [Vogt2008,McCool2013]",
"A JFABase instance can be seen as a container for :math:`U`, :math:`V` and :math:`D` when performing Joint Factor Analysis (JFA).\n\n"
"References: [Vogt2008]_ [McCool2013]_",
""
).add_constructor(
bob::extension::FunctionDoc(
......@@ -25,14 +25,14 @@ static auto JFABase_doc = bob::extension::ClassDoc(
"",
true
)
.add_prototype("gmm,ru,rv","")
.add_prototype("ubm,ru,rv","")
.add_prototype("other","")
.add_prototype("hdf5","")
.add_prototype("","")
.add_parameter("gmm", ":py:class:`bob.learn.em.GMMMachine`", "The Universal Background Model.")
.add_parameter("ru", "int", "Size of U (Within client variation matrix). In the end the U matrix will have (number_of_gaussians * feature_dimension x ru)")
.add_parameter("rv", "int", "Size of V (Between client variation matrix). In the end the U matrix will have (number_of_gaussians * feature_dimension x rv)")
.add_parameter("ubm", ":py:class:`bob.learn.em.GMMMachine`", "The Universal Background Model.")
.add_parameter("ru", "int", "Size of :math:`U` (Within client variation matrix). In the end the U matrix will have (#gaussians * #feature_dimension x ru)")
.add_parameter("rv", "int", "Size of :math:`V` (Between client variation matrix). In the end the U matrix will have (#gaussians * #feature_dimension x rv)")
.add_parameter("other", ":py:class:`bob.learn.em.JFABase`", "A JFABase object to be copied.")
.add_parameter("hdf5", ":py:class:`bob.io.base.HDF5File`", "An HDF5 file open for reading")
......@@ -176,7 +176,7 @@ int PyBobLearnEMJFABase_Check(PyObject* o) {
static auto shape = bob::extension::VariableDoc(
"shape",
"(int,int, int, int)",
"A tuple that represents the number of gaussians, dimensionality of each Gaussian, dimensionality of the rU (within client variability matrix) and dimensionality of the rV (between client variability matrix) ``(#Gaussians, #Inputs, #rU, #rV)``.",
"A tuple that represents the number of gaussians, dimensionality of each Gaussian, dimensionality of the :math:`rU` (within client variability matrix) and dimensionality of the :math:`rV` (between client variability matrix) ``(#Gaussians, #Inputs, #rU, #rV)``.",
""
);
PyObject* PyBobLearnEMJFABase_getShape(PyBobLearnEMJFABaseObject* self, void*) {
......@@ -206,7 +206,7 @@ PyObject* PyBobLearnEMJFABase_getSupervectorLength(PyBobLearnEMJFABaseObject* se
static auto U = bob::extension::VariableDoc(
"u",
"array_like <float, 2D>",
"Returns the U matrix (within client variability matrix)",
"Returns the :math:`U` matrix (within client variability matrix)",
""
);
PyObject* PyBobLearnEMJFABase_getU(PyBobLearnEMJFABaseObject* self, void*){
......@@ -233,7 +233,7 @@ int PyBobLearnEMJFABase_setU(PyBobLearnEMJFABaseObject* self, PyObject* value, v
static auto V = bob::extension::VariableDoc(
"v",
"array_like <float, 2D>",
"Returns the V matrix (between client variability matrix)",
"Returns the :math:`V` matrix (between client variability matrix)",
""
);
PyObject* PyBobLearnEMJFABase_getV(PyBobLearnEMJFABaseObject* self, void*){
......@@ -261,7 +261,7 @@ int PyBobLearnEMJFABase_setV(PyBobLearnEMJFABaseObject* self, PyObject* value, v
static auto D = bob::extension::VariableDoc(
"d",
"array_like <float, 1D>",
"Returns the diagonal matrix diag(d) (as a 1D vector)",
"Returns the diagonal matrix :math:`diag(d)` (as a 1D vector)",
""
);
PyObject* PyBobLearnEMJFABase_getD(PyBobLearnEMJFABaseObject* self, void*){
......@@ -477,8 +477,8 @@ static auto resize = bob::extension::FunctionDoc(
true
)
.add_prototype("rU,rV")
.add_parameter("rU", "int", "Size of U (Within client variation matrix)")
.add_parameter("rV", "int", "Size of V (Between client variation matrix)");
.add_parameter("rU", "int", "Size of :math:`U` (Within client variation matrix)")
.add_parameter("rV", "int", "Size of :math:`V` (Between client variation matrix)");
static PyObject* PyBobLearnEMJFABase_resize(PyBobLearnEMJFABaseObject* self, PyObject* args, PyObject* kwargs) {
BOB_TRY
......
......@@ -15,8 +15,8 @@
static auto JFAMachine_doc = bob::extension::ClassDoc(
BOB_EXT_MODULE_PREFIX ".JFAMachine",
"A JFAMachine. An attached :py:class:`bob.learn.em.JFABase` should be provided for Joint Factor Analysis. The :py:class:`bob.learn.em.JFAMachine` carries information about the speaker factors y and z, whereas a :py:class:`bob.learn.em.JFABase` carries information about the matrices U, V and D."
"References: [Vogt2008,McCool2013]",
"A JFAMachine. An attached :py:class:`bob.learn.em.JFABase` should be provided for Joint Factor Analysis. The :py:class:`bob.learn.em.JFAMachine` carries information about the speaker factors :math:`y` and :math:`z`, whereas a :py:class:`bob.learn.em.JFABase` carries information about the matrices :math:`U`, :math:`V` and :math:`D`.\n\n"
"References: [Vogt2008]_ [McCool2013]_",
""
).add_constructor(
bob::extension::FunctionDoc(
......@@ -29,7 +29,7 @@ static auto JFAMachine_doc = bob::extension::ClassDoc(
.add_prototype("other","")
.add_prototype("hdf5","")
.add_parameter("jfa", ":py:class:`bob.learn.em.JFABase`", "The JFABase associated with this machine")
.add_parameter("jfa_base", ":py:class:`bob.learn.em.JFABase`", "The JFABase associated with this machine")
.add_parameter("other", ":py:class:`bob.learn.em.JFAMachine`", "A JFAMachine object to be copied.")
.add_parameter("hdf5", ":py:class:`bob.io.base.HDF5File`", "An HDF5 file open for reading")
......@@ -188,7 +188,7 @@ PyObject* PyBobLearnEMJFAMachine_getSupervectorLength(PyBobLearnEMJFAMachineObje
static auto Y = bob::extension::VariableDoc(
"y",
"array_like <float, 1D>",
"Returns the y speaker factor. Eq (30) from [McCool2013]",
"Returns the :math:`y` speaker factor. Eq (30) from [McCool2013]_",
""
);
PyObject* PyBobLearnEMJFAMachine_getY(PyBobLearnEMJFAMachineObject* self, void*){
......@@ -216,7 +216,7 @@ int PyBobLearnEMJFAMachine_setY(PyBobLearnEMJFAMachineObject* self, PyObject* va
static auto Z = bob::extension::VariableDoc(
"z",
"array_like <float, 1D>",
"Returns the z speaker factor. Eq (31) from [McCool2013]",
"Returns the :math:`z` speaker factor. Eq (31) from [McCool2013]_",
""
);
PyObject* PyBobLearnEMJFAMachine_getZ(PyBobLearnEMJFAMachineObject* self, void*){
......@@ -244,8 +244,8 @@ int PyBobLearnEMJFAMachine_setZ(PyBobLearnEMJFAMachineObject* self, PyObject* va
static auto X = bob::extension::VariableDoc(
"x",
"array_like <float, 1D>",
"Returns the X session factor. Eq (29) from [McCool2013]",
"The latent variable x (last one computed). This is a feature provided for convenience, but this attribute is not 'part' of the machine. The session latent variable x is indeed not class-specific, but depends on the sample considered. Furthermore, it is not saved into the machine or used when comparing machines."
"Returns the :math:`X` session factor. Eq (29) from [McCool2013]_",
"The latent variable :math:`x` (last one computed). This is a feature provided for convenience, but this attribute is not 'part' of the machine. The session latent variable :math:`x` is indeed not class-specific, but depends on the sample considered. Furthermore, it is not saved into the machine or used when comparing machines."
);
PyObject* PyBobLearnEMJFAMachine_getX(PyBobLearnEMJFAMachineObject* self, void*){
BOB_TRY
......
......@@ -36,9 +36,9 @@ static auto PLDABase_doc = bob::extension::ClassDoc(
.add_prototype("other","")
.add_prototype("hdf5","")
.add_parameter("dim_D", "int", "Dimensionality of the feature vector.")
.add_parameter("dim_F", "int", "Size of :math:`F`(between class variantion matrix).")
.add_parameter("dim_G", "int", "Size of :math:`G`(within class variantion matrix).")
.add_parameter("dim_d", "int", "Dimensionality of the feature vector.")
.add_parameter("dim_f", "int", "Size of :math:`F` (between class variation matrix).")
.add_parameter("dim_g", "int", "Size of :math:`G` (within class variation matrix).")
.add_parameter("variance_threshold", "double", "The smallest possible value of the variance (Ignored if set to 0.)")
.add_parameter("other", ":py:class:`bob.learn.em.PLDABase`", "A PLDABase object to be copied.")
......@@ -190,7 +190,7 @@ int PyBobLearnEMPLDABase_Check(PyObject* o) {
static auto shape = bob::extension::VariableDoc(
"shape",
"(int,int, int)",
"A tuple that represents the dimensionality of the feature vector :math:`dim_d`, the :math:`F` matrix and the :math:`G` matrix.",
"A tuple that represents the dimensionality of the feature vector dim_d, the :math:`F` matrix and the :math:`G` matrix.",
""
);
PyObject* PyBobLearnEMPLDABase_getShape(PyBobLearnEMPLDABaseObject* self, void*) {
......@@ -259,7 +259,7 @@ int PyBobLearnEMPLDABase_setG(PyBobLearnEMPLDABaseObject* self, PyObject* value,
static auto mu = bob::extension::VariableDoc(
"mu",
"array_like <float, 1D>",
"Gets the :math:`mu` mean vector of the PLDA model",
"Gets the :math:`\\mu` mean vector of the PLDA model",
""
);
PyObject* PyBobLearnEMPLDABase_getMu(PyBobLearnEMPLDABaseObject* self, void*){
......@@ -626,14 +626,14 @@ static PyObject* PyBobLearnEMPLDABase_IsSimilarTo(PyBobLearnEMPLDABaseObject* se
/*** resize ***/
static auto resize = bob::extension::FunctionDoc(
"resize",
"Resizes the dimensionality of the PLDA model. Paramaters :math:`\\mu`, :math:`\\F`, :math:`\\G` and :math:`\\Sigma` are reinitialized.",
"Resizes the dimensionality of the PLDA model. Paramaters :math:`\\mu`, :math:`F`, :math:`G` and :math:`\\Sigma` are reinitialized.",
0,
true
)
.add_prototype("dim_D,dim_F,dim_G")
.add_parameter("dim_D", "int", "Dimensionality of the feature vector.")
.add_parameter("dim_F", "int", "Size of :math:`F`(between class variantion matrix).")
.add_parameter("dim_G", "int", "Size of :math:`F`(within class variantion matrix).");
.add_prototype("dim_d,dim_f,dim_g")
.add_parameter("dim_d", "int", "Dimensionality of the feature vector.")
.add_parameter("dim_f", "int", "Size of :math:`F` (between class variation matrix).")
.add_parameter("dim_g", "int", "Size of :math:`G` (within class variation matrix).");
static PyObject* PyBobLearnEMPLDABase_resize(PyBobLearnEMPLDABaseObject* self, PyObject* args, PyObject* kwargs) {
BOB_TRY
......@@ -645,17 +645,17 @@ static PyObject* PyBobLearnEMPLDABase_resize(PyBobLearnEMPLDABaseObject* self, P
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "iii", kwlist, &dim_D, &dim_F, &dim_G)) Py_RETURN_NONE;
if(dim_D <= 0){
PyErr_Format(PyExc_TypeError, "dim_D argument must be greater than or equal to one");
PyErr_Format(PyExc_TypeError, "dim_d argument must be greater than or equal to one");
Py_RETURN_NONE;
}
if(dim_F <= 0){
PyErr_Format(PyExc_TypeError, "dim_F argument must be greater than or equal to one");
PyErr_Format(PyExc_TypeError, "dim_f argument must be greater than or equal to one");
Py_RETURN_NONE;
}
if(dim_G <= 0){
PyErr_Format(PyExc_TypeError, "dim_G argument must be greater than or equal to one");
PyErr_Format(PyExc_TypeError, "dim_g argument must be greater than or equal to one");
Py_RETURN_NONE;
}
......@@ -671,7 +671,7 @@ static PyObject* PyBobLearnEMPLDABase_resize(PyBobLearnEMPLDABaseObject* self, P
static auto get_gamma = bob::extension::FunctionDoc(
"get_gamma",
"Gets the :math:`\\gamma_a` matrix for a given :math:`a` (number of samples). "
":math:`gamma_{a} = (Id + a F^T \beta F)^{-1} = \\mathcal{F}_{a}`",
":math:`\\gamma_{a}=(Id + a F^T \\beta F)^{-1} = \\mathcal{F}_{a}`",
0,
true
)
......@@ -694,8 +694,8 @@ static PyObject* PyBobLearnEMPLDABase_getGamma(PyBobLearnEMPLDABaseObject* self,
/***** has_gamma *****/
static auto has_gamma = bob::extension::FunctionDoc(
"has_gamma",
"Tells if the :math:`gamma_a` matrix for a given a (number of samples) exists. "
":math:`gamma_a=(Id + a F^T \\beta F)^{-1}`",
"Tells if the :math:`\\gamma_a` matrix for a given a (number of samples) exists. "
":math:`\\gamma_a=(Id + aF^T \\beta F)^{-1}`",
0,
true
)
......@@ -720,8 +720,8 @@ static PyObject* PyBobLearnEMPLDABase_hasGamma(PyBobLearnEMPLDABaseObject* self,
/***** compute_gamma *****/
static auto compute_gamma = bob::extension::FunctionDoc(
"compute_gamma",
"Tells if the :math:`gamma_a` matrix for a given a (number of samples) exists."
":math:`gamma_a = (Id + a F^T \\beta F)^{-1}`",
"Tells if the :math:`\\gamma_a` matrix for a given a (number of samples) exists."
" :math:`\\gamma_a=(Id + a F^T \\beta F)^{-1}`",
0,
true
)
......@@ -747,7 +747,7 @@ static PyObject* PyBobLearnEMPLDABase_computeGamma(PyBobLearnEMPLDABaseObject* s
static auto get_add_gamma = bob::extension::FunctionDoc(
"get_add_gamma",
"Gets the :math:`gamma_a` matrix for a given :math:`f_a` (number of samples)."
":math:`gamma_a = (Id + a F^T \\beta F)^{-1} = \\mathcal{F}_{a}`."
" :math:`\\gamma_a=(Id + a F^T \\beta F)^{-1} =\\mathcal{F}_{a}`."
"Tries to find it from the base machine and then from this machine.",
0,
true
......@@ -797,7 +797,7 @@ static PyObject* PyBobLearnEMPLDABase_hasLogLikeConstTerm(PyBobLearnEMPLDABaseOb
/***** compute_log_like_const_term" *****/
static auto compute_log_like_const_term = bob::extension::FunctionDoc(
"compute_log_like_const_term",
"Computes the log likelihood constant term for a given :math:`a` (number of samples), given the provided :math:`gamma_a` matrix. "
"Computes the log likelihood constant term for a given :math:`a` (number of samples), given the provided :math:`\\gamma_a` matrix. "
":math:`l_{a} = \\frac{a}{2} ( -D log(2\\pi) -log|\\Sigma| +log|\\alpha| +log|\\gamma_a|)`",
0,
......@@ -851,7 +851,7 @@ static PyObject* PyBobLearnEMPLDABase_getAddLogLikeConstTerm(PyBobLearnEMPLDABas
static auto get_log_like_const_term = bob::extension::FunctionDoc(
"get_log_like_const_term",
"Gets the log likelihood constant term for a given :math:`a` (number of samples). "
":math:`l_{a} = \\frac{a}{2} ( -D log(2\\pi) -log|\\Sigma| +log|\\alpha| +log|\\gamma_a|)",
":math:`l_{a}=\\frac{a}{2} ( -D log(2\\pi) -log|\\Sigma| +log|\\alpha| +log|\\gamma_a|)`",
0,
true
)
......@@ -873,10 +873,11 @@ static PyObject* PyBobLearnEMPLDABase_getLogLikeConstTerm(PyBobLearnEMPLDABaseOb
/***** clear_maps *****/
static auto clear_maps = bob::extension::FunctionDoc(
"clear_maps",
"Clears the maps (:math:`gamma_a` and loglike_constterm_a).",
"Clears the maps (:math:`\\gamma_a` and loglike_constterm_a).",
0,
true
);
)
.add_prototype("","");
static PyObject* PyBobLearnEMPLDABase_clearMaps(PyBobLearnEMPLDABaseObject* self, PyObject* args, PyObject* kwargs) {
BOB_TRY
......
......@@ -19,8 +19,8 @@ static auto PLDAMachine_doc = bob::extension::ClassDoc(
BOB_EXT_MODULE_PREFIX ".PLDAMachine",
"This class is a container for an enrolled identity/class. It contains information extracted from the enrollment samples. "
"It should be used in combination with a PLDABase instance."
"References: [ElShafey2014,PrinceElder2007,LiFu2012]",
"It should be used in combination with a PLDABase instance.\n\n"
"References: [ElShafey2014]_, [PrinceElder2007]_, [LiFu2012]_",
""
).add_constructor(
bob::extension::FunctionDoc(
......@@ -35,7 +35,7 @@ static auto PLDAMachine_doc = bob::extension::ClassDoc(
.add_prototype("other","")
.add_prototype("hdf5,plda_base","")
.add_parameter("plda_base", "`bob.learn.em.PLDABase`", "")
.add_parameter("plda_base", ":py:class:`bob.learn.em.PLDABase`", "")
.add_parameter("other", ":py:class:`bob.learn.em.PLDAMachine`", "A PLDAMachine object to be copied.")
.add_parameter("hdf5", ":py:class:`bob.io.base.HDF5File`", "An HDF5 file open for reading")
......@@ -164,7 +164,7 @@ int PyBobLearnEMPLDAMachine_Check(PyObject* o) {
static auto shape = bob::extension::VariableDoc(
"shape",
"(int,int, int)",
"A tuple that represents the dimensionality of the feature vector :math:`dim_d`, the :math:`F` matrix and the :math:`G` matrix.",
"A tuple that represents the dimensionality of the feature vector dim_d, the :math:`F` matrix and the :math:`G` matrix.",
""
);
PyObject* PyBobLearnEMPLDAMachine_getShape(PyBobLearnEMPLDAMachineObject* self, void*) {
......@@ -273,7 +273,7 @@ int PyBobLearnEMPLDAMachine_setPLDABase(PyBobLearnEMPLDAMachineObject* self, PyO
static auto weighted_sum = bob::extension::VariableDoc(
"weighted_sum",
"array_like <float, 1D>",
"Get/Set :math:``\\sum_{i} F^T \\beta x_{i}` value",
"Get/Set :math:`\\sum_{i} F^T \\beta x_{i}` value",
""
);
static PyObject* PyBobLearnEMPLDAMachine_getWeightedSum(PyBobLearnEMPLDAMachineObject* self, PyObject* args, PyObject* kwargs) {
......@@ -464,7 +464,7 @@ static PyObject* PyBobLearnEMPLDAMachine_IsSimilarTo(PyBobLearnEMPLDAMachineObje
static auto get_gamma = bob::extension::FunctionDoc(
"get_gamma",
"Gets the :math:`\\gamma_a` matrix for a given :math:`a` (number of samples). "
":math:`gamma_{a} = (Id + a F^T \beta F)^{-1} = \\mathcal{F}_{a}`",
":math:`\\gamma_{a}=(Id + a F^T \\beta F)^{-1}= \\mathcal{F}_{a}`",
0,
true
)
......@@ -487,8 +487,8 @@ static PyObject* PyBobLearnEMPLDAMachine_getGamma(PyBobLearnEMPLDAMachineObject*
/***** has_gamma *****/
static auto has_gamma = bob::extension::FunctionDoc(
"has_gamma",
"Tells if the :math:`gamma_a` matrix for a given a (number of samples) exists. "
":math:`gamma_a=(Id + a F^T \\beta F)^{-1}`",
"Tells if the :math:`\\gamma_a` matrix for a given a (number of samples) exists. "
":math:`\\gamma_a=(Id + a F^T \\beta F)^{-1}`",
0,
true
)
......@@ -514,7 +514,7 @@ static PyObject* PyBobLearnEMPLDAMachine_hasGamma(PyBobLearnEMPLDAMachineObject*
static auto get_add_gamma = bob::extension::FunctionDoc(
"get_add_gamma",
"Gets the :math:`gamma_a` matrix for a given :math:`f_a` (number of samples)."
":math:`gamma_a = (Id + a F^T \\beta F)^{-1} = \\mathcal{F}_{a}`."
" :math:`\\gamma_a=(Id + a F^T \\beta F)^{-1} =\\mathcal{F}_{a}`."
"Tries to find it from the base machine and then from this machine.",
0,
true
......@@ -566,7 +566,7 @@ static auto get_add_log_like_const_term = bob::extension::FunctionDoc(
"get_add_log_like_const_term",
"Gets the log likelihood constant term for a given :math:`a` (number of samples). "
":math:`l_{a} = \\frac{a}{2} ( -D log(2\\pi) -log|\\Sigma| +log|\\alpha| +log|\\gamma_a|)`",
":math:`l_{a} = \\frac{a}{2} ( -D log(2\\pi) -log|\\Sigma| +log|\\alpha| +log|\\gamma_a|)`",
0,
true
)
......@@ -590,7 +590,7 @@ static PyObject* PyBobLearnEMPLDAMachine_getAddLogLikeConstTerm(PyBobLearnEMPLDA
static auto get_log_like_const_term = bob::extension::FunctionDoc(
"get_log_like_const_term",
"Gets the log likelihood constant term for a given :math:`a` (number of samples). "
":math:`l_{a} = \\frac{a}{2} ( -D log(2\\pi) -log|\\Sigma| +log|\\alpha| +log|\\gamma_a|)",
":math:`l_{a}=\\frac{a}{2}( -D log(2\\pi) -log|\\Sigma| +log|\\alpha| + log|\\gamma_a|)`",
0,
true
)
......@@ -612,10 +612,11 @@ static PyObject* PyBobLearnEMPLDAMachine_getLogLikeConstTerm(PyBobLearnEMPLDAMac
/***** clear_maps *****/
static auto clear_maps = bob::extension::FunctionDoc(
"clear_maps",
"Clears the maps (:math:`gamma_a` and loglike_constterm_a).",
"Clears the maps (:math:`\\gamma_a` and loglike_constterm_a).",
0,
true
);
)
.add_prototype("","");
static PyObject* PyBobLearnEMPLDAMachine_clearMaps(PyBobLearnEMPLDAMachineObject* self, PyObject* args, PyObject* kwargs) {
BOB_TRY
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment