Commit d0c2378d authored by Manuel Günther's avatar Manuel Günther
Browse files

Updated documentation of Python bindings to use documentation classes from bob::extension.

parent ad4cb1a9
This diff is collapsed.
This diff is collapsed.
......@@ -13,6 +13,10 @@
#endif
#include <bob.blitz/capi.h>
#include <bob.blitz/cleanup.h>
#include <bob.extension/documentation.h>
extern bool init_File(PyObject* module);
extern bool init_HDF5File(PyObject* module);
/**
* Creates an str object, from a C or C++ string. Returns a **new
......@@ -22,8 +26,15 @@ static PyObject* make_object(const char* s) {
return Py_BuildValue("s", s);
}
// Structured Sphinx documentation for the module-level ``extensions()`` function,
// built with bob::extension::FunctionDoc (header included in this commit as
// bob.extension/documentation.h). Its ``.name()`` and ``.doc()`` accessors are
// wired into the ``module_methods`` table below, replacing the former
// PyDoc_STRVAR(s_extensions_str/s_extensions_doc) constants this commit removes.
static auto s_extensions = bob::extension::FunctionDoc(
"extensions",
"Returns a dictionary containing all extensions and descriptions currently stored on the global codec registry",
"The extensions are returned as a dictionary from the filename extension to a description of the data format."
)
// No input arguments; the single return value is named "extensions".
.add_prototype("", "extensions")
.add_return("extensions", "{str : str}", "A dictionary of supported extensions");
static PyObject* PyBobIo_Extensions(PyObject*) {
BOB_TRY
typedef std::map<std::string, std::string> map_type;
const map_type& table = bob::io::base::CodecRegistry::getExtensions();
......@@ -41,23 +52,15 @@ static PyObject* PyBobIo_Extensions(PyObject*) {
}
return Py_BuildValue("O", retval);
BOB_CATCH_FUNCTION("extensions", 0);
}
PyDoc_STRVAR(s_extensions_str, "extensions");
PyDoc_STRVAR(s_extensions_doc,
"extensions() -> dict\n\
\n\
Returns a dictionary containing all extensions and descriptions\n\
currently stored on the global codec registry\n\
");
static PyMethodDef module_methods[] = {
{
s_extensions_str,
s_extensions.name(),
(PyCFunction)PyBobIo_Extensions,
METH_NOARGS,
s_extensions_doc,
s_extensions.doc(),
},
{0} /* Sentinel */
};
......@@ -79,15 +82,6 @@ static PyModuleDef module_definition = {
static PyObject* create_module (void) {
PyBobIoFile_Type.tp_new = PyType_GenericNew;
if (PyType_Ready(&PyBobIoFile_Type) < 0) return 0;
PyBobIoFileIterator_Type.tp_new = PyType_GenericNew;
if (PyType_Ready(&PyBobIoFileIterator_Type) < 0) return 0;
PyBobIoHDF5File_Type.tp_new = PyType_GenericNew;
if (PyType_Ready(&PyBobIoHDF5File_Type) < 0) return 0;
# if PY_VERSION_HEX >= 0x03000000
PyObject* m = PyModule_Create(&module_definition);
# else
......@@ -100,15 +94,8 @@ static PyObject* create_module (void) {
if (PyModule_AddIntConstant(m, "__api_version__", BOB_IO_BASE_API_VERSION) < 0) return 0;
if (PyModule_AddStringConstant(m, "__version__", BOB_EXT_MODULE_VERSION) < 0) return 0;
/* register the types to python */
Py_INCREF(&PyBobIoFile_Type);
if (PyModule_AddObject(m, "File", (PyObject *)&PyBobIoFile_Type) < 0) return 0;
Py_INCREF(&PyBobIoFileIterator_Type);
if (PyModule_AddObject(m, "File.iter", (PyObject *)&PyBobIoFileIterator_Type) < 0) return 0;
Py_INCREF(&PyBobIoHDF5File_Type);
if (PyModule_AddObject(m, "HDF5File", (PyObject *)&PyBobIoHDF5File_Type) < 0) return 0;
if (!init_File(m)) return 0;
if (!init_HDF5File(m)) return 0;
static void* PyBobIo_API[PyBobIo_API_pointers];
......
......@@ -87,7 +87,7 @@ def test_can_create():
# Data that is thrown in the file is immediately accessible, so you can
# interleave read and write operations without any problems.
# There is a single variable in the file, which is a bob arrayset:
nose.tools.eq_(outfile.paths(), ('/testdata',))
nose.tools.eq_(outfile.paths(), ['/testdata'])
# And all the data is *exactly* the same recorded, bit by bit
back = outfile.lread('testdata') # this is how to read the whole data back
......@@ -102,7 +102,7 @@ def test_can_create():
readonly = HDF5File(tmpname, 'r')
# There is a single variable in the file, which is a bob arrayset:
nose.tools.eq_(readonly.paths(), ('/testdata',))
nose.tools.eq_(readonly.paths(), ['/testdata'])
# You can get an overview of what is in the HDF5 dataset using the
# describe() method
......@@ -116,6 +116,7 @@ def test_can_create():
# Test that writing will really fail
nose.tools.assert_raises(RuntimeError, readonly.append, "testdata", arrays[0])
# And all the data is *exactly* the same recorded, bit by bit
back = readonly.lread('testdata') # how to read the whole data back
for i, b in enumerate(back):
......@@ -220,14 +221,14 @@ def test_dataset_management():
outfile.rename('NewDirectory1/Dir2/MyDataset', 'Test2/Bla')
# So, now the original dataset name does not exist anymore
nose.tools.eq_(outfile.paths(), ('/Test2/Bla',))
nose.tools.eq_(outfile.paths(), ['/Test2/Bla'])
# We can also unlink the dataset from the file. Please note this will not
# erase the data in the file, just make it inaccessible
outfile.unlink('Test2/Bla')
# Finally, nothing is there anymore
nose.tools.eq_(outfile.paths(), tuple())
nose.tools.eq_(outfile.paths(), [])
finally:
os.unlink(tmpname)
......@@ -279,7 +280,7 @@ def test_matlab_import():
# This test verifies we can import HDF5 datasets generated in Matlab
mfile = HDF5File(test_utils.datafile('matlab_1d.hdf5', __name__))
nose.tools.eq_(mfile.paths(), ('/array',))
nose.tools.eq_(mfile.paths(), ['/array'])
def test_ioload_unlimited():
......
......@@ -6,32 +6,34 @@
Python API
============
This section includes information for using the pure Python API of
``bob.io.base``.
This section includes information for using the pure Python API of ``bob.io.base``.
Classes
-------
.. autoclass:: bob.io.base.File
.. autoclass:: bob.io.base.HDF5File
.. autosummary::
bob.io.base.File
bob.io.base.HDF5File
Functions
---------
.. autofunction:: bob.io.base.load
.. autofunction:: bob.io.base.merge
.. autofunction:: bob.io.base.save
.. autosummary::
bob.io.base.load
bob.io.base.merge
bob.io.base.save
bob.io.base.append
bob.io.base.peek
bob.io.base.peek_all
bob.io.base.create_directories_safe
.. autofunction:: bob.io.base.append
bob.io.base.extensions
bob.io.base.get_config
.. autofunction:: bob.io.base.peek
.. autofunction:: bob.io.base.peek_all
.. autofunction:: bob.io.base.create_directories_safe
Details
-------
.. automodule::
bob.io.base
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment