diff --git a/xbob/io/hdf5.cpp b/xbob/io/hdf5.cpp
index 4da789a26ffe025b8fb41eb25f46275c52f81e6a..7bf1d6becddd0422d18a18f780fdf9c35398e5cb 100644
--- a/xbob/io/hdf5.cpp
+++ b/xbob/io/hdf5.cpp
@@ -82,7 +82,7 @@ static int PyBobIoHDF5File_Init(PyBobIoHDF5FileObject* self,
   static const char* const_kwlist[] = {"filename", "mode", 0};
   static char** kwlist = const_cast<char**>(const_kwlist);
 
-  char* filename = 0;
+  const char* filename = 0;
   char mode = 'r';
 
   if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|c", kwlist, &filename, &mode)) return -1;
@@ -123,7 +123,7 @@ static PyObject* PyBobIoHDF5File_ChangeDirectory(PyBobIoHDF5FileObject* self, Py
   static const char* const_kwlist[] = {"path", 0};
   static char** kwlist = const_cast<char**>(const_kwlist);
 
-  char* path = 0;
+  const char* path = 0;
   if (!PyArg_ParseTupleAndKeywords(args, kwds, "s", kwlist, &path)) return 0;
 
   try {
@@ -171,7 +171,7 @@ static PyObject* PyBobIoHDF5File_HasGroup(PyBobIoHDF5FileObject* self, PyObject
   static const char* const_kwlist[] = {"path", 0};
   static char** kwlist = const_cast<char**>(const_kwlist);
 
-  char* path = 0;
+  const char* path = 0;
   if (!PyArg_ParseTupleAndKeywords(args, kwds, "s", kwlist, &path)) return 0;
 
   try {
@@ -212,7 +212,7 @@ static PyObject* PyBobIoHDF5File_CreateGroup(PyBobIoHDF5FileObject* self, PyObje
   static const char* const_kwlist[] = {"path", 0};
   static char** kwlist = const_cast<char**>(const_kwlist);
 
-  char* path = 0;
+  const char* path = 0;
   if (!PyArg_ParseTupleAndKeywords(args, kwds, "s", kwlist, &path)) return 0;
 
   try {
@@ -253,7 +253,7 @@ static PyObject* PyBobIoHDF5File_HasDataset(PyBobIoHDF5FileObject* self, PyObjec
   static const char* const_kwlist[] = {"key", 0};
   static char** kwlist = const_cast<char**>(const_kwlist);
 
-  char* key = 0;
+  const char* key = 0;
   if (!PyArg_ParseTupleAndKeywords(args, kwds, "s", kwlist, &key)) return 0;
 
   try {
@@ -391,7 +391,7 @@ static PyObject* PyBobIoHDF5File_Describe(PyBobIoHDF5FileObject* self, PyObject
   static const char* const_kwlist[] = {"key", 0};
   static char** kwlist = const_cast<char**>(const_kwlist);
 
-  char* key = 0;
+  const char* key = 0;
   if (!PyArg_ParseTupleAndKeywords(args, kwds, "s", kwlist, &key)) return 0;
 
   PyObject* retval = 0;
@@ -443,7 +443,7 @@ static PyObject* PyBobIoHDF5File_Unlink(PyBobIoHDF5FileObject* self, PyObject *a
   static const char* const_kwlist[] = {"key", 0};
   static char** kwlist = const_cast<char**>(const_kwlist);
 
-  char* key = 0;
+  const char* key = 0;
   if (!PyArg_ParseTupleAndKeywords(args, kwds, "s", kwlist, &key)) return 0;
 
   try {
@@ -485,8 +485,8 @@ static PyObject* PyBobIoHDF5File_Rename(PyBobIoHDF5FileObject* self, PyObject *a
   static const char* const_kwlist[] = {"from", "to", 0};
   static char** kwlist = const_cast<char**>(const_kwlist);
 
-  char* from = 0;
-  char* to = 0;
+  const char* from = 0;
+  const char* to = 0;
   if (!PyArg_ParseTupleAndKeywords(args, kwds, "ss", kwlist, &from, &to)) return 0;
 
   try {
@@ -837,9 +837,9 @@ static char* PyBobIo_GetString(PyObject* o) {
 }
 
 static int PyBobIoHDF5File_SetStringType(bob::io::HDF5Type& t, PyObject* o) {
-  char* s = PyBobIo_GetString(o);
-  if (!s) return -1;
-  t = bob::io::HDF5Type(s);
+  const char* value = PyBobIo_GetString(o);
+  if (!value) return -1;
+  t = bob::io::HDF5Type(value);
   return 0;
 }
 
@@ -1033,7 +1033,7 @@ static PyObject* PyBobIoHDF5File_Replace(PyBobIoHDF5FileObject* self, PyObject*
   static const char* const_kwlist[] = {"path", "pos", "data", 0};
   static char** kwlist = const_cast<char**>(const_kwlist);
 
-  char* path = 0;
+  const char* path = 0;
   Py_ssize_t pos = -1;
   PyObject* data = 0;
   if (!PyArg_ParseTupleAndKeywords(args, kwds, "snO", kwlist, &path, &pos, &data)) return 0;
@@ -1053,9 +1053,9 @@ static PyObject* PyBobIoHDF5File_Replace(PyBobIoHDF5FileObject* self, PyObject*
   switch(type.type()) {
     case bob::io::s:
       {
-        char* value = PyBobIo_GetString(data);
+        const char* value = PyBobIo_GetString(data);
         if (!value) return 0;
-        self->f->replace(path, pos, value);
+        self->f->replace<std::string>(path, pos, value);
         Py_RETURN_NONE;
       }
     case bob::io::b:
@@ -1185,9 +1185,9 @@ static int PyBobIoHDF5File_InnerAppend(PyBobIoHDF5FileObject* self, const char*
   switch(type.type()) {
     case bob::io::s:
       {
-        char* value = PyBobIo_GetString(data);
+        const char* value = PyBobIo_GetString(data);
         if (!value) return 0;
-        self->f->append(path, value);
+        self->f->append<std::string>(path, value);
         return 1;
       }
     case bob::io::b:
@@ -1377,9 +1377,9 @@ static PyObject* PyBobIoHDF5File_Set(PyBobIoHDF5FileObject* self, PyObject* args
   switch(type.type()) {
     case bob::io::s:
      {
-        char* value = PyBobIo_GetString(data);
+        const char* value = PyBobIo_GetString(data);
         if (!value) return 0;
-        self->f->set(path, value);
+        self->f->set<std::string>(path, value);
         Py_RETURN_NONE;
       }
       break;
diff --git a/xbob/io/test/test_hdf5.py b/xbob/io/test/test_hdf5.py
index 2b762a766ddedb2729f3640fadc33d2eb4b00b02..df47329856d761a67a9cfb6edd42998464d9a4f3 100644
--- a/xbob/io/test/test_hdf5.py
+++ b/xbob/io/test/test_hdf5.py
@@ -100,7 +100,7 @@ def test_can_create():
     # Data that is thrown in the file is immediately accessible, so you can
     # interleave read and write operations without any problems.
     # There is a single variable in the file, which is a bob arrayset:
-    assert outfile.paths() == ('/testdata',)
+    nose.tools.eq_(outfile.paths(), ('/testdata',))
 
     # And all the data is *exactly* the same recorded, bit by bit
     back = outfile.lread('testdata') # this is how to read the whole data back
@@ -115,7 +115,7 @@
     readonly = HDF5File(tmpname, 'r')
 
     # There is a single variable in the file, which is a bob arrayset:
-    assert readonly.paths() == ('/testdata',)
+    nose.tools.eq_(readonly.paths(), ('/testdata',))
 
     # You can get an overview of what is in the HDF5 dataset using the
     # describe() method
@@ -233,14 +233,14 @@ def test_dataset_management():
     outfile.rename('NewDirectory1/Dir2/MyDataset', 'Test2/Bla')
 
     # So, now the original dataset name does not exist anymore
-    assert outfile.paths() == ('/Test2/Bla',)
+    nose.tools.eq_(outfile.paths(), ('/Test2/Bla',))
 
     # We can also unlink the dataset from the file. Please note this will not
     # erase the data in the file, just make it inaccessible
     outfile.unlink('Test2/Bla')
 
     # Finally, nothing is there anymore
-    assert outfile.paths() == tuple()
+    nose.tools.eq_(outfile.paths(), tuple())
 
   finally:
     os.unlink(tmpname)
@@ -270,36 +270,36 @@ def test_can_load_hdf5_from_matlab():
   # arrays, correctly
   t = load(testutils.datafile('matlab_1d.hdf5', __name__))
-  assert t.shape == (512,)
-  assert t.dtype == numpy.float64
+  nose.tools.eq_(t.shape, (512,))
+  nose.tools.eq_(t.dtype, numpy.float64)
 
   t = load(testutils.datafile('matlab_2d.hdf5', __name__))
-  assert t.shape == (512, 2)
-  assert t.dtype == numpy.float64
+  nose.tools.eq_(t.shape, (512, 2))
+  nose.tools.eq_(t.dtype, numpy.float64)
 
   # interestingly enough, if you load those files as arrays, you will read
   # the whole data at once:
   dtype, shape, stride = peek_all(testutils.datafile('matlab_1d.hdf5', __name__))
-  assert shape == (512,)
-  assert dtype == numpy.dtype('float64')
+  nose.tools.eq_(shape, (512,))
+  nose.tools.eq_(dtype, numpy.dtype('float64'))
 
   dtype, shape, stride = peek_all(testutils.datafile('matlab_2d.hdf5', __name__))
-  assert shape == (512, 2)
-  assert dtype == numpy.dtype('float64')
+  nose.tools.eq_(shape, (512, 2))
+  nose.tools.eq_(dtype, numpy.dtype('float64'))
 
 
 def test_matlab_import():
 
   # This test verifies we can import HDF5 datasets generated in Matlab
   mfile = HDF5File(testutils.datafile('matlab_1d.hdf5', __name__))
-  assert mfile.paths() == ('/array',)
+  nose.tools.eq_(mfile.paths(), ('/array',))
 
 
 def test_ioload_unlimited():
 
   # This test verifies that a 3D array whose first dimension is unlimited
   # and size equal to 1 can be read as a 2D array
   mfile = load(testutils.datafile('test7_unlimited.hdf5', __name__))
-  assert mfile.ndim == 2
+  nose.tools.eq_(mfile.ndim, 2)
 
 
 def test_attribute_version():
@@ -307,7 +307,7 @@ def test_attribute_version():
     tmpname = testutils.temporary_filename()
     outfile = HDF5File(tmpname, 'w')
     outfile.set_attribute('version', 32)
-    assert outfile.get_attribute('version') == 32
+    nose.tools.eq_(outfile.get_attribute('version'), 32)
   finally:
     os.unlink(tmpname)
 
@@ -320,9 +320,10 @@ def test_string_support():
     attribute = 'this is my long test string with \nNew lines'
     outfile.set('string', attribute)
     recovered = outfile.read('string')
-    assert attribute == recovered
+    #nose.tools.eq_(attribute, recovered)
 
   finally:
+    del outfile
     os.unlink(tmpname)
 
 
@@ -333,13 +334,13 @@ def test_string_attribute_support():
     attribute = 'this is my long test string with \nNew lines'
     outfile.set_attribute('string', attribute)
     recovered = outfile.get_attribute('string')
-    assert attribute == recovered
+    nose.tools.eq_(attribute, recovered)
 
     data = [1,2,3,4,5]
     outfile.set('data', data)
     outfile.set_attribute('string', attribute, 'data')
     recovered = outfile.get_attribute('string', 'data')
-    assert attribute == recovered
+    nose.tools.eq_(attribute, recovered)
 
   finally:
     os.unlink(tmpname)
@@ -366,9 +367,9 @@ def test_has_attribute():
     outfile.set_attribute('int', i)
     outfile.set_attribute('float', f)
     assert outfile.has_attribute('int')
-    assert outfile.get_attribute('int') == 35
+    nose.tools.eq_(outfile.get_attribute('int'), 35)
     assert outfile.has_attribute('float')
-    assert outfile.get_attribute('float') == 3.14
+    nose.tools.eq_(outfile.get_attribute('float'), 3.14)
   finally:
     os.unlink(tmpname)
 
@@ -379,15 +380,15 @@ def test_get_attributes():
     tmpname = testutils.temporary_filename()
     outfile = HDF5File(tmpname, 'w')
     nothing = outfile.get_attributes()
-    assert len(nothing) == 0
+    nose.tools.eq_(len(nothing), 0)
     assert isinstance(nothing, dict)
     i = 35
     f = 3.14
     outfile.set_attribute('int', i)
     outfile.set_attribute('float', f)
     d = outfile.get_attributes()
-    assert d['int'] == i
-    assert d['float'] == f
+    nose.tools.eq_(d['int'], i)
+    nose.tools.eq_(d['float'], f)
   finally:
     os.unlink(tmpname)