Skip to content

Commit

Permalink
switch to hsize_t for hdf5 offset and stride (#814)
Browse files Browse the repository at this point in the history
* switch from int to hsize_t for offset

* adjust python hdf5 offset tests, since negative numbers are no longer possible to pass

* update changelog

Co-authored-by: Cyrus Harrison <cyrush@llnl.gov>
  • Loading branch information
Kevin Huynh and cyrush committed Aug 12, 2021
1 parent 8b73f34 commit a46c532
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 84 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Expand Up @@ -25,6 +25,8 @@ and this project aspires to adhere to [Semantic Versioning](https://semver.org/s
- Fixed a bug that was causing the `conduit::blueprint::mesh::topology::unstructured::generate_*` functions to produce bad results for polyhedral input topologies with heterogeneous elements (e.g. tets and hexs).
- Added a new function signature for `blueprint::mesh::topology::unstructured::generate_sides`, which performs the same task as the original and also takes fields from the original topology and maps them onto the new topology.

#### Relay
- Changed HDF5 offset support to use 64-bit unsigned integers for offsets, strides, and sizes.

## [0.7.2] - Released 2021-05-19

Expand Down
42 changes: 15 additions & 27 deletions src/libs/relay/conduit_relay_io_hdf5.cpp
Expand Up @@ -1511,23 +1511,18 @@ write_conduit_leaf_to_hdf5_dataset(const Node &node,
hid_t h5_dtype_id = conduit_dtype_to_hdf5_dtype(dt,ref_path);
herr_t h5_status = -1;

int offset = 0;
hsize_t offset = 0;
if(opts.has_child("offset"))
{
offset = opts["offset"].to_value();
}

int stride = 1;
hsize_t stride = 1;
if(opts.has_child("stride"))
{
stride = opts["stride"].to_value();
}

if (offset < 0)
{
CONDUIT_ERROR("Offset must be non-negative.");
}
else if (stride < 1)
if (stride == 0)
{
CONDUIT_ERROR("Stride must be greater than zero.");
}
Expand Down Expand Up @@ -1577,8 +1572,8 @@ write_conduit_leaf_to_hdf5_dataset(const Node &node,
hsize_t node_size[1] = {(hsize_t) dt.number_of_elements()};
hid_t nodespace = H5Screate_simple(1, node_size, NULL);

hsize_t offsets[1] = {(hsize_t) offset};
hsize_t strides[1] = {(hsize_t) stride};
hsize_t offsets[1] = {offset};
hsize_t strides[1] = {stride};

// convert the fixed dataset to an extendible dataset if necessary
if (dataset_max_dims[0] != H5S_UNLIMITED)
Expand Down Expand Up @@ -2496,26 +2491,19 @@ read_hdf5_dataset_into_conduit_node(hid_t hdf5_dset_id,

index_t nelems = H5Sget_simple_extent_npoints(h5_dspace_id);

int offset = 0;
hsize_t offset = 0;
if(opts.has_child("offset"))
{
offset = opts["offset"].to_value();
}

int stride = 1;
hsize_t stride = 1;
if(opts.has_child("stride"))
{
stride = opts["stride"].to_value();
}

if (offset < 0)
{
CONDUIT_HDF5_ERROR(ref_path,
"Error reading HDF5 Dataset with options:"
<< opts.to_yaml() <<
"`offset` must be non-negative.");
}
else if (stride < 1)
if (stride == 0)
{
CONDUIT_HDF5_ERROR(ref_path,
"Error reading HDF5 Dataset with options:"
Expand All @@ -2524,13 +2512,13 @@ read_hdf5_dataset_into_conduit_node(hid_t hdf5_dset_id,
}

// get the number of elements in the dataset given the offset and stride
int nelems_from_offset = (nelems - offset) / stride;
hsize_t nelems_from_offset = (nelems - offset) / stride;
if ((nelems - offset) % stride != 0)
{
nelems_from_offset++;
}

int nelems_to_read = nelems_from_offset;
hsize_t nelems_to_read = nelems_from_offset;
if(opts.has_child("size"))
{
nelems_to_read = opts["size"].to_value();
Expand All @@ -2546,7 +2534,7 @@ read_hdf5_dataset_into_conduit_node(hid_t hdf5_dset_id,
// copy metadata to the node under hard-coded keys
if (only_get_metadata)
{
dest["num_elements"] = (int) nelems_from_offset;
dest["num_elements"] = nelems_from_offset;
}
else
{
Expand Down Expand Up @@ -2594,9 +2582,9 @@ read_hdf5_dataset_into_conduit_node(hid_t hdf5_dset_id,
conduit_dtype_to_hdf5_dtype_cleanup(h5_dtype_id);
}

hsize_t node_size[1] = {(hsize_t) nelems_to_read};
hsize_t offsets[1] = {(hsize_t) offset};
hsize_t strides[1] = {(hsize_t) stride};
hsize_t node_size[1] = {nelems_to_read};
hsize_t offsets[1] = {offset};
hsize_t strides[1] = {stride};
hid_t nodespace = H5Screate_simple(1,node_size,NULL);
hid_t dataspace = H5Dget_space(hdf5_dset_id);

Expand Down Expand Up @@ -2627,7 +2615,7 @@ read_hdf5_dataset_into_conduit_node(hid_t hdf5_dset_id,
{
CONDUIT_HDF5_ERROR(ref_path,
"Error reading HDF5 Dataset with options:"
<< opts.to_yaml()
<< opts.to_yaml()
<< "Cannot read using offset (" << offset << ")"
<< " greater than the number of entries in"
<< " the HDF5 dataset (" << nelems << ")");
Expand Down
24 changes: 0 additions & 24 deletions src/tests/relay/python/t_python_relay_io_handle.py
Expand Up @@ -255,7 +255,6 @@ def test_io_handle_offset_stride_size(self):
# expect no diff
self.assertFalse(n_read.diff(n_check,info))


# huge stride, this will only read the first entry
n_read.reset()
opts.reset()
Expand All @@ -268,29 +267,6 @@ def test_io_handle_offset_stride_size(self):
# expect no diff
self.assertFalse(n_read.diff(n_check,info))


# now some error conditions:
# neg size
n_read.reset()
opts.reset()
opts["size"] = -100
with self.assertRaises(IOError):
h.read(node=n_read,options=opts)

# neg stride
n_read.reset()
opts.reset()
opts["stride"] = -1
with self.assertRaises(IOError):
h.read(node=n_read,options=opts)

# neg offset
n_read.reset()
opts.reset()
opts["offset"] = -1
with self.assertRaises(IOError):
h.read(node=n_read,options=opts)

# huge size
n_read.reset()
opts.reset()
Expand Down
17 changes: 5 additions & 12 deletions src/tests/relay/t_relay_io_handle.cpp
Expand Up @@ -697,17 +697,10 @@ TEST(conduit_relay_io_handle, test_offset_and_stride)
opts["size"] = -100;
EXPECT_THROW(h.read(n_read,opts),conduit::Error);


// neg stride
// zero stride
n_read.reset();
opts.reset();
opts["stride"] = -1;
EXPECT_THROW(h.read(n_read,opts),conduit::Error);

// neg offset
n_read.reset();
opts.reset();
opts["offset"] = -1;
opts["stride"] = 0;
EXPECT_THROW(h.read(n_read,opts),conduit::Error);

// // huge size
Expand Down Expand Up @@ -799,7 +792,7 @@ TEST(conduit_relay_io_handle, test_ref_path_error_msg)

h.read(n_read);
n_read.print();

n_check = n;
// expect no diff
EXPECT_FALSE(n_read.diff(n_check,info));
Expand All @@ -815,10 +808,10 @@ TEST(conduit_relay_io_handle, test_ref_path_error_msg)
catch(conduit::Error &e)
{
std::string msg = e.message();
std::cout << "error message:"
std::cout << "error message:"
<< msg << std::endl;
int count = 0;

std::string::size_type pos = 0;
std::string path = "my/path/to/some/data";

Expand Down
23 changes: 2 additions & 21 deletions src/tests/relay/t_relay_io_hdf5.cpp
Expand Up @@ -367,15 +367,6 @@ TEST(conduit_relay_io_hdf5, write_and_read_conduit_leaf_to_extendible_hdf5_datas
EXPECT_EQ(5,read_vals[7]);
EXPECT_EQ(6,read_vals[8]);

opts["offset"] = -1;
opts["stride"] = 2;
opts_read["offset"] = -1;
opts_read["stride"] = 2;

//this should fail
EXPECT_THROW(io::hdf5_write(n,h5_dset_id,opts),Error);
EXPECT_THROW(io::hdf5_read(h5_dset_id,opts_read,n_read),Error);

opts["offset"] = 0;
opts["stride"] = 0;
opts_read["offset"] = 0;
Expand Down Expand Up @@ -502,15 +493,6 @@ TEST(conduit_relay_io_hdf5, write_and_read_conduit_leaf_to_fixed_hdf5_dataset_ha
EXPECT_EQ(5,read_vals[7]);
EXPECT_EQ(6,read_vals[8]);

opts["offset"] = -1;
opts["stride"] = 2;
opts_read["offset"] = -1;
opts_read["stride"] = 2;

//this should fail
EXPECT_THROW(io::hdf5_write(n,h5_dset_id,opts),Error);
EXPECT_THROW(io::hdf5_read(h5_dset_id,opts_read,n_read),Error);

opts["offset"] = 0;
opts["stride"] = 0;
opts_read["offset"] = 0;
Expand Down Expand Up @@ -1906,10 +1888,10 @@ TEST(conduit_relay_io_hdf5, test_ref_path_error_msg)
catch(conduit::Error &e)
{
std::string msg = e.message();
std::cout << "error message:"
std::cout << "error message:"
<< msg << std::endl;
int count = 0;

std::string::size_type pos = 0;
std::string path = "my/path/to/some/data";

Expand All @@ -1926,4 +1908,3 @@ TEST(conduit_relay_io_hdf5, test_ref_path_error_msg)
}

}

0 comments on commit a46c532

Please sign in to comment.