Fixed compression level detection for tables and datasets
DavidAce committed Oct 10, 2020
1 parent 84d97ab commit accd0f8
Showing 4 changed files with 33 additions and 9 deletions.
12 changes: 6 additions & 6 deletions h5pp/include/h5pp/details/h5ppFile.h
@@ -874,16 +874,16 @@ namespace h5pp {
hsize_t numRecordsToCopy,
std::string_view tgtTablePath,
hsize_t tgtStartIdx,
-const OptDimsType & chunkDims = std::nullopt,
-const std::optional<unsigned int> compressionLevel = std::nullopt) {
+const OptDimsType & chunkDims = std::nullopt,
+const std::optional<unsigned int> compression = std::nullopt) {
Options options;
options.linkPath = h5pp::util::safe_str(tgtTablePath);
options.dsetDimsChunk = chunkDims;
-options.compression = compressionLevel;
+options.compression = getCompressionLevel(compression);
auto tgtInfo = h5pp::scan::readTableInfo(openFileHandle(), options, plists);
if(not tgtInfo.tableExists or not tgtInfo.tableExists.value())
tgtInfo = createTable(
-srcInfo.tableType.value(), tgtInfo.tablePath.value(), srcInfo.tableTitle.value(), chunkDims, compressionLevel);
+srcInfo.tableType.value(), tgtInfo.tablePath.value(), srcInfo.tableTitle.value(), chunkDims, compression);

copyTableRecords(srcInfo, srcStartIdx, numRecordsToCopy, tgtInfo, tgtStartIdx);
return tgtInfo;
@@ -896,11 +896,11 @@ namespace h5pp {
std::string_view tgtTablePath,
hsize_t tgtStartIdx,
const std::optional<hsize_t> chunkDims = std::nullopt,
-const std::optional<unsigned int> compressionLevel = std::nullopt) {
+const std::optional<unsigned int> compression = std::nullopt) {
Options options;
options.linkPath = h5pp::util::safe_str(srcTablePath);
auto srcInfo = h5pp::scan::readTableInfo(srcLocation, options, plists);
-return copyTableRecords(srcInfo, srcTableSelection, tgtTablePath, tgtStartIdx, chunkDims, compressionLevel);
+return copyTableRecords(srcInfo, srcTableSelection, tgtTablePath, tgtStartIdx, chunkDims, compression);
}

template<typename h5x_src, typename = h5pp::type::sfinae::enable_if_is_h5_loc<h5x_src>>
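Note: in the calls above, getCompressionLevel(compression) presumably resolves the per-call argument against the file-wide default set with setCompressionLevel. A minimal sketch of that resolution logic, not taken from the commit and with illustrative names only:

#include <optional>

// Sketch only: resolve an optional per-call compression level against a file-wide default.
std::optional<unsigned int> resolveCompressionLevel(std::optional<unsigned int> requested,
                                                    std::optional<unsigned int> fileDefault) {
    if(requested) return requested; // an explicit per-call level wins
    return fileDefault;             // otherwise fall back to the file-wide default
}
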
15 changes: 15 additions & 0 deletions h5pp/include/h5pp/details/h5ppHdf5.h
@@ -118,6 +118,21 @@ namespace h5pp::hdf5 {
return getChunkDimensions(dcpl);
}

+[[nodiscard]] inline int getCompressionLevel(const hid::h5p &dsetCreatePropertyList) {
+    auto                        nfilter = H5Pget_nfilters(dsetCreatePropertyList);
+    H5Z_filter_t                filter  = H5Z_FILTER_NONE;
+    std::array<unsigned int, 1> cdval   = {0};
+    std::array<size_t, 1>       cdelm   = {0};
+    for(int idx = 0; idx < nfilter; idx++) {
+        // The first call identifies the filter and sets cdelm to the number of client-data values it defines
+        filter = H5Pget_filter(dsetCreatePropertyList, idx, nullptr, cdelm.data(), cdval.data(), 0, nullptr, nullptr);
+        if(filter != H5Z_FILTER_DEFLATE) continue;
+        // The second call reads the client-data values; for deflate, cd_values[0] is the compression level
+        H5Pget_filter_by_id(dsetCreatePropertyList, filter, nullptr, cdelm.data(), cdval.data(), 0, nullptr, nullptr);
+    }
+    return static_cast<int>(cdval[0]);
+}


[[nodiscard]] inline std::optional<std::vector<hsize_t>> getMaxDimensions(const hid::h5s &space, H5D_layout_t layout) {
if(layout != H5D_CHUNKED) return std::nullopt;
if(H5Sget_simple_extent_type(space) != H5S_SIMPLE) return std::nullopt;
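The new helper walks the dataset creation property list's filter pipeline, looks for the deflate filter, and reads its first client-data value, which holds the compression level. A standalone sketch of the same idea against the plain HDF5 C API, not part of the commit; the file and dataset names are placeholders:

#include <hdf5.h>
#include <cstdio>

// Sketch: query the deflate (gzip) level of an existing dataset.
int main() {
    hid_t file = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    hid_t dset = H5Dopen(file, "somegroup/dset", H5P_DEFAULT);
    hid_t dcpl = H5Dget_create_plist(dset);

    unsigned level    = 0; // 0 means "no deflate filter found"
    int      nfilters = H5Pget_nfilters(dcpl);
    for(int idx = 0; idx < nfilters; idx++) {
        size_t       nelem     = 1; // we only need the first client-data value
        unsigned     cdvals[1] = {0};
        H5Z_filter_t filter    = H5Pget_filter2(dcpl, static_cast<unsigned>(idx), nullptr, &nelem, cdvals, 0, nullptr, nullptr);
        if(filter == H5Z_FILTER_DEFLATE) level = cdvals[0]; // deflate stores its level in cd_values[0]
    }
    std::printf("deflate level: %u\n", level);

    H5Pclose(dcpl);
    H5Dclose(dset);
    H5Fclose(file);
    return 0;
}
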
12 changes: 10 additions & 2 deletions h5pp/include/h5pp/details/h5ppScan.h
@@ -59,7 +59,10 @@ namespace h5pp::scan {
else
info.resizeMode = h5pp::ResizeMode::RESIZE_TO_FIT;
}

+if(not info.compression){
+    hid::h5p plist = H5Dget_create_plist(info.h5Dset.value());
+    info.compression = h5pp::hdf5::getCompressionLevel(plist);
+}
// Get c++ properties
if(not info.cppTypeIndex or not info.cppTypeName or not info.cppTypeSize)
std::tie(info.cppTypeIndex, info.cppTypeName, info.cppTypeSize) = h5pp::hdf5::getCppType(info.h5Type.value());
@@ -534,6 +537,11 @@ namespace h5pp::scan {
if(chunkVec and not chunkVec->empty()) info.chunkSize = chunkVec.value()[0];
}

+if(not info.compressionLevel){
+    hid::h5p plist = H5Dget_create_plist(info.tableDset.value());
+    info.compressionLevel = h5pp::hdf5::getCompressionLevel(plist);
+}

if(not info.cppTypeIndex or not info.cppTypeName or not info.cppTypeSize) {
// Get c++ type information
info.cppTypeIndex = std::vector<std::type_index>();
@@ -569,7 +577,7 @@ namespace h5pp::scan {
if(options.dsetDimsChunk and not options.dsetDimsChunk->empty()) info.chunkSize = options.dsetDimsChunk.value()[0];

if(not info.chunkSize) info.chunkSize = h5pp::util::getChunkDimensions(info.recordBytes.value(), {1}, std::nullopt, H5D_layout_t::H5D_CHUNKED).value()[0];
-if(not info.compressionLevel) info.compressionLevel = h5pp::hdf5::getValidCompressionLevel(info.compressionLevel);
+if(not info.compressionLevel) info.compressionLevel = h5pp::hdf5::getValidCompressionLevel();

info.fieldTypes = std::vector<h5pp::hid::h5t>();
info.fieldOffsets = std::vector<size_t>();
3 changes: 2 additions & 1 deletion tests/test-readWriteTables.cpp
@@ -29,6 +29,7 @@ h5pp::hid::h5t MY_HDF5_PARTICLE_TYPE;

TEST_CASE("Test reading columns from table", "[Table fields]") {
SECTION("Initialize file") {
+file.setCompressionLevel(6);
// Create a type for the char array from the template H5T_C_S1
// The template describes a string with a single char.
// Set the size with H5Tset_size.
@@ -52,7 +53,7 @@ TEST_CASE("Test reading columns from table", "[Table fields]") {
}

SECTION("Create table") {
-auto tableInfo = file.createTable(MY_HDF5_PARTICLE_TYPE, "somegroup/particleTable", "particleTable");
+auto tableInfo = file.createTable(MY_HDF5_PARTICLE_TYPE, "somegroup/particleTable", "particleTable", std::nullopt, 6);
CHECK(tableInfo.tableTitle.value() == "particleTable");
CHECK(tableInfo.numRecords.value() == 0);
CHECK(tableInfo.recordBytes.value() == sizeof(Particle));
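The test now sets a file-wide level and also passes the level explicitly to createTable. A hedged usage sketch along the same lines, not part of the commit; the output path is a placeholder and the compressionLevel check reflects the behaviour expected after this fix:

#include <h5pp/h5pp.h>
#include <cstdio>

// Sketch: MY_HDF5_PARTICLE_TYPE stands for the compound type built in the test's
// "Initialize file" section; the output path is a placeholder.
void checkTableCompression(const h5pp::hid::h5t &MY_HDF5_PARTICLE_TYPE) {
    h5pp::File file("output/compression-test.h5", h5pp::FilePermission::REPLACE);
    file.setCompressionLevel(6);
    auto tableInfo = file.createTable(MY_HDF5_PARTICLE_TYPE, "somegroup/particleTable", "particleTable", std::nullopt, 6);
    // With the detection fix, the table info is expected to report the level that was applied
    if(tableInfo.compressionLevel)
        std::printf("table compression level: %u\n", tableInfo.compressionLevel.value());
}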
