diff --git a/.github/workflows/cmake-build.yml b/.github/workflows/cmake-build.yml index cda19e1003..b57c9248b5 100644 --- a/.github/workflows/cmake-build.yml +++ b/.github/workflows/cmake-build.yml @@ -363,4 +363,29 @@ jobs: - name: Stop the docker run: | docker exec --user root build /bin/bash -c "chown -R 1001 /home/p00user/src " - docker container stop build + docker container stop build + + pixi_tests: + runs-on: ubuntu-latest + container: + image: debian:trixie + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + + - name: install pixi + shell: bash + run: | + apt-get update + apt-get -qq -y dist-upgrade + apt-get -qq update && apt-get install -qq -y flake8 python3 curl bash + curl -fsSL https://pixi.sh/install.sh | sh + export PATH=/github/home/.pixi/bin:$PATH + pixi shell-hook > .sh.sh + source .sh.sh + pixi add rattler-build compilers + - name: build h5cpp + shell: bash + run: | + source .sh.sh + pixi run rattler-build build --recipe .github/workflows/pixi/recipe.yaml diff --git a/.github/workflows/pixi/recipe.yaml b/.github/workflows/pixi/recipe.yaml new file mode 100644 index 0000000000..d8e5f7b3b0 --- /dev/null +++ b/.github/workflows/pixi/recipe.yaml @@ -0,0 +1,67 @@ +context: + version: 0.7.1 + +package: + name: h5cpp + version: ${{ version }} + +source: + - path: ../../../ + +build: + number: 2 + script: + - cmake -B build ${CMAKE_ARGS} -DH5CPP_CONAN=DISABLE -DCMAKE_INSTALL_PREFIX=${PREFIX} + - cmake --build build + - cmake --install build + - ctest --test-dir build --extra-verbose --no-tests=error + +requirements: + build: + - hdf5 + - cmake + - ninja + - zlib + - make + host: + - hdf5 + - catch2 + - zlib + run_exports: + - ${{ pin_subpackage("h5cpp", upper_bound="x.x") }} + +tests: + - package_contents: + include: + - h5cpp/file/file.hpp + - h5cpp/core/*.hpp + - h5cpp/attribute/*.hpp + - h5cpp/dataspace/*.hpp + - h5cpp/datatype/*.hpp + - h5cpp/error/*.hpp + - h5cpp/file/*.hpp + - h5cpp/filter/*.hpp + - h5cpp/node/*.hpp + - 
h5cpp/property/*.hpp + - h5cpp/utilities/*.hpp + - h5cpp/contrib/nexus/ebool.hpp + - h5cpp/contrib/stl/*.hpp + lib: + - h5cpp + +about: + homepage: https://github.com/ess-dmsc/h5cpp + license: LGPL-2.1-only + license_file: LICENSE + summary: C++ wrapper for hdf5 + description: | + h5cpp is C++ wrapper for hdf5 + which significantly simplifies development of HDF5 code. + It provides a high level abstraction to the HDF5 low-level types. + documentation: https://ess-dmsc.github.io/h5cpp + repository: https://github.com/ess-dmsc/h5cpp + +extra: + recipe-maintainers: + - jkotan + - yuelongyu diff --git a/.github/workflows/pixi/variants.yaml b/.github/workflows/pixi/variants.yaml new file mode 100644 index 0000000000..b638de6ce5 --- /dev/null +++ b/.github/workflows/pixi/variants.yaml @@ -0,0 +1,4 @@ +zlib: + - 1 +# hdf5: +# - 2.0.0 diff --git a/pixi.toml b/pixi.toml new file mode 100644 index 0000000000..c68a864d71 --- /dev/null +++ b/pixi.toml @@ -0,0 +1,15 @@ +[workspace] +authors = ["Jan Kotanski "] +channels = ["conda-forge"] +name = "h5cpp" +platforms = ["linux-64"] +# platforms = ["linux-64", "linux-aarch64", "osx-arm64", "osx-64", "win-64"] +version = "0.7.1" + +[tasks] + +[dependencies] +rattler-build = ">=0.60.0,<0.61" +ipython = ">=9.11.0,<10" +python = "==3.14" +conda-smithy = ">=3.56.3,<4" diff --git a/src/h5cpp/node/dataset.hpp b/src/h5cpp/node/dataset.hpp index fcf8eb9307..6db2598e56 100644 --- a/src/h5cpp/node/dataset.hpp +++ b/src/h5cpp/node/dataset.hpp @@ -415,6 +415,50 @@ class DLL_EXPORT Dataset : public Node property::DatasetTransferList::get()) const; + //! + //! \brief read dataset chunk (*since hdf5 1.10.2*) + //! + //! Read a chunk from a dataset to an instance of T with given byte size. + //! + //! \throws std::runtime_error in case of a failure + //! \tparam T source type + //! \param data reference to the source instance of T + //! \param byte_size of data + //! 
\param offset logical position of the first element of the chunk in the dataset's dataspace + //! \param dtpl reference to a dataset transfer property list + //! \return filter_mask mask of which filters are used with the chunk + //! + template<typename T> + std::uint32_t read_chunk(T &data, + size_t byte_size, + std::vector<hsize_t> offset, + const property::DatasetTransferList &dtpl = + property::DatasetTransferList::get()) const; + + + //! + //! \brief read dataset chunk + //! + //! Read a chunk from a dataset to an instance of T. + //! + //! \throws std::runtime_error in case of a failure + //! \tparam T source type + //! \param data reference to the source instance of T + //! \param byte_size of data + //! \param mem_type reference to the memory data type + //! \param offset logical position of the first element of the chunk in the dataset's dataspace + //! \param dtpl reference to a dataset transfer property list + //! \return filter_mask mask of which filters are used with the chunk + //! + template<typename T> + std::uint32_t read_chunk(T &data, + size_t byte_size, + const datatype::Datatype &mem_type, + std::vector<hsize_t> & offset, + const property::DatasetTransferList &dtpl = + property::DatasetTransferList::get()) const; + + //! //! \brief read dataset chunk //! 
@@ -940,6 +984,16 @@ std::uint32_t Dataset::read_chunk(T &data, return read_chunk(data, mem_type_holder.get(data), offset, dtpl); } +template<typename T> +std::uint32_t Dataset::read_chunk(T &data, + size_t byte_size, + std::vector<hsize_t> offset, + const property::DatasetTransferList &dtpl) const +{ + hdf5::datatype::DatatypeHolder mem_type_holder; + return read_chunk(data, byte_size, mem_type_holder.get(data), offset, dtpl); +} + template<typename T> std::uint32_t Dataset::read_chunk(T &data, const datatype::Datatype &mem_type, @@ -949,7 +1003,18 @@ std::uint32_t Dataset::read_chunk(T &data, std::uint32_t filter_mask; if(mem_type.get_class() == datatype::Class::Integer) { -#if H5_VERSION_GE(1,10,3) +#if H5_VERSION_GE(2,0,0) + if(H5Dread_chunk1(static_cast<hid_t>(*this), + static_cast<hid_t>(dtpl), + offset.data(), + &filter_mask, + dataspace::ptr(data))<0) + { + std::stringstream ss; + ss<<"Failure to read chunk data from dataset ["<<link().path()<<"]"; + error::Singleton::instance().throw_with_stack(ss.str()); + } +#elif H5_VERSION_GE(1,10,3) if(H5Dread_chunk(static_cast<hid_t>(*this), static_cast<hid_t>(dtpl), offset.data(), @@ -982,6 +1047,42 @@ std::uint32_t Dataset::read_chunk(T &data, return filter_mask; } + +template<typename T> +std::uint32_t Dataset::read_chunk(T &data, + size_t byte_size, + const datatype::Datatype &mem_type, + std::vector<hsize_t> & offset, + const property::DatasetTransferList &dtpl) const +{ + std::uint32_t filter_mask; + if(mem_type.get_class() == datatype::Class::Integer) + { +#if H5_VERSION_GE(2,0,0) + if(H5Dread_chunk(static_cast<hid_t>(*this), + static_cast<hid_t>(dtpl), + offset.data(), + &filter_mask, + dataspace::ptr(data), byte_size)<0) + { + std::stringstream ss; + ss<<"Failure to read chunk data from dataset ["<<link().path()<<"]"; + error::Singleton::instance().throw_with_stack(ss.str()); + } +#else + (void) byte_size; + filter_mask = read_chunk(data, mem_type, offset, dtpl); +#endif + } + else + { + (void) byte_size; + filter_mask = read_chunk(data, mem_type, offset, dtpl); + } + return filter_mask; +} + diff --git a/src/h5cpp/property/file_access.hpp b/src/h5cpp/property/file_access.hpp index 7926a78fa2..1442cce087 100644 --- a/src/h5cpp/property/file_access.hpp +++ b/src/h5cpp/property/file_access.hpp @@ -48,6 +48,9 @@ enum class LibVersion : std::underlying_type<H5F_libver_t>::type { #endif #if H5_VERSION_GE(1,13,0) V114 = H5F_LIBVER_V114, +#endif +#if H5_VERSION_GE(2,0,0) + V200 = H5F_LIBVER_V200, #endif Earliest = H5F_LIBVER_EARLIEST }; diff --git 
a/test/node/dataset_direct_chunk_test.cpp b/test/node/dataset_direct_chunk_test.cpp index 4bf81ac902..f9aba3e35a 100644 --- a/test/node/dataset_direct_chunk_test.cpp +++ b/test/node/dataset_direct_chunk_test.cpp @@ -111,6 +111,20 @@ SCENARIO("testing dataset access via chunks") { dataset.read(read_value, framespace); REQUIRE(frame == read_value); } + AND_THEN("we can read chunk the data back") { + UShorts read_chunk_value(xdim * ydim); + for (long long unsigned int i = 0; i != nframe; i++) { + dataset.read_chunk(read_chunk_value, {i, 0, 0}); + REQUIRE(frame == read_chunk_value); + } + AND_THEN("we can read chunk the data back with given buffer byte_size") { + UShorts read_chunk_svalue(xdim * ydim); + for (long long unsigned int i = 0; i != nframe; i++) { + dataset.read_chunk(read_chunk_svalue, xdim * ydim * sizeof(UShorts::value_type), {i, 0, 0}); + REQUIRE(frame == read_chunk_svalue); + } + } + } } } }