@@ -2501,13 +2501,23 @@ void Component::write_HDF5(HighFive::Group& group, bool masses, bool IDs)
25012501 auto dcplI = HighFive::DataSetCreateProps{};
25022502 auto dcplD = HighFive::DataSetCreateProps{};
25032503
2504- if (H5compress or H5chunk) {
2504+ // Compression and chunking. Do not set chunk size larger than
2505+ // nbodies. Turn off compression altogether if nbodies = 0 to avoid
2506+ // HDF5 errors.
2507+ //
2508+ if ((H5compress or H5chunk) and nbodies > 0 ) {
25052509 int chunk = H5chunk;
25062510
2507- // Sanity
2511+ // Clamp chunk to [1, nbodies]: use nbodies/8 as a downsize when
2512+ // H5chunk would exceed the dataset extent, then ensure at least 1
25082513 if (H5chunk >= nbodies) {
25092514 chunk = nbodies/8 ;
25102515 }
2516+      if (chunk < 1) {
2517+	chunk = 1;
2518+      } else if (static_cast<unsigned int>(chunk) > nbodies) {
2519+	chunk = static_cast<int>(nbodies);
2520+      }
25112521
25122522     dcplI.add(HighFive::Chunking(chunk));
25132523     if (H5shuffle) dcplI.add(HighFive::Shuffle());
@@ -2636,14 +2646,20 @@ void Component::write_H5(H5::Group& group)
26362646
26372647 // This could be generalized by registering a user filter, like
26382648 // blosc. Right now, we're using the default (which is gzip)
2639- if (H5compress or H5chunk) {
2649+ //
2650+ // Do not set chunk size larger than number of particles. If the
2651+ // particle number is zero, do not compress.
2652+ //
2653+ if ((H5compress or H5chunk) and h5_particles.size () > 0 ) {
26402654 // Set chunking
26412655 if (H5chunk) {
2642- // Sanity
2656+ // Clamp chunk to [1, nbodies]: use nbodies/8 as a downsize when
2657+ // H5chunk would exceed the dataset extent, then ensure at least 1
26432658 int chunk = H5chunk;
26442659 if (H5chunk >= nbodies) {
26452660 chunk = nbodies/8 ;
26462661 }
2662+      chunk = std::clamp<int>(chunk, 1, static_cast<int>(nbodies));
26472663 hsize_t chunk_dims[1 ] = {static_cast <hsize_t >(chunk)};
26482664 dcpl.setChunk (1 , chunk_dims);
26492665 }