
HDF5 I/O optimizations #1129

Merged
merged 11 commits on Oct 27, 2021
MPI Guard: H5Pset_all_coll_metadata*
ax3l authored Oct 22, 2021
commit ae9911680840b95423a4b107c08f67a84a60efdc
20 changes: 10 additions & 10 deletions src/IO/HDF5/HDF5IOHandler.cpp
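
This commit adds && openPMD_HAVE_MPI to every H5_VERSION_GE(1,10,0) guard around the collective-metadata property calls: the collective-metadata optimization only applies to MPI-parallel I/O, so these calls should only be compiled into MPI-enabled builds. Below is a minimal sketch of the resulting pattern, assuming openPMD_HAVE_MPI comes from openPMD's config header; the helper name makeGroupAccessList is illustrative, not the library's code:

    #include <hdf5.h>

    // Sketch of the guard pattern this commit applies at each
    // H5Pcreate call site.
    hid_t makeGroupAccessList(bool collectiveMetadata)
    {
        hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
    #if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
        // Collective metadata operations need HDF5 >= 1.10 *and* an
        // MPI-enabled build; otherwise this block is compiled out.
        if (collectiveMetadata)
            H5Pset_all_coll_metadata_ops(gapl, true);
    #else
        (void)collectiveMetadata; // no effect without MPI or pre-1.10
    #endif
        return gapl;
    }
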
@@ -118,7 +118,7 @@ HDF5IOHandlerImpl::HDF5IOHandlerImpl(
}
}

-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
auto const hdf5_collective_metadata = auxiliary::getEnvString( "OPENPMD_HDF5_COLLECTIVE_METADATA", "ON" );
if( hdf5_collective_metadata == "ON" )
m_hdf5_collective_metadata = 1;
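
Note on the toggle above: auxiliary::getEnvString("OPENPMD_HDF5_COLLECTIVE_METADATA", "ON") defaults the optimization to on, and setting OPENPMD_HDF5_COLLECTIVE_METADATA=OFF in the environment disables it. Since the read itself now sits behind openPMD_HAVE_MPI, serial builds skip the variable entirely.
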
@@ -211,7 +211,7 @@ HDF5IOHandlerImpl::createPath(Writable* writable,
throw std::runtime_error("[HDF5] Creating a path in a file opened as read only is not possible.");

hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
if( m_hdf5_collective_metadata )
{
H5Pset_all_coll_metadata_ops(gapl, true);
@@ -324,7 +324,7 @@ HDF5IOHandlerImpl::createDataset(Writable* writable,
}

hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
if( m_hdf5_collective_metadata )
{
H5Pset_all_coll_metadata_ops(gapl, true);
@@ -632,7 +632,7 @@ HDF5IOHandlerImpl::openPath(
hid_t node_id, path_id;

hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
if( m_hdf5_collective_metadata )
{
H5Pset_all_coll_metadata_ops(gapl, true);
@@ -683,7 +683,7 @@ HDF5IOHandlerImpl::openDataset(Writable* writable,
hid_t node_id, dataset_id;

hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
if( m_hdf5_collective_metadata )
{
H5Pset_all_coll_metadata_ops(gapl, true);
@@ -1041,7 +1041,7 @@ HDF5IOHandlerImpl::writeAttribute(Writable* writable,
hid_t node_id, attribute_id;

hid_t fapl = H5Pcreate(H5P_LINK_ACCESS);
-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
if( m_hdf5_collective_metadata )
{
H5Pset_all_coll_metadata_ops(fapl, true);
@@ -1412,7 +1412,7 @@ HDF5IOHandlerImpl::readAttribute(Writable* writable,
herr_t status;

hid_t fapl = H5Pcreate(H5P_LINK_ACCESS);
-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
if( m_hdf5_collective_metadata )
{
H5Pset_all_coll_metadata_ops(fapl, true);
@@ -1850,7 +1850,7 @@ HDF5IOHandlerImpl::listPaths(Writable* writable,
File file = res ? res.get() : getFile( writable->parent ).get();

hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
if( m_hdf5_collective_metadata )
{
H5Pset_all_coll_metadata_ops(gapl, true);
@@ -1895,7 +1895,7 @@ HDF5IOHandlerImpl::listDatasets(Writable* writable,
File file = res ? res.get() : getFile( writable->parent ).get();

hid_t gapl = H5Pcreate(H5P_GROUP_ACCESS);
-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
if( m_hdf5_collective_metadata )
{
H5Pset_all_coll_metadata_ops(gapl, true);
@@ -1940,7 +1940,7 @@ void HDF5IOHandlerImpl::listAttributes(Writable* writable,
hid_t node_id;

hid_t fapl = H5Pcreate(H5P_LINK_ACCESS);
-#if H5_VERSION_GE(1,10,0)
+#if H5_VERSION_GE(1,10,0) && openPMD_HAVE_MPI
if( m_hdf5_collective_metadata )
{
H5Pset_all_coll_metadata_ops(fapl, true);
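
The same two-line guard change repeats for every access property list in the handler: the group-access lists (gapl) in createPath, createDataset, openPath, openDataset, listPaths, and listDatasets, and the link-access lists (named fapl, though created with H5P_LINK_ACCESS) in writeAttribute, readAttribute, and listAttributes.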