From e7581f06d95a0062211a9dbd5789bd1fda3f7507 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Fri, 23 Nov 2018 17:54:02 +0100 Subject: [PATCH 1/5] Fixed error with parallel write --- src/HDF5_utilities.f90 | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index cda585363..57c505645 100644 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -381,11 +381,18 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file integer(pInt),dimension(:), allocatable :: myShape + + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id myShape = shape(dataset) + +!-------------------------------------------------------------------------------------------------- +!creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + + call h5dopen_f(loc_id,datasetName,dset_id,hdferr) if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) From ef1e9cce0df29fc0b080466ca960f58eba991b33 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Mon, 26 Nov 2018 15:38:31 +0100 Subject: [PATCH 2/5] Functionality to avoid creating datasets in HDF5 of zero dimensions --- src/HDF5_utilities.f90 | 220 ++++++++++++++++++++++++++++------------- 1 file changed, 151 insertions(+), 69 deletions(-) mode change 100644 => 100755 src/HDF5_utilities.f90 diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 old mode 100644 new mode 100755 index 57c505645..3d882da7b --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -374,7 +374,10 @@ end subroutine HDF5_read_pReal_4 !-------------------------------------------------------------------------------------------------- !> @brief subroutine for reading dataset of the type pReal with 5 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) +subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize implicit none real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset @@ -383,8 +386,16 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) integer(pInt),dimension(:), allocatable :: myShape integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id + logical, intent(in), optional :: parallel + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer(HSIZE_T), dimension(5) :: myStart + myShape = shape(dataset) @@ -392,13 +403,44 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName) !creating a property list for transfer properties call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - +!-------------------------------------------------------------------------------------------------- +! 
determine shape of dataset + localShape = shape(dataset) + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(5) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal5: MPI_allreduce') + endif; endif +#endif + myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:4),sum(readSize)] + +!-------------------------------------------------------------------------------------------------- +! open the dataset in the file call h5dopen_f(loc_id,datasetName,dset_id,hdferr) if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) +!-------------------------------------------------------------------------------------------------- +!get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dget_space_f') +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5sselect_hyperslab_f') +!-------------------------------------------------------------------------------------------------- +! read + call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T),hdferr) if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dclose_f') +!-------------------------------------------------------------------------------------------------- +! close property lists and datatypes + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dclose_f') end subroutine HDF5_read_pReal_5 @@ -618,7 +660,6 @@ subroutine HDF5_read_pInt_7(dataset,loc_id,datasetName) end subroutine HDF5_read_pInt_7 - !-------------------------------------------------------------------------------------------------- !> @brief subroutine for writing dataset of type pReal with 1 dimensions !-------------------------------------------------------------------------------------------------- @@ -638,26 +679,29 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(1) :: myStart +!------------------------------------------------------------------------------------------------- +! 
determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(1) -#ifdef PETSc + #ifdef PETSc if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal1: MPI_allreduce') endif; endif -#endif + #endif myStart = int([sum(outputSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:0),sum(outputSize)] @@ -693,15 +737,15 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- !close types, dataspaces call h5pclose_f(plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: plist_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: plist_id') call h5dclose_f(dset_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5dclose_f') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: h5dclose_f') call h5sclose_f(filespace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/filespace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: h5sclose_f/filespace_id') call h5sclose_f(memspace_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5sclose_f/memspace_id') + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_PReal1: h5sclose_f/memspace_id') -end subroutine HDF5_write_pReal1 +end subroutine HDF5_write_PReal1 !-------------------------------------------------------------------------------------------------- @@ -723,16 +767,19 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(2) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(2) #ifdef PETSc @@ -740,7 +787,7 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal2: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt2: MPI_allreduce') endif; endif #endif myStart = int([0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -808,16 +855,19 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(3) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(3) #ifdef PETSc @@ -825,7 +875,7 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal3: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt3: MPI_allreduce') endif; endif #endif myStart = int([0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -893,16 +943,19 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(4) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(4) #ifdef PETSc @@ -910,7 +963,7 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal4: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt4: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -978,16 +1031,19 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(5) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(5) #ifdef PETSc @@ -995,7 +1051,7 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal5: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt5: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1063,16 +1119,19 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(6) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(6) #ifdef PETSc @@ -1080,7 +1139,7 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal6: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt6: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1148,16 +1207,19 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) outputSize !< contribution of all processes + integer :: ierr integer(HDF5_ERR_TYPE) :: hdferr - integer :: ierr integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(7) :: myStart +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) !-------------------------------------------------------------------------------------------------- -! determine shape of dataset - localShape = shape(dataset) allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(7) #ifdef PETSc @@ -1165,7 +1227,7 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5pset_dxpl_mpio_f') call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process - if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal7: MPI_allreduce') + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt7: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T) @@ -1214,8 +1276,10 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel) end subroutine HDF5_write_pReal7 + + !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for writing dataset of the type pInt with 1 dimensions +!> @brief subroutine for writing dataset of type pInt with 1 dimensions !-------------------------------------------------------------------------------------------------- subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel) use numerics, only: & @@ -1238,12 +1302,14 @@ subroutine HDF5_write_pInt1(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(1) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt1: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! 
determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(1) #ifdef PETSc @@ -1324,12 +1390,14 @@ subroutine HDF5_write_pInt2(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(2) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt2: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(2) #ifdef PETSc @@ -1410,12 +1478,14 @@ subroutine HDF5_write_pInt3(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(3) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt3: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(3) #ifdef PETSc @@ -1496,12 +1566,14 @@ subroutine HDF5_write_pInt4(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(4) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! 
determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(4) #ifdef PETSc @@ -1582,12 +1654,14 @@ subroutine HDF5_write_pInt5(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(5) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt5: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(5) #ifdef PETSc @@ -1668,12 +1742,14 @@ subroutine HDF5_write_pInt6(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(6) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt6: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(6) #ifdef PETSc @@ -1754,12 +1830,14 @@ subroutine HDF5_write_pInt7(dataset,loc_id,datasetName,parallel) integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id integer(HSIZE_T), dimension(7) :: myStart - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt7: h5pcreate_f') - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! 
determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!-------------------------------------------------------------------------------------------------- allocate(outputSize(worldsize), source = 0_pInt) outputSize(worldrank+1) = localShape(7) #ifdef PETSc @@ -1820,3 +1898,7 @@ end module HDF5_Utilities + + + + From 0265732e084f8e3c9cdcd286fb1b06a5fc0d99d6 Mon Sep 17 00:00:00 2001 From: Vitesh Shah Date: Tue, 27 Nov 2018 18:50:51 +0100 Subject: [PATCH 3/5] Parallel writing and reading of integer datasets working --- src/HDF5_utilities.f90 | 1401 ++++++++++++++++++++++++++++++++-------- 1 file changed, 1130 insertions(+), 271 deletions(-) diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90 index 3d882da7b..01ca3407d 100755 --- a/src/HDF5_utilities.f90 +++ b/src/HDF5_utilities.f90 @@ -5,11 +5,11 @@ !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH !-------------------------------------------------------------------------------------------------- module HDF5_utilities - use prec - use IO - use HDF5 + use prec + use IO + use HDF5 #ifdef PETSc - use PETSC + use PETSC #endif implicit none @@ -21,21 +21,21 @@ module HDF5_utilities !> @brief reads pInt or pReal data of defined shape from file !-------------------------------------------------------------------------------------------------- interface HDF5_read - module procedure HDF5_read_pReal_1 - module procedure HDF5_read_pReal_2 - module procedure HDF5_read_pReal_3 - module procedure HDF5_read_pReal_4 - module procedure HDF5_read_pReal_5 - module procedure HDF5_read_pReal_6 - module procedure HDF5_read_pReal_7 + module procedure HDF5_read_pReal1 + module procedure HDF5_read_pReal2 + module procedure HDF5_read_pReal3 + module procedure HDF5_read_pReal4 + module procedure HDF5_read_pReal5 + module procedure HDF5_read_pReal6 + module procedure HDF5_read_pReal7 - module procedure HDF5_read_pInt_1 - module procedure HDF5_read_pInt_2 - module procedure HDF5_read_pInt_3 - module procedure HDF5_read_pInt_4 - module procedure HDF5_read_pInt_5 - module procedure HDF5_read_pInt_6 - module procedure HDF5_read_pInt_7 !ABOVE 8 DIMENSIONS IT GIVES ERROR: THE CALL TO H5DREAD_F DOESNT WORK + module procedure HDF5_read_pInt1 + module procedure HDF5_read_pInt2 + module procedure HDF5_read_pInt3 + module procedure HDF5_read_pInt4 + module procedure HDF5_read_pInt5 + module procedure HDF5_read_pInt6 + module procedure HDF5_read_pInt7 !ABOVE 8 DIMENSIONS IT GIVES ERROR: THE CALL TO H5DREAD_F DOESNT WORK end interface HDF5_read @@ -277,135 +277,392 @@ subroutine HDF5_addIntegerAttribute(entity,attrLabel,attrValue) end subroutine HDF5_addIntegerAttribute -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 1 dimension -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_1(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call 
IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape1: h5dclose_f') - -end subroutine HDF5_read_pReal_1 !-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 2 dimensions +!> @brief subroutine for reading dataset of type pReal with 1 dimensions !-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_2(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape2: h5dclose_f') - -end subroutine HDF5_read_pReal_2 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 3 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_3(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape3: h5dclose_f') - -end subroutine HDF5_read_pReal_3 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 4 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_4(dataset,loc_id,datasetName) - - implicit none - real(pReal), intent(out), dimension(:,:,:,:) :: dataset - integer(HID_T), intent(in) :: loc_id !< file or group handle - character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer(HDF5_ERR_TYPE) :: hdferr - integer(HID_T) :: dset_id - myShape = shape(dataset) - - call h5dopen_f(loc_id,datasetName,dset_id,hdferr) - if (hdferr < 0) call 
IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dopen_f') - call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dread_f') - call h5dclose_f(dset_id,hdferr) - if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape4: h5dclose_f') - -end subroutine HDF5_read_pReal_4 - -!-------------------------------------------------------------------------------------------------- -!> @brief subroutine for reading dataset of the type pReal with 5 dimensions -!-------------------------------------------------------------------------------------------------- -subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName,parallel) +subroutine HDF5_read_pReal1(dataset,loc_id,datasetName,parallel) use numerics, only: & worldrank, & worldsize implicit none - real(pReal), intent(out), dimension(:,:,:,:,:) :: dataset + real(pReal), intent(inout), dimension(:) :: dataset integer(HID_T), intent(in) :: loc_id !< file or group handle character(len=*), intent(in) :: datasetName !< name of the dataset in the file - integer(pInt),dimension(:), allocatable :: myShape - - integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id logical, intent(in), optional :: parallel - integer :: ierr - integer(HDF5_ERR_TYPE) :: hdferr + integer(pInt), dimension(:), allocatable :: & globalShape, & !< shape of the dataset (all processes) localShape, & !< shape of the dataset (this process) readSize !< contribution of all processes - integer(HSIZE_T), dimension(5) :: myStart - - myShape = shape(dataset) + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(1) :: myStart - -!-------------------------------------------------------------------------------------------------- -!creating a property list for transfer properties - call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) - -!-------------------------------------------------------------------------------------------------- +!------------------------------------------------------------------------------------------------- ! determine shape of dataset localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(1) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total output size over each process
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal1: MPI_allreduce')
  endif; endif
#endif
  myStart = int([sum(readSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:0),sum(readSize)]


!--------------------------------------------------------------------------------------------------
! set I/O mode for read operations to collective
  call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_all_coll_metadata_ops_f')
! open the dataset in the file
  call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dopen_f')

!--------------------------------------------------------------------------------------------------
! get the space_id of dataset in the file
  call h5dget_space_f(dset_id, filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dget_space_f')

!--------------------------------------------------------------------------------------------------
! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5sselect_hyperslab_f')

!--------------------------------------------------------------------------------------------------
! read
  call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, xfer_prp = plist_id)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dread_f')

!--------------------------------------------------------------------------------------------------
!close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5sclose_f/filespace_id')

end subroutine HDF5_read_pReal1


!--------------------------------------------------------------------------------------------------
!> @brief subroutine for reading dataset of type pReal with 2 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_pReal2(dataset,loc_id,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize

  implicit none
  real(pReal), intent(inout), dimension(:,:) :: dataset
  integer(HID_T), intent(in) :: loc_id                       !< file or group handle
  character(len=*), intent(in) :: datasetName                !< name of the dataset in the file
  logical, intent(in), optional :: parallel


  integer(pInt), dimension(:), allocatable :: &
    globalShape, &                                           !< shape of the dataset (all processes)
    localShape, &                                            !< shape of the dataset (this process)
    readSize                                                 !< contribution of all processes
  integer :: ierr
  integer(HDF5_ERR_TYPE) :: hdferr
  integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
  integer(HSIZE_T), dimension(2) :: myStart

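!-------------------------------------------------------------------------------------------------
! note: the early return in the shape guard below happens before any HDF5 call; under a
! collective transfer every process must take part in every collective call, so this exit is
! assumed to fire on all processes together or on none (all ranks see a zero-sized block)
!-------------------------------------------------------------------------------------------------
! 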
determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(2) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal2: MPI_allreduce') + endif; endif +#endif + myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:1),sum(readSize)] + + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_all_coll_metadata_ops_f') +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read
  call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, xfer_prp = plist_id)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dread_f')

!--------------------------------------------------------------------------------------------------
!close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5sclose_f/filespace_id')

end subroutine HDF5_read_pReal2


!--------------------------------------------------------------------------------------------------
!> @brief subroutine for reading dataset of type pReal with 3 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_pReal3(dataset,loc_id,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize

  implicit none
  real(pReal), intent(inout), dimension(:,:,:) :: dataset
  integer(HID_T), intent(in) :: loc_id                       !< file or group handle
  character(len=*), intent(in) :: datasetName                !< name of the dataset in the file
  logical, intent(in), optional :: parallel


  integer(pInt), dimension(:), allocatable :: &
    globalShape, &                                           !< shape of the dataset (all processes)
    localShape, &                                            !< shape of the dataset (this process)
    readSize                                                 !< contribution of all processes
  integer :: ierr
  integer(HDF5_ERR_TYPE) :: hdferr
  integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
  integer(HSIZE_T), dimension(3) :: myStart

!-------------------------------------------------------------------------------------------------
! determine shape of dataset
  localShape = shape(dataset)
  if (any(localShape(1:size(localShape)) == 0)) return

!-------------------------------------------------------------------------------------------------
! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)

!-------------------------------------------------------------------------------------------------
! creating a property list for data access properties
  call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
!--------------------------------------------------------------------------------------------------
  allocate(readSize(worldsize), source = 0_pInt)
  readSize(worldrank+1) = localShape(3)
#ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
    if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)      ! get total output size over each process
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal3: MPI_allreduce')
  endif; endif
#endif
  myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:2),sum(readSize)]


!--------------------------------------------------------------------------------------------------
! set I/O mode for read operations to collective
  call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_all_coll_metadata_ops_f')
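! note: h5pset_all_coll_metadata_ops_f marks metadata reads on this access list as collective,
! so the h5dopen_f below (which receives aplist_id as dapl_id) is issued by all processes
! together rather than rank by rank
! 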
open the dataset in the file
  call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dopen_f')

!--------------------------------------------------------------------------------------------------
! get the space_id of dataset in the file
  call h5dget_space_f(dset_id, filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dget_space_f')

!--------------------------------------------------------------------------------------------------
! select a hyperslab (the portion of the current process) in the file
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5sselect_hyperslab_f')

!--------------------------------------------------------------------------------------------------
! read
  call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
                 file_space_id = filespace_id, xfer_prp = plist_id)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dread_f')

!--------------------------------------------------------------------------------------------------
!close types, dataspaces
  call h5pclose_f(plist_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: plist_id')
  call h5dclose_f(dset_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5dclose_f')
  call h5sclose_f(filespace_id, hdferr)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5sclose_f/filespace_id')

end subroutine HDF5_read_pReal3


!--------------------------------------------------------------------------------------------------
!> @brief subroutine for reading dataset of type pReal with 4 dimensions
!--------------------------------------------------------------------------------------------------
subroutine HDF5_read_pReal4(dataset,loc_id,datasetName,parallel)
  use numerics, only: &
    worldrank, &
    worldsize

  implicit none
  real(pReal), intent(inout), dimension(:,:,:,:) :: dataset
  integer(HID_T), intent(in) :: loc_id                       !< file or group handle
  character(len=*), intent(in) :: datasetName                !< name of the dataset in the file
  logical, intent(in), optional :: parallel


  integer(pInt), dimension(:), allocatable :: &
    globalShape, &                                           !< shape of the dataset (all processes)
    localShape, &                                            !< shape of the dataset (this process)
    readSize                                                 !< contribution of all processes
  integer :: ierr
  integer(HDF5_ERR_TYPE) :: hdferr
  integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
  integer(HSIZE_T), dimension(4) :: myStart

!-------------------------------------------------------------------------------------------------
! determine shape of dataset
  localShape = shape(dataset)
  if (any(localShape(1:size(localShape)) == 0)) return

!-------------------------------------------------------------------------------------------------
! creating a property list for transfer properties
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)

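!-------------------------------------------------------------------------------------------------
! note: two property lists are in play per read: the transfer list (plist_id) carries the
! collective MPI-IO mode consumed by h5dread_f, while the access list (aplist_id) created
! below carries metadata-access settings consumed by h5dopen_f
!-------------------------------------------------------------------------------------------------
! 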
creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- + allocate(readSize(worldsize), source = 0_pInt) + readSize(worldrank+1) = localShape(4) +#ifdef PETSc + if (present(parallel)) then; if (parallel) then + call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_dxpl_mpio_f') + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal4: MPI_allreduce') + endif; endif +#endif + myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) + globalShape = [localShape(1:3),sum(readSize)] + + +!-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_all_coll_metadata_ops_f') +! open the dataset in the file + call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dopen_f') + +!-------------------------------------------------------------------------------------------------- +! get the space_id of dataset in the file + call h5dget_space_f(dset_id, filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dget_space_f') + +!-------------------------------------------------------------------------------------------------- +! select a hyperslab (the portion of the current process) in the file + call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5sselect_hyperslab_f') + +!-------------------------------------------------------------------------------------------------- +! 
read + call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& + file_space_id = filespace_id, xfer_prp = plist_id) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dread_f') + +!-------------------------------------------------------------------------------------------------- +!close types, dataspaces + call h5pclose_f(plist_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: plist_id') + call h5dclose_f(dset_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5dclose_f') + call h5sclose_f(filespace_id, hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pInt4: h5sclose_f/filespace_id') + +end subroutine HDF5_read_pReal4 + + +!-------------------------------------------------------------------------------------------------- +!> @brief subroutine for reading dataset of type pReal with 5 dimensions +!-------------------------------------------------------------------------------------------------- +subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) + use numerics, only: & + worldrank, & + worldsize + + implicit none + real(pReal), intent(inout), dimension(:,:,:,:,:) :: dataset + integer(HID_T), intent(in) :: loc_id !< file or group handle + character(len=*), intent(in) :: datasetName !< name of the dataset in the file + logical, intent(in), optional :: parallel + + + integer(pInt), dimension(:), allocatable :: & + globalShape, & !< shape of the dataset (all processes) + localShape, & !< shape of the dataset (this process) + readSize !< contribution of all processes + integer :: ierr + integer(HDF5_ERR_TYPE) :: hdferr + integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id + integer(HSIZE_T), dimension(5) :: myStart + +!------------------------------------------------------------------------------------------------- +! determine shape of dataset + localShape = shape(dataset) + if (any(localShape(1:size(localShape)) == 0)) return + +!------------------------------------------------------------------------------------------------- +! creating a property list for transfer properties + call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr) + +!------------------------------------------------------------------------------------------------- +! creating a property list for data access properties + call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr) +!-------------------------------------------------------------------------------------------------- allocate(readSize(worldsize), source = 0_pInt) readSize(worldrank+1) = localShape(5) #ifdef PETSc @@ -419,246 +676,845 @@ subroutine HDF5_read_pReal_5(dataset,loc_id,datasetName,parallel) myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:4),sum(readSize)] + !-------------------------------------------------------------------------------------------------- +! set I/O mode for read operations to collective + call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_all_coll_metadata_ops_f') ! 
open the dataset in the file
-  call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
-  if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dopen_f')
+  call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dopen_f')
+
 !--------------------------------------------------------------------------------------------------
-!get the space_id of dataset in the file
+! get the space_id of dataset in the file
   call h5dget_space_f(dset_id, filespace_id, hdferr)
-  if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dget_space_f')
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dget_space_f')
+
 !--------------------------------------------------------------------------------------------------
 ! select a hyperslab (the portion of the current process) in the file
   call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5sselect_hyperslab_f')
+
 !--------------------------------------------------------------------------------------------------
 ! read
-  call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T),hdferr)
-  if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape5: h5dread_f')
+  call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
+                 file_space_id = filespace_id, xfer_prp = plist_id)
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dread_f')
+
 !--------------------------------------------------------------------------------------------------
-! close property lists and datatypes
+! close types, dataspaces
   call h5pclose_f(plist_id, hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: plist_id')
   call h5dclose_f(dset_id, hdferr)
   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dclose_f')
+  call h5sclose_f(filespace_id, hdferr)
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5sclose_f/filespace_id')
 
-end subroutine HDF5_read_pReal_5
+end subroutine HDF5_read_pReal5
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of the type pReal with 6 dimensions
+!> @brief subroutine for reading dataset of type pReal with 6 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_read_pReal_6(dataset,loc_id,datasetName)
+subroutine HDF5_read_pReal6(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
 
  implicit none
- real(pReal), intent(out), dimension(:,:,:,:,:,:) :: dataset
+ real(pReal), intent(inout), dimension(:,:,:,:,:,:) :: dataset
  integer(HID_T),   intent(in) :: loc_id      !< file or group handle
  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
- integer(pInt),dimension(:), allocatable :: myShape
+ logical, intent(in), optional :: parallel
 
- integer(HDF5_ERR_TYPE) :: hdferr
- integer(HID_T) :: dset_id
- myShape = shape(dataset)
-
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dopen_f')
- call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dread_f')
- call h5dclose_f(dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape6: h5dclose_f')
-
-end subroutine HDF5_read_pReal_6
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                             !< shape of the dataset (all processes)
+   localShape, &                              !< shape of the dataset (this process)
+   readSize                                   !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(6) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+!--------------------------------------------------------------------------------------------------
+ allocate(readSize(worldsize), source = 0_pInt)
+ readSize(worldrank+1) = localShape(6)
+#ifdef PETSc
+ if (present(parallel)) then; if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over each process
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal6: MPI_allreduce')
+ endif; endif
+#endif
+ myStart     = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:5),sum(readSize)]
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_all_coll_metadata_ops_f')
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
+                file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+! close types, dataspaces
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pReal6
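
The offset arithmetic that every one of these routines repeats is the heart of the decomposition: each process contributes only the extent of the last array dimension, a process's offset is the sum of the extents of all lower ranks, and the global extent is the sum over all ranks. A self-contained sketch of the values myStart and globalShape end up holding (illustrative only, not part of the patch; the three extents are invented):

program demo_offsets
  implicit none
  integer, parameter :: worldsize = 3
  integer :: readSize(worldsize), worldrank, myOffset, globalLen

  readSize = [4, 2, 5]                      ! invented per-rank extents of the last dimension
  do worldrank = 0, worldsize-1
    myOffset  = sum(readSize(1:worldrank))  ! 0, 4, 6: where this rank's slab starts in the file
    globalLen = sum(readSize)               ! 11 on every rank: total extent of the dataset
    print '(a,i0,a,i0,a,i0)', 'rank ', worldrank, ': offset ', myOffset, ', global ', globalLen
  end do
end program demo_offsets
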
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of the type pReal with 7 dimensions
+!> @brief subroutine for reading dataset of type pReal with 7 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_read_pReal_7(dataset,loc_id,datasetName)
+subroutine HDF5_read_pReal7(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
 
  implicit none
- real(pReal), intent(out), dimension(:,:,:,:,:,:,:) :: dataset
+ real(pReal), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset
  integer(HID_T),   intent(in) :: loc_id      !< file or group handle
  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
- integer(pInt),dimension(:), allocatable :: myShape
+ logical, intent(in), optional :: parallel
 
- integer(HDF5_ERR_TYPE) :: hdferr
- integer(HID_T) :: dset_id
- myShape = shape(dataset)
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dopen_f')
- call h5dread_f(dset_id,H5T_NATIVE_DOUBLE,dataset,int(myShape,HSIZE_T),hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dread_f')
- call h5dclose_f(dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pReal_shape7: h5dclose_f')
-
-end subroutine HDF5_read_pReal_7
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                             !< shape of the dataset (all processes)
+   localShape, &                              !< shape of the dataset (this process)
+   readSize                                   !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(7) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+!--------------------------------------------------------------------------------------------------
+ allocate(readSize(worldsize), source = 0_pInt)
+ readSize(worldrank+1) = localShape(7)
+#ifdef PETSc
+ if (present(parallel)) then; if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over each process
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal7: MPI_allreduce')
+ endif; endif
+#endif
+ myStart     = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:6),sum(readSize)]
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_all_coll_metadata_ops_f')
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr, dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,&
+                file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+! close types, dataspaces
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pReal7
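
A call site for a routine of this family could look like the fragment below (hypothetical: the file handle, dataset name, and slab sizes are invented; only the last dimension may differ between processes, and every process must make the call when parallel=.true.):

! fragment, assumes a module exposing HDF5_read_pReal5 and an already opened file handle
real(pReal), allocatable, dimension(:,:,:,:,:) :: F
allocate(F(3,3,grid(1),grid(2),grid3))                   ! grid3: this rank's slab depth
call HDF5_read_pReal5(F,fileHandle,'F',parallel=.true.)  ! collective read of the slab
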
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of the type pInt with 1 dimension
+!> @brief subroutine for reading dataset of type pInt with 1 dimension
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_read_pInt_1(dataset,loc_id,datasetName)
+subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
 
  implicit none
- integer(pInt), intent(out), dimension(:) :: dataset
+ integer(pInt), intent(inout), dimension(:) :: dataset
  integer(HID_T),   intent(in) :: loc_id      !< file or group handle
  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
- integer(pInt),dimension(:), allocatable :: myShape
+ logical, intent(in), optional :: parallel
 
- integer(HDF5_ERR_TYPE) :: hdferr
- integer(HID_T) :: dset_id
- myShape = shape(dataset)
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dopen_f')
- call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dread_f')
- call h5dclose_f(dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape1: h5dclose_f')
-
-end subroutine HDF5_read_pInt_1
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                             !< shape of the dataset (all processes)
+   localShape, &                              !< shape of the dataset (this process)
+   readSize                                   !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(1) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+
+!--------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+!--------------------------------------------------------------------------------------------------
+ allocate(readSize(worldsize), source = 0_pInt)
+ readSize(worldrank+1) = localShape(1)
+
+#ifdef PETSc
+ if (present(parallel)) then; if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over each process
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt1: MPI_allreduce')
+ endif; endif
+#endif
+
+ myStart     = int([sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:0),sum(readSize)]
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_all_coll_metadata_ops_f')
+
+!--------------------------------------------------------------------------------------------------
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+                file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+! close types, dataspaces
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pInt1
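
The h5dread_f call above originally passed the file dataspace as mem_space_id; it is corrected here to file_space_id. The two dataspaces play different roles: file_space_id carries the hyperslab selection (where the slab sits in the file), while mem_space_id describes the shape of the receiving buffer. A minimal serial sketch of the distinction (an assumption-laden example: it presumes a file 'data.h5' with a 1-D double dataset 'x' of at least six entries):

program demo_spaces
  use hdf5
  implicit none
  integer :: hdferr
  integer(HID_T) :: file_id, dset_id, filespace, memspace
  integer(HSIZE_T) :: offset(1), count(1)
  real(kind=8) :: buf(4)

  call h5open_f(hdferr)
  call h5fopen_f('data.h5', H5F_ACC_RDONLY_F, file_id, hdferr)
  call h5dopen_f(file_id, 'x', dset_id, hdferr)
  call h5dget_space_f(dset_id, filespace, hdferr)       ! where the data lives in the file
  offset = [2_HSIZE_T]
  count  = [4_HSIZE_T]
  call h5sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, offset, count, hdferr)
  call h5screate_simple_f(1, count, memspace, hdferr)   ! how buf is laid out in memory
  call h5dread_f(dset_id, H5T_NATIVE_DOUBLE, buf, count, hdferr, &
                 mem_space_id = memspace, file_space_id = filespace)
  call h5sclose_f(memspace, hdferr)
  call h5sclose_f(filespace, hdferr)
  call h5dclose_f(dset_id, hdferr)
  call h5fclose_f(file_id, hdferr)
  call h5close_f(hdferr)
end program demo_spaces
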
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of the type pInt with 2 dimensions
+!> @brief subroutine for reading dataset of type pInt with 2 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_read_pInt_2(dataset,loc_id,datasetName)
+subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
 
  implicit none
- integer(pInt), intent(out), dimension(:,:) :: dataset
+ integer(pInt), intent(inout), dimension(:,:) :: dataset
  integer(HID_T),   intent(in) :: loc_id      !< file or group handle
  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
- integer(pInt),dimension(:), allocatable :: myShape
+ logical, intent(in), optional :: parallel
 
- integer(HDF5_ERR_TYPE) :: hdferr
- integer(HID_T) :: dset_id
- myShape = shape(dataset)
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dopen_f')
- call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dread_f')
- call h5dclose_f(dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape2: h5dclose_f')
-
-end subroutine HDF5_read_pInt_2
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                             !< shape of the dataset (all processes)
+   localShape, &                              !< shape of the dataset (this process)
+   readSize                                   !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(2) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+
+!--------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+!--------------------------------------------------------------------------------------------------
+ allocate(readSize(worldsize), source = 0_pInt)
+ readSize(worldrank+1) = localShape(2)
+
+#ifdef PETSc
+ if (present(parallel)) then; if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over each process
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt2: MPI_allreduce')
+ endif; endif
+#endif
+
+ myStart     = int([0,sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:1),sum(readSize)]
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_all_coll_metadata_ops_f')
+
+!--------------------------------------------------------------------------------------------------
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+                file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+! close types, dataspaces
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pInt2
+
+
+!--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of the type pInt with 3 dimensions
+!> @brief subroutine for reading dataset of type pInt with 3 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_read_pInt_3(dataset,loc_id,datasetName)
+subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
 
  implicit none
- integer(pInt), intent(out), dimension(:,:,:) :: dataset
+ integer(pInt), intent(inout), dimension(:,:,:) :: dataset
  integer(HID_T),   intent(in) :: loc_id      !< file or group handle
  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
- integer(pInt),dimension(:), allocatable :: myShape
+ logical, intent(in), optional :: parallel
 
- integer(HDF5_ERR_TYPE) :: hdferr
- integer(HID_T) :: dset_id
- myShape = shape(dataset)
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dopen_f')
- call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dread_f')
- call h5dclose_f(dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape3: h5dclose_f')
-
-end subroutine HDF5_read_pInt_3
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                             !< shape of the dataset (all processes)
+   localShape, &                              !< shape of the dataset (this process)
+   readSize                                   !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(3) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+
+!--------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+!--------------------------------------------------------------------------------------------------
+ allocate(readSize(worldsize), source = 0_pInt)
+ readSize(worldrank+1) = localShape(3)
+
+#ifdef PETSc
+ if (present(parallel)) then; if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over each process
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt3: MPI_allreduce')
+ endif; endif
+#endif
+
+ myStart     = int([0,0,sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:2),sum(readSize)]
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_all_coll_metadata_ops_f')
+
+!--------------------------------------------------------------------------------------------------
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+                file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+! close types, dataspaces
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pInt3
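
The early return on a zero extent implements the stated purpose of this patch, avoiding HDF5 operations on empty datasets. One caveat worth noting: with parallel=.true. the transfers are collective, so the guard only stays safe when every process takes the same branch. The guard itself is plain Fortran and easy to check in isolation:

program demo_zero_extent
  implicit none
  integer, allocatable :: slab(:,:,:)

  allocate(slab(3,3,0))              ! a process that holds no grid points
  print *, shape(slab)               ! prints 3 3 0
  print *, any(shape(slab) == 0)     ! prints T, so the read would return early
end program demo_zero_extent
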
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of the type pInt with 4 dimensions
+!> @brief subroutine for reading dataset of type pInt with 4 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_read_pInt_4(dataset,loc_id,datasetName)
+subroutine HDF5_read_pInt4(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
 
  implicit none
- integer(pInt), intent(out), dimension(:,:,:,:) :: dataset
+ integer(pInt), intent(inout), dimension(:,:,:,:) :: dataset
  integer(HID_T),   intent(in) :: loc_id      !< file or group handle
  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
- integer(pInt),dimension(:), allocatable :: myShape
+ logical, intent(in), optional :: parallel
 
- integer(HDF5_ERR_TYPE) :: hdferr
- integer(HID_T) :: dset_id
- myShape = shape(dataset)
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dopen_f')
- call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dread_f')
- call h5dclose_f(dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape4: h5dclose_f')
-
-end subroutine HDF5_read_pInt_4
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                             !< shape of the dataset (all processes)
+   localShape, &                              !< shape of the dataset (this process)
+   readSize                                   !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(4) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+
+!--------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+!--------------------------------------------------------------------------------------------------
+ allocate(readSize(worldsize), source = 0_pInt)
+ readSize(worldrank+1) = localShape(4)
+
+#ifdef PETSc
+ if (present(parallel)) then; if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over each process
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt4: MPI_allreduce')
+ endif; endif
+#endif
+
+ myStart     = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:3),sum(readSize)]
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_all_coll_metadata_ops_f')
+
+!--------------------------------------------------------------------------------------------------
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+                file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+! close types, dataspaces
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pInt4
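
The MPI_allreduce idiom used throughout deserves a note: every process zeroes the whole readSize vector, writes only its own slot, and a sum reduction then acts as an allgather of the per-process extents; MPI_IN_PLACE makes readSize serve as both send and receive buffer. A self-contained sketch (the extent values are invented; the MPI datatype must match the integer kind of the buffer, which is why patch 5/5 later replaces MPI_LONG with a 4-byte type):

program demo_allreduce
  use mpi
  implicit none
  integer :: ierr, rank, nproc
  integer, allocatable :: contrib(:)

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, rank, ierr)
  call MPI_Comm_size(MPI_COMM_WORLD, nproc, ierr)
  allocate(contrib(nproc), source = 0)
  contrib(rank+1) = 10 + rank        ! only this rank's slot is nonzero
  call MPI_Allreduce(MPI_IN_PLACE, contrib, nproc, MPI_INTEGER, MPI_SUM, MPI_COMM_WORLD, ierr)
  if (rank == 0) print *, 'extents of all ranks: ', contrib
  call MPI_Finalize(ierr)
end program demo_allreduce
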
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of the type pInt with 5 dimensions
+!> @brief subroutine for reading dataset of type pInt with 5 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_read_pInt_5(dataset,loc_id,datasetName)
+subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
 
  implicit none
- integer(pInt), intent(out), dimension(:,:,:,:,:) :: dataset
+ integer(pInt), intent(inout), dimension(:,:,:,:,:) :: dataset
  integer(HID_T),   intent(in) :: loc_id      !< file or group handle
  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
- integer(pInt),dimension(:), allocatable :: myShape
+ logical, intent(in), optional :: parallel
 
- integer(HDF5_ERR_TYPE) :: hdferr
- integer(HID_T) :: dset_id
- myShape = shape(dataset)
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dopen_f')
- call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dread_f')
- call h5dclose_f(dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape5: h5dclose_f')
-
-end subroutine HDF5_read_pInt_5
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                             !< shape of the dataset (all processes)
+   localShape, &                              !< shape of the dataset (this process)
+   readSize                                   !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(5) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+
+!--------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+!--------------------------------------------------------------------------------------------------
+ allocate(readSize(worldsize), source = 0_pInt)
+ readSize(worldrank+1) = localShape(5)
+
+#ifdef PETSc
+ if (present(parallel)) then; if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over each process
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt5: MPI_allreduce')
+ endif; endif
+#endif
+
+ myStart     = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:4),sum(readSize)]
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_all_coll_metadata_ops_f')
+
+!--------------------------------------------------------------------------------------------------
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+                file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+! close types, dataspaces
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pInt5
+
+
+!--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of the type pInt with 6 dimensions
+!> @brief subroutine for reading dataset of type pInt with 6 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_read_pInt_6(dataset,loc_id,datasetName)
+subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
 
  implicit none
- integer(pInt), intent(out), dimension(:,:,:,:,:,:) :: dataset
+ integer(pInt), intent(inout), dimension(:,:,:,:,:,:) :: dataset
  integer(HID_T),   intent(in) :: loc_id      !< file or group handle
  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
- integer(pInt),dimension(:), allocatable :: myShape
+ logical, intent(in), optional :: parallel
 
- integer(HDF5_ERR_TYPE) :: hdferr
- integer(HID_T) :: dset_id
- myShape = shape(dataset)
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dopen_f')
- call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dread_f')
- call h5dclose_f(dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape6: h5dclose_f')
-
-end subroutine HDF5_read_pInt_6
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                             !< shape of the dataset (all processes)
+   localShape, &                              !< shape of the dataset (this process)
+   readSize                                   !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(6) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+
+!--------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+!--------------------------------------------------------------------------------------------------
+ allocate(readSize(worldsize), source = 0_pInt)
+ readSize(worldrank+1) = localShape(6)
+
+#ifdef PETSc
+ if (present(parallel)) then; if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over each process
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt6: MPI_allreduce')
+ endif; endif
+#endif
+
+ myStart     = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:5),sum(readSize)]
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_all_coll_metadata_ops_f')
+
+!--------------------------------------------------------------------------------------------------
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+                file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+! close types, dataspaces
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pInt6
+
+
+!--------------------------------------------------------------------------------------------------
-!> @brief subroutine for reading dataset of the type pInt with 7 dimensions
+!> @brief subroutine for reading dataset of type pInt with 7 dimensions
 !--------------------------------------------------------------------------------------------------
-subroutine HDF5_read_pInt_7(dataset,loc_id,datasetName)
+subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel)
+ use numerics, only: &
+   worldrank, &
+   worldsize
 
  implicit none
- integer(pInt), intent(out), dimension(:,:,:,:,:,:,:) :: dataset
+ integer(pInt), intent(inout), dimension(:,:,:,:,:,:,:) :: dataset
  integer(HID_T),   intent(in) :: loc_id      !< file or group handle
  character(len=*), intent(in) :: datasetName !< name of the dataset in the file
- integer(pInt),dimension(:), allocatable :: myShape
+ logical, intent(in), optional :: parallel
 
- integer(HDF5_ERR_TYPE) :: hdferr
- integer(HID_T) :: dset_id
- myShape = shape(dataset)
- call h5dopen_f(loc_id,datasetName,dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dopen_f')
- call h5dread_f(dset_id,H5T_NATIVE_INTEGER,dataset,int(myShape,HSIZE_T),hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dread_f')
- call h5dclose_f(dset_id,hdferr)
- if (hdferr < 0) call IO_error(0_pInt,ext_msg='HDF5_read_pInt__shape7: h5dclose_f')
-
-end subroutine HDF5_read_pInt_7
+ integer(pInt), dimension(:), allocatable :: &
+   globalShape, &                             !< shape of the dataset (all processes)
+   localShape, &                              !< shape of the dataset (this process)
+   readSize                                   !< contribution of all processes
+ integer :: ierr
+ integer(HDF5_ERR_TYPE) :: hdferr
+ integer(HID_T) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
+ integer(HSIZE_T), dimension(7) :: myStart
+
+!-------------------------------------------------------------------------------------------------
+! determine shape of dataset
+ localShape = shape(dataset)
+ if (any(localShape(1:size(localShape)) == 0)) return
+
+!-------------------------------------------------------------------------------------------------
+! creating a property list for transfer properties
+ call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
+
+!--------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_DATASET_ACCESS_F, aplist_id, hdferr)
+!--------------------------------------------------------------------------------------------------
+ allocate(readSize(worldsize), source = 0_pInt)
+ readSize(worldrank+1) = localShape(7)
+
+#ifdef PETSc
+ if (present(parallel)) then; if (parallel) then
+   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
+   if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_dxpl_mpio_f')
+   call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)     ! get total output size over each process
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt7: MPI_allreduce')
+ endif; endif
+#endif
+
+ myStart     = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
+ globalShape = [localShape(1:6),sum(readSize)]
+
+!--------------------------------------------------------------------------------------------------
+! set I/O mode for read operations to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_all_coll_metadata_ops_f')
+
+!--------------------------------------------------------------------------------------------------
+! open the dataset in the file
+ call h5dopen_f(loc_id,datasetName,dset_id,hdferr,dapl_id = aplist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dopen_f')
+
+!--------------------------------------------------------------------------------------------------
+! get the space_id of dataset in the file
+ call h5dget_space_f(dset_id, filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dget_space_f')
+
+!--------------------------------------------------------------------------------------------------
+! select a hyperslab (the portion of the current process) in the file
+ call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, int(localShape,HSIZE_T), hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5sselect_hyperslab_f')
+
+!--------------------------------------------------------------------------------------------------
+! read
+ call h5dread_f(dset_id, H5T_NATIVE_INTEGER,dataset,int(globalShape,HSIZE_T), hdferr, &
+                file_space_id = filespace_id, xfer_prp = plist_id)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dread_f')
+
+!--------------------------------------------------------------------------------------------------
+! close types, dataspaces
+ call h5pclose_f(plist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: plist_id')
+ call h5dclose_f(dset_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5dclose_f')
+ call h5sclose_f(filespace_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5sclose_f/filespace_id')
+
+end subroutine HDF5_read_pInt7
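
All read routines above assume the file itself was opened for MPI-IO and that transfers go through a collective transfer property list. A condensed, self-contained sketch of that surrounding setup (assumes an HDF5 build with parallel support; the file name is invented):

program demo_collective_setup
  use mpi
  use hdf5
  implicit none
  integer :: ierr, hdferr
  integer(HID_T) :: fapl_id, plist_id, file_id

  call MPI_Init(ierr)
  call h5open_f(hdferr)
  call h5pcreate_f(H5P_FILE_ACCESS_F, fapl_id, hdferr)    ! file access property list
  call h5pset_fapl_mpio_f(fapl_id, MPI_COMM_WORLD, MPI_INFO_NULL, hdferr)
  call h5fopen_f('geometry.h5', H5F_ACC_RDONLY_F, file_id, hdferr, access_prp = fapl_id)
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)  ! data transfer property list
  call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
  ! ... call h5dread_f(..., xfer_prp = plist_id) as in the routines above ...
  call h5pclose_f(plist_id, hdferr)
  call h5pclose_f(fapl_id, hdferr)
  call h5fclose_f(file_id, hdferr)
  call h5close_f(hdferr)
  call MPI_Finalize(ierr)
end program demo_collective_setup
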
 !--------------------------------------------------------------------------------------------------
 !> @brief subroutine for writing dataset of type pReal with 1 dimensions
 !--------------------------------------------------------------------------------------------------
@@ -667,7 +1523,7 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel)
   use numerics, only: &
     worldrank, &
     worldsize
-  
+
  implicit none
  real(pReal), intent(inout), dimension(:) :: dataset
  integer(HID_T), intent(in) :: loc_id        !< file or group handle
@@ -694,14 +1550,14 @@ subroutine HDF5_write_pReal1(dataset,loc_id,datasetName,parallel)
 !--------------------------------------------------------------------------------------------------
  allocate(outputSize(worldsize), source = 0_pInt)
  outputSize(worldrank+1) = localShape(1)
- #ifdef PETSc
+#ifdef PETSc
  if (present(parallel)) then; if (parallel) then
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
    if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal1: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
    if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal1: MPI_allreduce')
  endif; endif
- #endif
+#endif
  myStart = int([sum(outputSize(1:worldrank))],HSIZE_T)
  globalShape = [localShape(1:0),sum(outputSize)]
@@ -787,7 +1643,7 @@ subroutine HDF5_write_pReal2(dataset,loc_id,datasetName,parallel)
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
    if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt2: MPI_allreduce')
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal2: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -875,7 +1731,7 @@ subroutine HDF5_write_pReal3(dataset,loc_id,datasetName,parallel)
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
    if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt3: MPI_allreduce')
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal3: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -963,7 +1819,7 @@ subroutine HDF5_write_pReal4(dataset,loc_id,datasetName,parallel)
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
    if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt4: MPI_allreduce')
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal4: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -1051,7 +1907,7 @@ subroutine HDF5_write_pReal5(dataset,loc_id,datasetName,parallel)
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
    if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt5: MPI_allreduce')
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal5: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -1139,7 +1995,7 @@ subroutine HDF5_write_pReal6(dataset,loc_id,datasetName,parallel)
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
    if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal6: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt6: MPI_allreduce')
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal6: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -1227,7 +2083,7 @@ subroutine HDF5_write_pReal7(dataset,loc_id,datasetName,parallel)
    call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
    if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal7: h5pset_dxpl_mpio_f')
    call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr)   ! get total output size over each process
-   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pInt7: MPI_allreduce')
+   if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_write_pReal7: MPI_allreduce')
  endif; endif
 #endif
  myStart = int([0,0,0,0,0,0,sum(outputSize(1:worldrank))],HSIZE_T)
@@ -1896,6 +2752,9 @@ end subroutine HDF5_write_pInt7
 
 
 end module HDF5_Utilities
+!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+

From d33df38b429ebf2749a2a6c381881cdd2ce994b8 Mon Sep 17 00:00:00 2001
From: Vitesh Shah
Date: Tue, 27 Nov 2018 18:52:54 +0100
Subject: [PATCH 4/5] made it non-executable

---
 src/HDF5_utilities.f90 | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 mode change 100755 => 100644 src/HDF5_utilities.f90

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
old mode 100755
new mode 100644

From ab59274c357c40be21ebba6c9572df37f16dce78 Mon Sep 17 00:00:00 2001
From: Vitesh Shah
Date: Fri, 30 Nov 2018 10:16:04 +0100
Subject: [PATCH 5/5] Able to read the independent datasets (outside the
 groups) in parallel

---
 src/HDF5_utilities.f90 | 144 ++++++++++++++++++++++++++++++++---------
 1 file changed, 115 insertions(+), 29 deletions(-)

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index 01ca3407d..60d4c705d 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -193,7 +193,19 @@ integer(HID_T) function HDF5_openGroup2(FileReadID,groupName)
 
  integer(HDF5_ERR_TYPE) :: hdferr
  integer(HID_T), intent(in) :: FileReadID
+ integer(HID_T) :: aplist_id
 
- call h5gopen_f(FileReadID, trim(groupName), HDF5_openGroup2, hdferr)
+!-------------------------------------------------------------------------------------------------
+! creating a property list for data access properties
+ call h5pcreate_f(H5P_GROUP_ACCESS_F, aplist_id, hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pcreate_f ('//trim(groupName)//')')
+
+!-------------------------------------------------------------------------------------------------
+! setting I/O mode to collective
+ call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
+ if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5pset_all_coll_metadata_ops_f ('//trim(groupName)//')')
+
+!-------------------------------------------------------------------------------------------------
+! opening the group
+ call h5gopen_f(FileReadID, trim(groupName), HDF5_openGroup2, hdferr, gapl_id = aplist_id)
  if (hdferr < 0) call IO_error(1_pInt,ext_msg = 'HDF5_openGroup2: h5gopen_f ('//trim(groupName)//')')
 
 end function HDF5_openGroup2
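
The change to HDF5_openGroup2 makes metadata reads collective: without it, only the ranks that happen to touch the group metadata would issue the read, which can stall or serialize collective file access. Condensed to its core (fragment; gid, file_id and the group name are invented, and h5pset_all_coll_metadata_ops_f requires HDF5 1.10 or newer):

! fragment: open a group so that all ranks read its metadata together
integer(HID_T) :: gapl_id, gid
call h5pcreate_f(H5P_GROUP_ACCESS_F, gapl_id, hdferr)
call h5pset_all_coll_metadata_ops_f(gapl_id, .true., hdferr)  ! collective metadata reads
call h5gopen_f(file_id, 'constituent', gid, hdferr, gapl_id = gapl_id)
call h5pclose_f(gapl_id, hdferr)
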
set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -408,14 +424,18 @@ subroutine HDF5_read_pReal2(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal2: MPI_allreduce') endif; endif #endif myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:1),sum(readSize)] - + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal2: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -495,14 +515,18 @@ subroutine HDF5_read_pReal3(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal3: MPI_allreduce') endif; endif #endif myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:2),sum(readSize)] - + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal3: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -582,14 +606,19 @@ subroutine HDF5_read_pReal4(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal4: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:3),sum(readSize)] - + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal4: h5screate_simple_f/memspace_id') + !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr) @@ -669,13 +698,19 @@ subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal5: MPI_allreduce') endif; endif #endif myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T) globalShape = [localShape(1:4),sum(readSize)] + + !-------------------------------------------------------------------------------------------------- +! create dataspace in memory (local shape) + call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, & + int(localShape,HSIZE_T)) + if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_write_pReal5: h5screate_simple_f/memspace_id') !-------------------------------------------------------------------------------------------------- ! set I/O mode for read operations to collective @@ -698,7 +733,7 @@ subroutine HDF5_read_pReal5(dataset,loc_id,datasetName,parallel) !-------------------------------------------------------------------------------------------------- ! read call h5dread_f(dset_id, H5T_NATIVE_DOUBLE,dataset,int(globalShape,HSIZE_T), hdferr,& - file_space_id = filespace_id, xfer_prp = plist_id) + file_space_id = filespace_id, xfer_prp = plist_id,mem_space_id = memspace_id) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5dread_f') !-------------------------------------------------------------------------------------------------- @@ -756,14 +791,18 @@ subroutine HDF5_read_pReal6(dataset,loc_id,datasetName,parallel) if (present(parallel)) then; if (parallel) then call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_dxpl_mpio_f') - call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process + call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! 
@@ -843,14 +882,19 @@ subroutine HDF5_read_pReal7(dataset,loc_id,datasetName,parallel)
   if (present(parallel)) then; if (parallel) then
     call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_dxpl_mpio_f')
-    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
+    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! get total output size over each process
     if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pReal7: MPI_allreduce')
   endif; endif
 #endif
   myStart = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
   globalShape = [localShape(1:6),sum(readSize)]
-
+
 !--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape)
+  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
+                          int(localShape,HSIZE_T))
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5screate_simple_f/memspace_id')
+
 !--------------------------------------------------------------------------------------------------
 ! set I/O mode for read operations to collective
   call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
@@ -931,7 +975,7 @@ subroutine HDF5_read_pInt1(dataset,loc_id,datasetName,parallel)
   if (present(parallel)) then; if (parallel) then
     call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_dxpl_mpio_f')
-    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
+    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! get total output size over each process
     if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt1: MPI_allreduce')
   endif; endif
 #endif
@@ -939,10 +983,16 @@
   myStart = int([sum(readSize(1:worldrank))],HSIZE_T)
   globalShape = [localShape(1:0),sum(readSize)]
 
+!--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape)
+  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
+                          int(localShape,HSIZE_T))
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5screate_simple_f/memspace_id')
+
 !--------------------------------------------------------------------------------------------------
 ! set I/O mode for read operations to collective
   call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
-  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal1: h5pset_all_coll_metadata_ops_f')
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt1: h5pset_all_coll_metadata_ops_f')
 
 !--------------------------------------------------------------------------------------------------
 ! open the dataset in the file
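
For the offset bookkeeping, a small worked example with invented numbers: suppose three ranks contribute 2, 3 and 4 entries along the last dimension. After the all-reduce, every rank holds readSize = [2, 3, 4]. Rank 1 (worldrank = 1) then computes

  sum(readSize(1:worldrank)) = sum(readSize(1:1)) = 2   ! entries owned by ranks before me
  myStart     = [0, ..., 0, 2]                          ! offset of my hyperslab in the file
  globalShape = [localShape(1:n-1), 9]                  ! 2 + 3 + 4 entries in total

so every rank agrees on the file dataspace while selecting a disjoint slab. Note that the rank-1 (pInt1) case above degenerates cleanly: localShape(1:0) is an empty slice, so globalShape reduces to [sum(readSize)].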
@@ -1021,7 +1071,7 @@ subroutine HDF5_read_pInt2(dataset,loc_id,datasetName,parallel)
   if (present(parallel)) then; if (parallel) then
     call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_dxpl_mpio_f')
-    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
+    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! get total output size over each process
     if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt2: MPI_allreduce')
   endif; endif
 #endif
@@ -1029,10 +1079,16 @@
   myStart = int([0,sum(readSize(1:worldrank))],HSIZE_T)
   globalShape = [localShape(1:1),sum(readSize)]
 
+!--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape)
+  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
+                          int(localShape,HSIZE_T))
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5screate_simple_f/memspace_id')
+
 !--------------------------------------------------------------------------------------------------
 ! set I/O mode for read operations to collective
   call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
-  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal2: h5pset_all_coll_metadata_ops_f')
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt2: h5pset_all_coll_metadata_ops_f')
 
 !--------------------------------------------------------------------------------------------------
 ! open the dataset in the file
@@ -1111,7 +1167,7 @@ subroutine HDF5_read_pInt3(dataset,loc_id,datasetName,parallel)
   if (present(parallel)) then; if (parallel) then
     call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_dxpl_mpio_f')
-    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
+    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! get total output size over each process
     if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt3: MPI_allreduce')
   endif; endif
 #endif
@@ -1119,10 +1175,16 @@
   myStart = int([0,0,sum(readSize(1:worldrank))],HSIZE_T)
   globalShape = [localShape(1:2),sum(readSize)]
 
+!--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape)
+  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
+                          int(localShape,HSIZE_T))
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5screate_simple_f/memspace_id')
+
 !--------------------------------------------------------------------------------------------------
 ! set I/O mode for read operations to collective
   call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
-  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal3: h5pset_all_coll_metadata_ops_f')
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt3: h5pset_all_coll_metadata_ops_f')
 
 !--------------------------------------------------------------------------------------------------
 ! open the dataset in the file
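
Note that h5pset_all_coll_metadata_ops_f acts on the file access property list (aplist_id), not on the data-transfer list, and is only available from HDF5 1.10 on. A sketch of how such an access list would be assembled before the reads above (assumes an MPI-enabled HDF5 build; the file name is illustrative):

program coll_metadata
  use mpi
  use hdf5
  implicit none
  integer(HID_T) :: aplist_id, file_id
  integer        :: hdferr, ierr

  call MPI_Init(ierr)
  call h5open_f(hdferr)

  call h5pcreate_f(H5P_FILE_ACCESS_F, aplist_id, hdferr)
  call h5pset_fapl_mpio_f(aplist_id, MPI_COMM_WORLD, MPI_INFO_NULL, hdferr) ! MPI-IO file driver
  call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)           ! metadata reads become collective

  call h5fopen_f('results.h5', H5F_ACC_RDONLY_F, file_id, hdferr, access_prp = aplist_id)
  ! ... dataset reads as in the hunks above ...
  call h5fclose_f(file_id, hdferr)
  call h5pclose_f(aplist_id, hdferr)
  call h5close_f(hdferr)
  call MPI_Finalize(ierr)
end program coll_metadata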
@@ -1201,7 +1263,7 @@ subroutine HDF5_read_pInt4(dataset,loc_id,datasetName,parallel)
   if (present(parallel)) then; if (parallel) then
     call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_dxpl_mpio_f')
-    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
+    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! get total output size over each process
     if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt4: MPI_allreduce')
   endif; endif
 #endif
@@ -1209,10 +1271,16 @@
   myStart = int([0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
   globalShape = [localShape(1:3),sum(readSize)]
 
+!--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape)
+  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
+                          int(localShape,HSIZE_T))
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5screate_simple_f/memspace_id')
+
 !--------------------------------------------------------------------------------------------------
 ! set I/O mode for read operations to collective
   call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
-  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal4: h5pset_all_coll_metadata_ops_f')
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt4: h5pset_all_coll_metadata_ops_f')
 
 !--------------------------------------------------------------------------------------------------
 ! open the dataset in the file
@@ -1291,7 +1359,7 @@ subroutine HDF5_read_pInt5(dataset,loc_id,datasetName,parallel)
   if (present(parallel)) then; if (parallel) then
     call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_dxpl_mpio_f')
-    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
+    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! get total output size over each process
     if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt5: MPI_allreduce')
   endif; endif
 #endif
@@ -1299,10 +1367,16 @@
   myStart = int([0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
   globalShape = [localShape(1:4),sum(readSize)]
 
+!--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape)
+  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
+                          int(localShape,HSIZE_T))
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5screate_simple_f/memspace_id')
+
 !--------------------------------------------------------------------------------------------------
 ! set I/O mode for read operations to collective
   call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
-  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal5: h5pset_all_coll_metadata_ops_f')
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt5: h5pset_all_coll_metadata_ops_f')
 
 !--------------------------------------------------------------------------------------------------
 ! open the dataset in the file
@@ -1381,7 +1455,7 @@ subroutine HDF5_read_pInt6(dataset,loc_id,datasetName,parallel)
   if (present(parallel)) then; if (parallel) then
     call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_dxpl_mpio_f')
-    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
+    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! get total output size over each process
     if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt6: MPI_allreduce')
   endif; endif
 #endif
@@ -1389,10 +1463,16 @@
   myStart = int([0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
   globalShape = [localShape(1:5),sum(readSize)]
 
+!--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape)
+  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
+                          int(localShape,HSIZE_T))
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5screate_simple_f/memspace_id')
+
 !--------------------------------------------------------------------------------------------------
 ! set I/O mode for read operations to collective
   call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
-  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal6: h5pset_all_coll_metadata_ops_f')
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt6: h5pset_all_coll_metadata_ops_f')
 
 !--------------------------------------------------------------------------------------------------
 ! open the dataset in the file
@@ -1471,7 +1551,7 @@ subroutine HDF5_read_pInt7(dataset,loc_id,datasetName,parallel)
   if (present(parallel)) then; if (parallel) then
     call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_dxpl_mpio_f')
-    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_LONG,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
+    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)  ! get total output size over each process
     if (ierr /= 0) call IO_error(894_pInt,ext_msg='HDF5_read_pInt7: MPI_allreduce')
   endif; endif
 #endif
@@ -1479,10 +1559,16 @@
   myStart = int([0,0,0,0,0,0,sum(readSize(1:worldrank))],HSIZE_T)
   globalShape = [localShape(1:6),sum(readSize)]
 
+!--------------------------------------------------------------------------------------------------
+! create dataspace in memory (local shape)
+  call h5screate_simple_f(size(localShape), int(localShape,HSIZE_T), memspace_id, hdferr, &
+                          int(localShape,HSIZE_T))
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5screate_simple_f/memspace_id')
+
 !--------------------------------------------------------------------------------------------------
 ! set I/O mode for read operations to collective
   call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
-  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pReal7: h5pset_all_coll_metadata_ops_f')
+  if (hdferr < 0) call IO_error(1_pInt,ext_msg='HDF5_read_pInt7: h5pset_all_coll_metadata_ops_f')
 
 !--------------------------------------------------------------------------------------------------
 ! open the dataset in the file
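
Taken together, every hunk in this patch instantiates the same pattern once per rank/type combination: collective transfer property list, size exchange, hyperslab selection, matching memory dataspace, collective metadata operations. For reference, here is the whole pattern condensed into one standalone sketch. It is not DAMASK code: the file and dataset names are invented, the even split along the last dimension is made up, and MPI_INTEGER is used on the assumption that pInt is the default integer kind; an MPI-enabled HDF5 1.10+ build is required.

program parallel_read
  use mpi
  use hdf5
  implicit none
  integer, parameter :: pInt = selected_int_kind(9)
  integer :: ierr, hdferr, worldrank, worldsize
  integer(pInt), dimension(:), allocatable :: readSize
  integer(HID_T) :: aplist_id, plist_id, file_id, dset_id, filespace_id, memspace_id
  integer(HSIZE_T), dimension(2) :: localShape, myStart, globalShape
  real(kind(1.0d0)), dimension(:,:), allocatable :: dataset

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD, worldrank, ierr)
  call MPI_Comm_size(MPI_COMM_WORLD, worldsize, ierr)
  call h5open_f(hdferr)

! file access: MPI-IO driver, collective metadata reads
  call h5pcreate_f(H5P_FILE_ACCESS_F, aplist_id, hdferr)
  call h5pset_fapl_mpio_f(aplist_id, MPI_COMM_WORLD, MPI_INFO_NULL, hdferr)
  call h5pset_all_coll_metadata_ops_f(aplist_id, .true., hdferr)
  call h5fopen_f('results.h5', H5F_ACC_RDONLY_F, file_id, hdferr, access_prp = aplist_id)

! data transfer: collective raw-data reads
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
  call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)

! every rank contributes its share of the last dimension (invented even split)
  allocate(readSize(worldsize), source = 0_pInt)
  readSize(worldrank+1) = 4_pInt
  call MPI_Allreduce(MPI_IN_PLACE, readSize, worldsize, MPI_INTEGER, MPI_SUM, MPI_COMM_WORLD, ierr)

  localShape  = int([10, readSize(worldrank+1)], HSIZE_T)
  myStart     = int([0,  sum(readSize(1:worldrank))], HSIZE_T)
  globalShape = int([10, sum(readSize)], HSIZE_T)
  allocate(dataset(localShape(1), localShape(2)))

! select this rank's hyperslab in the file, mirror it in memory, read collectively
  call h5dopen_f(file_id, 'dataset', dset_id, hdferr)
  call h5dget_space_f(dset_id, filespace_id, hdferr)
  call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, localShape, hdferr)
  call h5screate_simple_f(2, localShape, memspace_id, hdferr)
  call h5dread_f(dset_id, H5T_NATIVE_DOUBLE, dataset, globalShape, hdferr, &
                 mem_space_id = memspace_id, file_space_id = filespace_id, xfer_prp = plist_id)

  call h5sclose_f(memspace_id, hdferr); call h5sclose_f(filespace_id, hdferr)
  call h5dclose_f(dset_id, hdferr);     call h5pclose_f(plist_id, hdferr)
  call h5pclose_f(aplist_id, hdferr);   call h5fclose_f(file_id, hdferr)
  call h5close_f(hdferr)
  call MPI_Finalize(ierr)
end program parallel_read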