From 3870a70a4e3bc2cafa80d9b8a73e9dcf3239cadf Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Fri, 9 Jul 2021 12:37:39 +0200
Subject: [PATCH] HDF5 seems to use MPI (F90 style)

---
 src/HDF5_utilities.f90 |  3 ---
 src/results.f90        | 11 ++++-------
 2 files changed, 4 insertions(+), 10 deletions(-)

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index 35d499e17..afcdd0a64 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -8,9 +8,6 @@ module HDF5_utilities
   use HDF5
 #ifdef PETSC
   use PETSc
-#ifndef PETSC_HAVE_MPI_F90MODULE_VISIBILITY
-  use MPI
-#endif
 #endif
 
   use prec
diff --git a/src/results.f90 b/src/results.f90
index e74aced05..8b1e24b51 100644
--- a/src/results.f90
+++ b/src/results.f90
@@ -13,9 +13,6 @@ module results
   use HDF5
 #ifdef PETSC
   use PETSc
-#ifndef PETSC_HAVE_MPI_F90MODULE_VISIBILITY
-  use MPI_f08
-#endif
 #endif
 
   implicit none
@@ -464,7 +461,7 @@ subroutine results_mapping_phase(ID,entry,label)
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
 
-  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr) ! get output at each process
+  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
   if(ierr /= 0) error stop 'MPI error'
 
   entryOffset = 0
@@ -473,7 +470,7 @@ subroutine results_mapping_phase(ID,entry,label)
       entryOffset(ID(co,ce),worldrank) = entryOffset(ID(co,ce),worldrank) +1
     enddo
   enddo
-  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process
+  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
   if(ierr /= 0) error stop 'MPI error'
   entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
   do co = 1, size(ID,1)
@@ -617,14 +614,14 @@ subroutine results_mapping_homogenization(ID,entry,label)
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
 
-  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr) ! get output at each process
+  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
   if(ierr /= 0) error stop 'MPI error'
 
   entryOffset = 0
   do ce = 1, size(ID,1)
     entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1
   enddo
-  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process
+  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
   if(ierr /= 0) error stop 'MPI error'
   entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
   do ce = 1, size(ID,1)