From 136a4b13778c39e7be2ea1bca8aae4e1fa6e6c82 Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Fri, 9 Jul 2021 18:48:25 +0200
Subject: [PATCH] PETSc defines are rather complicated

now mpi_f08 can be used on newer PETSc installations if old MPI modules
are not exposed
---
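The new guard enables mpi_f08 only on PETSc 3.15+ builds that do not
re-export the old MPI modules. A minimal sketch of the pattern follows;
module and variable names are invented for illustration, and the
petsc/finclude header spelled out here is the standard PETSc Fortran
include, standing in for the project's actual (unshown) include lines.
With mpi_f08, MPI handles such as file units are derived types rather
than plain integers, which is why declarations need the same guard.

module mpi_guard_sketch
#include <petsc/finclude/petscsys.h>
  use PETScSys
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
  use MPI_f08                                      ! modern bindings: handles are derived types
#endif
  implicit none
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
  type(MPI_File) :: fileUnit                       ! e.g. an MPI-IO file handle
#else
  integer :: fileUnit                              ! old bindings use plain integer handles
#endif
end module mpi_guard_sketch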
 src/HDF5_utilities.f90                       |  6 +++++-
 src/grid/DAMASK_grid.f90                     |  2 +-
 src/grid/discretization_grid.f90             |  2 +-
 src/grid/grid_damage_spectral.f90            |  2 +-
 src/grid/grid_mech_FEM.f90                   |  2 +-
 src/grid/grid_mech_spectral_basic.f90        |  4 ++--
 src/grid/grid_mech_spectral_polarisation.f90 |  4 ++--
 src/grid/grid_thermal_spectral.f90           |  2 +-
 src/grid/spectral_utilities.f90              |  6 +++---
 src/mesh/FEM_utilities.f90                   |  3 +--
 src/mesh/discretization_mesh.f90             |  2 +-
 src/mesh/mesh_mech_FEM.f90                   |  3 +--
 src/parallelization.f90                      |  3 ++-
 src/results.f90                              | 18 +++++++++++-------
 14 files changed, 33 insertions(+), 26 deletions(-)

diff --git a/src/HDF5_utilities.f90 b/src/HDF5_utilities.f90
index afcdd0a64..638b74c0e 100644
--- a/src/HDF5_utilities.f90
+++ b/src/HDF5_utilities.f90
@@ -7,7 +7,11 @@ module HDF5_utilities
   use HDF5
 #ifdef PETSC
-  use PETSc
+#include
+  use PETScSys
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
+  use MPI
+#endif
 #endif

   use prec
diff --git a/src/grid/DAMASK_grid.f90 b/src/grid/DAMASK_grid.f90
index c915db98a..e98b2d818 100644
--- a/src/grid/DAMASK_grid.f90
+++ b/src/grid/DAMASK_grid.f90
@@ -9,7 +9,7 @@ program DAMASK_grid
 #include
   use PETScSys
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

diff --git a/src/grid/discretization_grid.f90 b/src/grid/discretization_grid.f90
index 1454aa050..8d1d38503 100644
--- a/src/grid/discretization_grid.f90
+++ b/src/grid/discretization_grid.f90
@@ -7,7 +7,7 @@ module discretization_grid
 #include
   use PETScSys
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

diff --git a/src/grid/grid_damage_spectral.f90 b/src/grid/grid_damage_spectral.f90
index cfe668329..162d665cb 100644
--- a/src/grid/grid_damage_spectral.f90
+++ b/src/grid/grid_damage_spectral.f90
@@ -9,7 +9,7 @@ module grid_damage_spectral
 #include
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

diff --git a/src/grid/grid_mech_FEM.f90 b/src/grid/grid_mech_FEM.f90
index 004635018..77678137d 100644
--- a/src/grid/grid_mech_FEM.f90
+++ b/src/grid/grid_mech_FEM.f90
@@ -9,7 +9,7 @@ module grid_mechanical_FEM
 #include
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

diff --git a/src/grid/grid_mech_spectral_basic.f90 b/src/grid/grid_mech_spectral_basic.f90
index 2fbb483c0..83b961023 100644
--- a/src/grid/grid_mech_spectral_basic.f90
+++ b/src/grid/grid_mech_spectral_basic.f90
@@ -9,7 +9,7 @@ module grid_mechanical_spectral_basic
 #include
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

@@ -102,7 +102,7 @@ subroutine grid_mechanical_spectral_basic_init
     F                                                                      ! pointer to solution data
   PetscInt, dimension(0:worldsize-1) :: localK
   integer(HID_T) :: fileHandle, groupHandle
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   type(MPI_File) :: fileUnit
 #else
   integer :: fileUnit
diff --git a/src/grid/grid_mech_spectral_polarisation.f90 b/src/grid/grid_mech_spectral_polarisation.f90
index a951fbd62..07edc84b5 100644
--- a/src/grid/grid_mech_spectral_polarisation.f90
+++ b/src/grid/grid_mech_spectral_polarisation.f90
@@ -9,7 +9,7 @@ module grid_mechanical_spectral_polarisation
 #include
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

@@ -115,7 +115,7 @@ subroutine grid_mechanical_spectral_polarisation_init
     F_tau                                                                  ! specific (sub)pointer
   PetscInt, dimension(0:worldsize-1) :: localK
   integer(HID_T) :: fileHandle, groupHandle
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   type(MPI_File) :: fileUnit
 #else
   integer :: fileUnit
diff --git a/src/grid/grid_thermal_spectral.f90 b/src/grid/grid_thermal_spectral.f90
index 1bf815a0f..47c49e76f 100644
--- a/src/grid/grid_thermal_spectral.f90
+++ b/src/grid/grid_thermal_spectral.f90
@@ -9,7 +9,7 @@ module grid_thermal_spectral
 #include
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

diff --git a/src/grid/spectral_utilities.f90 b/src/grid/spectral_utilities.f90
index 38e208770..aa7c2f4a7 100644
--- a/src/grid/spectral_utilities.f90
+++ b/src/grid/spectral_utilities.f90
@@ -8,7 +8,7 @@ module spectral_utilities
 #include
   use PETScSys
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

@@ -985,7 +985,7 @@ subroutine utilities_updateCoords(F)
     rank_t, rank_b, &
     c, &
     ierr
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   type(MPI_Request), dimension(4) :: request
   type(MPI_Status),  dimension(4) :: status
 #else
@@ -1050,7 +1050,7 @@ subroutine utilities_updateCoords(F)
   call MPI_Waitall(4,request,status,ierr)
   if(ierr /=0) error stop 'MPI error'

-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
 ! ToDo
 #else
   if(any(status(MPI_ERROR,:) /= 0)) error stop 'MPI error'
diff --git a/src/mesh/FEM_utilities.f90 b/src/mesh/FEM_utilities.f90
index 981fbe529..6765d3d0d 100644
--- a/src/mesh/FEM_utilities.f90
+++ b/src/mesh/FEM_utilities.f90
@@ -6,11 +6,10 @@ module FEM_utilities
 #include
 #include
 #include
-
   use PETScDMplex
   use PETScDMDA
   use PETScIS
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

diff --git a/src/mesh/discretization_mesh.f90 b/src/mesh/discretization_mesh.f90
index 6b05e6096..88a19ade9 100644
--- a/src/mesh/discretization_mesh.f90
+++ b/src/mesh/discretization_mesh.f90
@@ -11,7 +11,7 @@ module discretization_mesh
   use PETScDMplex
   use PETScDMDA
   use PETScIS
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

diff --git a/src/mesh/mesh_mech_FEM.f90 b/src/mesh/mesh_mech_FEM.f90
index d3749e8ff..6fa2f668b 100644
--- a/src/mesh/mesh_mech_FEM.f90
+++ b/src/mesh/mesh_mech_FEM.f90
@@ -8,12 +8,11 @@ module mesh_mechanical_FEM
 #include
 #include
 #include
-
   use PETScSNES
   use PETScDM
   use PETScDMplex
   use PETScDT
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif

diff --git a/src/parallelization.f90 b/src/parallelization.f90
index f8eb3207d..534478cef 100644
--- a/src/parallelization.f90
+++ b/src/parallelization.f90
@@ -9,11 +9,12 @@ module parallelization
 #ifdef PETSC
 #include
   use PETScSys
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif
 !$ use OMP_LIB
 #endif
+  use prec

   implicit none
diff --git a/src/results.f90 b/src/results.f90
index 8b1e24b51..94625a4b9 100644
--- a/src/results.f90
+++ b/src/results.f90
@@ -12,7 +12,11 @@ module results
   use HDF5_utilities
   use HDF5
 #ifdef PETSC
-  use PETSc
+#include
+  use PETScSys
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
+  use MPI_f08
+#endif
 #endif

   implicit none
@@ -453,7 +457,7 @@ subroutine results_mapping_phase(ID,entry,label)
   call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

-#ifndef PETSc
+#ifndef PETSC
   entryGlobal = entry -1                                                   ! 0-based
 #else
 !--------------------------------------------------------------------------------------------------
@@ -461,7 +465,7 @@
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

-  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)        ! get output at each process
+  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)          ! get output at each process
   if(ierr /= 0) error stop 'MPI error'

   entryOffset = 0
@@ -470,7 +474,7 @@
       entryOffset(ID(co,ce),worldrank) = entryOffset(ID(co,ce),worldrank) +1
     enddo
   enddo
-  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
+  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process
   if(ierr /= 0) error stop 'MPI error'
   entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
   do co = 1, size(ID,1)
@@ -606,7 +610,7 @@ subroutine results_mapping_homogenization(ID,entry,label)
   call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

-#ifndef PETSc
+#ifndef PETSC
   entryGlobal = entry -1                                                   ! 0-based
 #else
 !--------------------------------------------------------------------------------------------------
@@ -614,14 +618,14 @@
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'

-  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)        ! get output at each process
+  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)          ! get output at each process
   if(ierr /= 0) error stop 'MPI error'

   entryOffset = 0
   do ce = 1, size(ID,1)
     entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1
   enddo
-  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
+  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process
   if(ierr /= 0) error stop 'MPI error'
   entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
   do ce = 1, size(ID,1)
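The mapping routines in results.f90 sum per-rank write counts in place
over MPI_COMM_WORLD so that each rank can compute the offset of its own
slice. Below is a stand-alone sketch of that MPI_IN_PLACE pattern with
mpi_f08; variable names are invented, and it passes MPI_INTEGER (the
handle matching default Fortran integers) where the patched code itself
passes MPI_INT.

program allreduce_in_place_sketch
  use MPI_f08
  implicit none

  integer :: worldrank, worldsize, ierr
  integer, allocatable :: writeSize(:)

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,ierr)
  call MPI_Comm_size(MPI_COMM_WORLD,worldsize,ierr)

  allocate(writeSize(0:worldsize-1))
  writeSize = 0
  writeSize(worldrank) = 10 + worldrank                                     ! pretend this rank writes 10+rank entries

  ! each rank contributes its own slot; afterwards every rank holds all counts
  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
  if(ierr /= 0) error stop 'MPI error'

  ! exclusive prefix sum over the lower ranks gives this rank's start offset
  print '(a,i0,a,i0)', 'rank ',worldrank,' starts at offset ',sum(writeSize(0:worldrank-1))

  call MPI_Finalize(ierr)
end program allreduce_in_place_sketch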