PETSc defines are rather complicated

Now mpi_f08 can be used on newer PETSc installations if the old MPI modules are not exposed.
Martin Diehl 2021-07-09 18:48:25 +02:00
parent 637f78bd52
commit 136a4b1377
14 changed files with 33 additions and 26 deletions
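The guard pattern used throughout the diff is worth spelling out: when DAMASK is built with PETSc, each file now includes the matching finclude header and uses the specific PETSc module (PETScSys, PETScDMDA, ...) instead of the removed umbrella 'use PETSc'; an MPI module (mpi_f08, or the plain mpi module in HDF5_utilities) is then pulled in explicitly, but only on PETSc 3.15 or newer and only when the PETSc build does not already expose its MPI module (PETSC_HAVE_MPI_F90MODULE_VISIBILITY undefined). Below is a minimal sketch of the resulting preamble, assuming a PETSc build; the module name demo_petsc_mpi and the rank-reporting routine are illustrative only and not part of this commit.

module demo_petsc_mpi
#include <petsc/finclude/petscsys.h>
  use PETScSys                             ! specific PETSc module instead of the removed 'use PETSc'
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
  use MPI_f08                              ! newer PETSc does not re-export MPI here, so pull it in directly
#endif
  implicit none

contains

  subroutine report_rank()

    integer :: me, err

    call MPI_Comm_rank(MPI_COMM_WORLD,me,err)  ! MPI comes either from mpi_f08 above or re-exported through PETScSys
    if (err /= 0) error stop 'MPI error'
    print'(a,i0)', ' MPI rank: ',me

  end subroutine report_rank

end module demo_petsc_mpi

MPI is assumed to be initialized elsewhere (PetscInitialize takes care of this in DAMASK), so the guard only changes where the MPI interface comes from, not how it is set up.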


@@ -7,7 +7,11 @@
 module HDF5_utilities
   use HDF5
 #ifdef PETSC
-  use PETSc
+#include <petsc/finclude/petscsys.h>
+  use PETScSys
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
+  use MPI
+#endif
 #endif
   use prec


@@ -9,7 +9,7 @@
 program DAMASK_grid
 #include <petsc/finclude/petscsys.h>
   use PETScSys
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif


@@ -7,7 +7,7 @@
 module discretization_grid
 #include <petsc/finclude/petscsys.h>
   use PETScSys
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif


@@ -9,7 +9,7 @@ module grid_damage_spectral
 #include <petsc/finclude/petscdmda.h>
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif


@@ -9,7 +9,7 @@ module grid_mechanical_FEM
 #include <petsc/finclude/petscdmda.h>
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif


@@ -9,7 +9,7 @@ module grid_mechanical_spectral_basic
 #include <petsc/finclude/petscdmda.h>
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif
@@ -102,7 +102,7 @@ subroutine grid_mechanical_spectral_basic_init
     F ! pointer to solution data
   PetscInt, dimension(0:worldsize-1) :: localK
   integer(HID_T) :: fileHandle, groupHandle
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   type(MPI_File) :: fileUnit
 #else
   integer :: fileUnit


@@ -9,7 +9,7 @@ module grid_mechanical_spectral_polarisation
 #include <petsc/finclude/petscdmda.h>
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif
@@ -115,7 +115,7 @@ subroutine grid_mechanical_spectral_polarisation_init
     F_tau ! specific (sub)pointer
   PetscInt, dimension(0:worldsize-1) :: localK
   integer(HID_T) :: fileHandle, groupHandle
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   type(MPI_File) :: fileUnit
 #else
   integer :: fileUnit


@@ -9,7 +9,7 @@ module grid_thermal_spectral
 #include <petsc/finclude/petscdmda.h>
   use PETScDMDA
   use PETScSNES
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif


@@ -8,7 +8,7 @@ module spectral_utilities
 #include <petsc/finclude/petscsys.h>
   use PETScSys
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif
@@ -985,7 +985,7 @@ subroutine utilities_updateCoords(F)
     rank_t, rank_b, &
     c, &
     ierr
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   type(MPI_Request), dimension(4) :: request
   type(MPI_Status),  dimension(4) :: status
 #else
@@ -1050,7 +1050,7 @@ subroutine utilities_updateCoords(F)
   call MPI_Waitall(4,request,status,ierr)
   if(ierr /=0) error stop 'MPI error'
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   ! ToDo
 #else
   if(any(status(MPI_ERROR,:) /= 0)) error stop 'MPI error'
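The '! ToDo' above marks the one place where switching to mpi_f08 is more than a use-statement change: in the legacy binding, statuses for MPI_Waitall are an integer array of shape (MPI_STATUS_SIZE,4) and the per-request error is read as status(MPI_ERROR,:), whereas mpi_f08 makes each status a type(MPI_Status) with MPI_SOURCE, MPI_TAG, and MPI_ERROR components. The following self-contained sketch of the derived-type access is not part of this commit; the self-messaging demo program is illustrative only.

program mpi_f08_status_demo
  use MPI_f08                                  ! statuses are type(MPI_Status), not integer(MPI_STATUS_SIZE) arrays
  implicit none

  type(MPI_Request), dimension(2) :: request
  type(MPI_Status),  dimension(2) :: status
  integer :: me, sendbuf, recvbuf, err

  call MPI_Init(err)
  call MPI_Comm_rank(MPI_COMM_WORLD,me,err)

  sendbuf = 42
  call MPI_Isend(sendbuf,1,MPI_INTEGER,me,0,MPI_COMM_WORLD,request(1),err)  ! message to self, works for any rank count
  call MPI_Irecv(recvbuf,1,MPI_INTEGER,me,0,MPI_COMM_WORLD,request(2),err)
  call MPI_Waitall(2,request,status,err)
  if (err /= 0) error stop 'MPI error'

  print'(a,i0,a,i0)', ' received ',recvbuf,' from rank ',status(2)%MPI_SOURCE  ! component access replaces array indexing
  ! the mpi_f08 counterpart of any(status(MPI_ERROR,:) /= 0) would be any(status(:)%MPI_ERROR /= 0)

  call MPI_Finalize(err)
end program mpi_f08_status_demo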


@@ -6,11 +6,10 @@ module FEM_utilities
 #include <petsc/finclude/petscdmplex.h>
 #include <petsc/finclude/petscdmda.h>
 #include <petsc/finclude/petscis.h>
   use PETScDMplex
   use PETScDMDA
   use PETScIS
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif


@@ -11,7 +11,7 @@ module discretization_mesh
   use PETScDMplex
   use PETScDMDA
   use PETScIS
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif


@@ -8,12 +8,11 @@ module mesh_mechanical_FEM
 #include <petsc/finclude/petscdmplex.h>
 #include <petsc/finclude/petscdm.h>
 #include <petsc/finclude/petsc.h>
   use PETScSNES
   use PETScDM
   use PETScDMplex
   use PETScDT
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif


@@ -9,11 +9,12 @@ module parallelization
 #ifdef PETSC
 #include <petsc/finclude/petscsys.h>
   use PETScSys
-#if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   use MPI_f08
 #endif
 !$ use OMP_LIB
 #endif
   use prec
   implicit none


@@ -12,7 +12,11 @@ module results
   use HDF5_utilities
   use HDF5
 #ifdef PETSC
-  use PETSc
+#include <petsc/finclude/petscsys.h>
+  use PETScSys
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
+  use MPI_f08
+#endif
 #endif
   implicit none
@@ -453,7 +457,7 @@ subroutine results_mapping_phase(ID,entry,label)
   call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-#ifndef PETSc
+#ifndef PETSC
   entryGlobal = entry -1 ! 0-based
 #else
 !--------------------------------------------------------------------------------------------------
@@ -461,7 +465,7 @@ subroutine results_mapping_phase(ID,entry,label)
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
+  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr) ! get output at each process
   if(ierr /= 0) error stop 'MPI error'
   entryOffset = 0
@@ -470,7 +474,7 @@ subroutine results_mapping_phase(ID,entry,label)
       entryOffset(ID(co,ce),worldrank) = entryOffset(ID(co,ce),worldrank) +1
     enddo
   enddo
-  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
+  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process
   if(ierr /= 0) error stop 'MPI error'
   entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
   do co = 1, size(ID,1)
@@ -606,7 +610,7 @@ subroutine results_mapping_homogenization(ID,entry,label)
   call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-#ifndef PETSc
+#ifndef PETSC
   entryGlobal = entry -1 ! 0-based
 #else
 !--------------------------------------------------------------------------------------------------
@@ -614,14 +618,14 @@ subroutine results_mapping_homogenization(ID,entry,label)
   call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
   if(hdferr < 0) error stop 'HDF5 error'
-  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
+  call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr) ! get output at each process
   if(ierr /= 0) error stop 'MPI error'
   entryOffset = 0
   do ce = 1, size(ID,1)
     entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1
   enddo
-  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
+  call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process
   if(ierr /= 0) error stop 'MPI error'
   entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
   do ce = 1, size(ID,1)