PETSc defines are rather complicated

mpi_f08 can now be used on newer PETSc installations (version > 3.14) if the old MPI modules
are not exposed through PETSc, i.e. if PETSC_HAVE_MPI_F90MODULE_VISIBILITY is not defined
Martin Diehl 2021-07-09 18:48:25 +02:00
parent 637f78bd52
commit 136a4b1377
14 changed files with 33 additions and 26 deletions
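The guard applied across the changed files follows one pattern: mpi_f08 is only pulled in when PETSc is newer than 3.14 and does not re-export the old MPI module symbols (PETSC_HAVE_MPI_F90MODULE_VISIBILITY undefined), and MPI handles are then declared as derived types instead of plain integers. The minimal sketch below is not part of the commit; the module name mpi_f08_guard_example, the subroutine rank_report, and the MPI_Comm_rank call are purely illustrative.

module mpi_f08_guard_example
#include <petsc/finclude/petscsys.h>
  use PETScSys
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
  use MPI_f08                                             ! Fortran 2008 bindings: handles are derived types
#endif

  implicit none

contains

  subroutine rank_report()

#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
    type(MPI_Comm) :: comm                                ! derived-type handle from mpi_f08
#else
    integer :: comm                                       ! integer handle from the old bindings
#endif
    integer :: rank, err

    comm = MPI_COMM_WORLD
    call MPI_Comm_rank(comm,rank,err)
    if (err /= 0) error stop 'MPI error'
    print '(a,i0)', 'MPI rank: ', rank

  end subroutine rank_report

end module mpi_f08_guard_example

Builds against PETSc 3.14 or older, or against a PETSc that still exposes the old mpi module, fall through to the previous integer-based code path, so nothing changes there.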

View File

@@ -7,7 +7,11 @@
module HDF5_utilities
use HDF5
#ifdef PETSC
- use PETSc
+ #include <petsc/finclude/petscsys.h>
+ use PETScSys
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
+ use MPI
+ #endif
#endif
use prec

View File

@@ -9,7 +9,7 @@
program DAMASK_grid
#include <petsc/finclude/petscsys.h>
use PETScSys
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif

View File

@@ -7,7 +7,7 @@
module discretization_grid
#include <petsc/finclude/petscsys.h>
use PETScSys
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif

View File

@@ -9,7 +9,7 @@ module grid_damage_spectral
#include <petsc/finclude/petscdmda.h>
use PETScDMDA
use PETScSNES
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif

View File

@@ -9,7 +9,7 @@ module grid_mechanical_FEM
#include <petsc/finclude/petscdmda.h>
use PETScDMDA
use PETScSNES
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif

View File

@@ -9,7 +9,7 @@ module grid_mechanical_spectral_basic
#include <petsc/finclude/petscdmda.h>
use PETScDMDA
use PETScSNES
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif
@@ -102,7 +102,7 @@ subroutine grid_mechanical_spectral_basic_init
F ! pointer to solution data
PetscInt, dimension(0:worldsize-1) :: localK
integer(HID_T) :: fileHandle, groupHandle
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
type(MPI_File) :: fileUnit
#else
integer :: fileUnit

View File

@@ -9,7 +9,7 @@ module grid_mechanical_spectral_polarisation
#include <petsc/finclude/petscdmda.h>
use PETScDMDA
use PETScSNES
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif
@@ -115,7 +115,7 @@ subroutine grid_mechanical_spectral_polarisation_init
F_tau ! specific (sub)pointer
PetscInt, dimension(0:worldsize-1) :: localK
integer(HID_T) :: fileHandle, groupHandle
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
type(MPI_File) :: fileUnit
#else
integer :: fileUnit

View File

@@ -9,7 +9,7 @@ module grid_thermal_spectral
#include <petsc/finclude/petscdmda.h>
use PETScDMDA
use PETScSNES
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif

View File

@@ -8,7 +8,7 @@ module spectral_utilities
#include <petsc/finclude/petscsys.h>
use PETScSys
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif
@@ -985,7 +985,7 @@ subroutine utilities_updateCoords(F)
rank_t, rank_b, &
c, &
ierr
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
type(MPI_Request), dimension(4) :: request
type(MPI_Status), dimension(4) :: status
#else
@@ -1050,7 +1050,7 @@ subroutine utilities_updateCoords(F)
call MPI_Waitall(4,request,status,ierr)
if(ierr /=0) error stop 'MPI error'
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
! ToDo
#else
if(any(status(MPI_ERROR,:) /= 0)) error stop 'MPI error'

View File

@@ -6,11 +6,10 @@ module FEM_utilities
#include <petsc/finclude/petscdmplex.h>
#include <petsc/finclude/petscdmda.h>
#include <petsc/finclude/petscis.h>
use PETScDMplex
use PETScDMDA
use PETScIS
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif

View File

@@ -11,7 +11,7 @@ module discretization_mesh
use PETScDMplex
use PETScDMDA
use PETScIS
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif

View File

@@ -8,12 +8,11 @@ module mesh_mechanical_FEM
#include <petsc/finclude/petscdmplex.h>
#include <petsc/finclude/petscdm.h>
#include <petsc/finclude/petsc.h>
use PETScSNES
use PETScDM
use PETScDMplex
use PETScDT
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif

View File

@@ -9,11 +9,12 @@ module parallelization
#ifdef PETSC
#include <petsc/finclude/petscsys.h>
use PETScSys
- #if !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) && !defined(PETSC_HAVE_MPI_F90MODULE)
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif
!$ use OMP_LIB
#endif
use prec
implicit none

View File

@@ -12,7 +12,11 @@ module results
use HDF5_utilities
use HDF5
#ifdef PETSC
- use PETSc
+ #include <petsc/finclude/petscsys.h>
+ use PETScSys
+ #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
+ use MPI_f08
+ #endif
#endif
implicit none
@@ -453,7 +457,7 @@ subroutine results_mapping_phase(ID,entry,label)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
- #ifndef PETSc
+ #ifndef PETSC
entryGlobal = entry -1 ! 0-based
#else
!--------------------------------------------------------------------------------------------------
@@ -461,7 +465,7 @@ subroutine results_mapping_phase(ID,entry,label)
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
- call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
+ call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr) ! get output at each process
if(ierr /= 0) error stop 'MPI error'
entryOffset = 0
@@ -470,7 +474,7 @@ subroutine results_mapping_phase(ID,entry,label)
entryOffset(ID(co,ce),worldrank) = entryOffset(ID(co,ce),worldrank) +1
enddo
enddo
- call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
+ call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process
if(ierr /= 0) error stop 'MPI error'
entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
do co = 1, size(ID,1)
@@ -606,7 +610,7 @@ subroutine results_mapping_homogenization(ID,entry,label)
call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
- #ifndef PETSc
+ #ifndef PETSC
entryGlobal = entry -1 ! 0-based
#else
!--------------------------------------------------------------------------------------------------
@@ -614,14 +618,14 @@ subroutine results_mapping_homogenization(ID,entry,label)
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if(hdferr < 0) error stop 'HDF5 error'
- call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
+ call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr) ! get output at each process
if(ierr /= 0) error stop 'MPI error'
entryOffset = 0
do ce = 1, size(ID,1)
entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1
enddo
- call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
+ call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process
if(ierr /= 0) error stop 'MPI error'
entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
do ce = 1, size(ID,1)