!--------------------------------------------------------------------------------------------------
!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
!> @brief Inquires variables related to parallelization (openMP, MPI)
!--------------------------------------------------------------------------------------------------
module parallelization
  use, intrinsic :: ISO_fortran_env, only: &
    OUTPUT_UNIT

#ifdef PETSC
#include <petsc/finclude/petscsys.h>
  use PETScSys
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
  use MPI_f08
#endif
!$ use OMP_LIB
#endif

  use prec

  implicit none
  private

  integer, protected, public :: &
    worldrank = 0, &   !< MPI worldrank (/=0 for MPI simulations only)
    worldsize = 1      !< MPI worldsize (/=1 for MPI simulations only)

#ifndef PETSC
  public :: parallelization_bcast_str

contains
subroutine parallelization_bcast_str(string)
  character(len=:), allocatable, intent(inout) :: string
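
  ! Without MPI there is only one process, so there is nothing to broadcast.
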
end subroutine parallelization_bcast_str

#else
  public :: &
    parallelization_init, &
    parallelization_bcast_str

contains

!--------------------------------------------------------------------------------------------------
!> @brief Initialize shared memory (openMP) and distributed memory (MPI) parallelization.
!--------------------------------------------------------------------------------------------------
subroutine parallelization_init

  integer :: err, typeSize
!$ integer :: got_env, threadLevel
!$ integer(pI32) :: OMP_NUM_THREADS
!$ character(len=6) :: NumThreadsString
  PetscErrorCode :: err_PETSc

#ifdef _OPENMP
  ! If openMP is enabled, check if the MPI library supports it and initialize accordingly.
  ! Otherwise, the first call to PETSc will do the initialization.
  call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err)
  if (err /= 0) error stop 'MPI init failed'
  if (threadLevel < MPI_THREAD_FUNNELED) error stop 'MPI library does not support OpenMP'
#endif

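! Debug builds call PetscInitialize, which also parses PETSc command-line options;
! release builds initialize PETSc without argument processing.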
#if defined(DEBUG)
  call PetscInitialize(PETSC_NULL_CHARACTER,err_PETSc)
#else
  call PetscInitializeNoArguments(err_PETSc)
#endif
  CHKERRQ(err_PETSc)

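! Trap floating-point exceptions only in debug builds with the Intel compiler.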
#if defined(DEBUG) && defined(__INTEL_COMPILER)
  call PetscSetFPTrap(PETSC_FP_TRAP_ON,err_PETSc)
#else
  call PetscSetFPTrap(PETSC_FP_TRAP_OFF,err_PETSc)
#endif
  CHKERRQ(err_PETSc)

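! Determine rank and size of the world communicator; only process 0 prints to the screen.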
  call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err)
  if (err /= 0) error stop 'Could not determine worldrank'
  if (worldrank == 0) print'(/,1x,a)', '<<<+- parallelization init -+>>>'

  call MPI_Comm_size(MPI_COMM_WORLD,worldsize,err)
  if (err /= 0) error stop 'Could not determine worldsize'
  if (worldrank == 0) print'(/,1x,a,i3)', 'MPI processes: ',worldsize

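! Check that the sizes of the MPI datatypes match the DAMASK kinds defined in prec.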
  call MPI_Type_size(MPI_INTEGER,typeSize,err)
  if (err /= 0) error stop 'Could not determine MPI integer size'
  if (typeSize*8 /= bit_size(0)) error stop 'Mismatch between MPI and DAMASK integer'

  call MPI_Type_size(MPI_INTEGER8,typeSize,err)
  if (err /= 0) error stop 'Could not determine MPI integer size'
  if (int(typeSize,pI64)*8_pI64 /= bit_size(0_pI64)) &
    error stop 'Mismatch between MPI and DAMASK integer (long long)'

  call MPI_Type_size(MPI_DOUBLE,typeSize,err)
  if (err /= 0) error stop 'Could not determine MPI real size'
  if (typeSize*8 /= storage_size(0.0_pReal)) error stop 'Mismatch between MPI and DAMASK real'

  if (worldrank /= 0) then
    close(OUTPUT_UNIT)                                     ! disable output
    open(OUTPUT_UNIT,file='/dev/null',status='replace')    ! close() alone will leave some temp files in cwd
  endif

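! Use $OMP_NUM_THREADS if it is set to a positive value, otherwise fall back to 4 threads.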
!$ call get_environment_variable(name='OMP_NUM_THREADS',value=NumThreadsString,STATUS=got_env)
!$ if (got_env /= 0) then
!$   print'(1x,a)', 'Could not get $OMP_NUM_THREADS, using default'
!$   OMP_NUM_THREADS = 4_pI32
!$ else
!$   read(NumThreadsString,'(i6)') OMP_NUM_THREADS
!$   if (OMP_NUM_THREADS < 1_pI32) then
!$     print'(1x,a)', 'Invalid OMP_NUM_THREADS: "'//trim(NumThreadsString)//'", using default'
!$     OMP_NUM_THREADS = 4_pI32
!$   endif
!$ endif
!$ print'(1x,a,1x,i2)', 'OMP_NUM_THREADS:',OMP_NUM_THREADS
!$ call omp_set_num_threads(OMP_NUM_THREADS)

end subroutine parallelization_init


!--------------------------------------------------------------------------------------------------
!> @brief Broadcast a string from process 0.
!--------------------------------------------------------------------------------------------------
subroutine parallelization_bcast_str(string)

  character(len=:), allocatable, intent(inout) :: string
  integer :: strlen, ierr                                  ! pI64 for strlen not supported by MPI

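  ! Broadcast the string length first so that the receiving processes can allocate the buffer.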
  if (worldrank == 0) strlen = len(string)
  call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD, ierr)
  if (worldrank /= 0) allocate(character(len=strlen)::string)

  call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD, ierr)

end subroutine parallelization_bcast_str
#endif
end module parallelization