handling of MPI in 'parallelization'

Martin Diehl 2020-09-13 10:19:38 +02:00
parent 126ef8be9f
commit da0e16520c
20 changed files with 84 additions and 93 deletions
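
The CPFEM_initAll hunks below move all MPI/PETSc start-up out of DAMASK_interface_init and into a dedicated parallelization_init. A minimal sketch of the resulting start-up order, using only the routine names that appear in the hunks, would be:

subroutine CPFEM_initAll

  call parallelization_init                                                                         ! MPI/PETSc/OpenMP set-up now runs first
  call DAMASK_interface_init                                                                        ! command-line handling no longer touches MPI
  call prec_init
  call IO_init
  ! ... remaining *_init calls unchanged ...

end subroutine CPFEM_initAll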

View File

@@ -48,7 +48,7 @@ module CPFEM
   end type tNumerics
   type(tNumerics), private :: num
   type, private :: tDebugOptions
     logical :: &
       basic, &
@@ -58,9 +58,9 @@ module CPFEM
       element, &
       ip
   end type tDebugOptions
   type(tDebugOptions), private :: debugCPFEM
   public :: &
     CPFEM_general, &
     CPFEM_initAll, &
@@ -74,7 +74,9 @@ contains
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_initAll
+  call parallelization_init
   call DAMASK_interface_init
+  call prec_init
   call IO_init
   call config_init
   call math_init
@@ -112,13 +114,13 @@ subroutine CPFEM_init
   allocate(CPFEM_dcsdE_knownGood(6,6,discretization_nIP,discretization_nElem), source= 0.0_pReal)
 !------------------------------------------------------------------------------
 ! read numerical parameters and do sanity check
   num_commercialFEM => numerics_root%get('commercialFEM',defaultVal=emptyDict)
   num%iJacoStiffness = num_commercialFEM%get_asInt('ijacostiffness',defaultVal=1)
   if (num%iJacoStiffness < 1) call IO_error(301,ext_msg='iJacoStiffness')
 !------------------------------------------------------------------------------
 ! read debug options
   debug_CPFEM => debug_root%get('cpfem',defaultVal=emptyList)
   debugCPFEM%basic = debug_CPFEM%contains('basic')

View File

@@ -40,7 +40,9 @@ contains
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_initAll
+  call parallelization_init
   call DAMASK_interface_init                                                                        ! Spectral and FEM interface to commandline
+  call prec_init
   call IO_init
   call base64_init
 #ifdef Mesh

View File

@@ -73,13 +73,7 @@ subroutine DAMASK_interface_init
     userName                                                                                        !< name of user calling the executable
   integer :: &
     stat, &
-    i, &
-#ifdef _OPENMP
-    threadLevel, &
-#endif
-    worldrank = 0, &
-    worldsize = 0, &
-    typeSize
+    i
   integer, dimension(8) :: &
     dateAndTime
   integer :: err
@@ -87,44 +81,10 @@ subroutine DAMASK_interface_init
   external :: &
     quit
-  open(6, encoding='UTF-8')                                                                         ! for special characters in output
-!--------------------------------------------------------------------------------------------------
-! PETSc Init
-#ifdef _OPENMP
-  ! If openMP is enabled, check if the MPI libary supports it and initialize accordingly.
-  ! Otherwise, the first call to PETSc will do the initialization.
-  call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err)
-  if (err /= 0) call quit(1)
-  if (threadLevel<MPI_THREAD_FUNNELED) then
-    write(6,'(/,a)') ' ERROR: MPI library does not support OpenMP'
-    call quit(1)
-  endif
-#endif
-  call PETScInitializeNoArguments(petsc_err)                                                        ! according to PETSc manual, that should be the first line in the code
-  CHKERRQ(petsc_err)                                                                                ! this is a macro definition, it is case sensitive
-  call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,err)
-  if (err /= 0) call quit(1)
-  call MPI_Comm_size(PETSC_COMM_WORLD,worldsize,err)
-  if (err /= 0) call quit(1)
-  mainProcess: if (worldrank == 0) then
-    if (output_unit /= 6) then
-      write(output_unit,'(/,a)') ' ERROR: STDOUT != 6'
-      call quit(1)
-    endif
-    if (error_unit /= 0) then
-      write(output_unit,'(/,a)') ' ERROR: STDERR != 0'
-      call quit(1)
-    endif
-  else mainProcess
-    close(6)                                                                                        ! disable output for non-master processes (open 6 to rank specific file for debug)
-    open(6,file='/dev/null',status='replace')                                                       ! close(6) alone will leave some temp files in cwd
-  endif mainProcess
   write(6,'(/,a)') ' <<<+- DAMASK_interface init -+>>>'
+  open(6, encoding='UTF-8')                                                                         ! for special characters in output
 ! http://patorjk.com/software/taag/#p=display&f=Lean&t=DAMASK%203
 #ifdef DEBUG
   write(6,*) achar(27)//'[31m'
@@ -162,20 +122,6 @@ subroutine DAMASK_interface_init
   write(6,'(/,a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',dateAndTime(2),'/', dateAndTime(1)
   write(6,'(a,2(i2.2,a),i2.2)')   ' Time: ',dateAndTime(5),':', dateAndTime(6),':', dateAndTime(7)
-  call MPI_Type_size(MPI_INTEGER,typeSize,err)
-  if (err /= 0) call quit(1)
-  if (typeSize*8 /= bit_size(0)) then
-    write(6,'(a)') ' Mismatch between MPI and DAMASK integer'
-    call quit(1)
-  endif
-  call MPI_Type_size(MPI_DOUBLE,typeSize,err)
-  if (err /= 0) call quit(1)
-  if (typeSize*8 /= storage_size(0.0_pReal)) then
-    write(6,'(a)') ' Mismatch between MPI and DAMASK real'
-    call quit(1)
-  endif
   do i = 1, command_argument_count()
     call get_command_argument(i,arg,status=err)
     if (err /= 0) call quit(1)
@@ -269,9 +215,6 @@ subroutine DAMASK_interface_init
   call setSIGUSR1(.false.)
   call setSIGUSR2(.false.)
-  call prec_init
-  call parallelization_init
 end subroutine DAMASK_interface_init

View File

@@ -26,7 +26,6 @@
 #define PASTE(x,y) x ## y
 #include "prec.f90"
-#include "parallelization.f90"
 module DAMASK_interface
   use prec
@@ -44,7 +43,6 @@ module DAMASK_interface
   logical, protected, public :: symmetricSolver
   character(len=*), parameter, public :: INPUTFILEEXTENSION = '.dat'
   public :: &
     DAMASK_interface_init, &
     getSolverJobName
@@ -91,9 +89,6 @@ subroutine DAMASK_interface_init
   endif
   symmetricSolver = solverIsSymmetric()
-  call prec_init
-  call parallelization_init
 end subroutine DAMASK_interface_init

View File

@@ -11,9 +11,9 @@ module HDF5_utilities
 #endif
   use prec
+  use parallelization
   use IO
   use rotations
-  use config
   implicit none
   public

View File

@@ -3,6 +3,7 @@
 !> @brief all DAMASK files without solver
 !> @details List of files needed by MSC.Marc
 !--------------------------------------------------------------------------------------------------
+#include "parallelization.f90"
 #include "IO.f90"
 #include "YAML_types.f90"
 #include "YAML_parse.f90"

View File

@@ -2,7 +2,7 @@
 !> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
 !> @brief Reads in the material, numerics & debug configuration from their respective file
 !> @details Reads the material configuration file, where solverJobName.yaml takes
 !! precedence over material.yaml.
 !--------------------------------------------------------------------------------------------------
 module config
   use prec
@@ -24,10 +24,6 @@ module config
     numerics_root, &
     debug_root
-  integer, protected, public :: &
-    worldrank = 0, &                                                                                !< MPI worldrank (/=0 for MPI simulations only)
-    worldsize = 1                                                                                   !< MPI worldsize (/=1 for MPI simulations only)
   public :: &
     config_init, &
     config_deallocate
@@ -40,7 +36,7 @@ contains
 subroutine config_init
   write(6,'(/,a)') ' <<<+- config init -+>>>'; flush(6)
   call parse_material
   call parse_numerics
   call parse_debug
@@ -76,12 +72,6 @@ end subroutine parse_material
 subroutine parse_numerics
   logical :: fexist
-  integer :: ierr
-#ifdef PETSc
-  call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,ierr);CHKERRQ(ierr)
-  call MPI_Comm_size(PETSC_COMM_WORLD,worldsize,ierr);CHKERRQ(ierr)
-#endif
   numerics_root => emptyDict
   inquire(file='numerics.yaml', exist=fexist)
@@ -90,7 +80,6 @@ subroutine parse_numerics
     numerics_root => parse_flow(to_flow(IO_read('numerics.yaml')))
   endif
 end subroutine parse_numerics
@@ -99,7 +88,7 @@ end subroutine parse_numerics
 !--------------------------------------------------------------------------------------------------
 subroutine parse_debug
   logical :: fexist
   debug_root => emptyDict
   inquire(file='debug.yaml', exist=fexist)
@@ -113,10 +102,11 @@ end subroutine parse_debug
 !--------------------------------------------------------------------------------------------------
 !> @brief deallocates material.yaml structure
+!ToDo: deallocation of numerics debug (optional)
 !--------------------------------------------------------------------------------------------------
 subroutine config_deallocate
-  deallocate(material_root)                                                                         !ToDo: deallocation of numerics debug (optional)
+  deallocate(material_root)
 end subroutine config_deallocate

View File

@@ -10,6 +10,7 @@
 module crystallite
   use prec
+  use parallelization
   use IO
   use HDF5_utilities
   use DAMASK_interface

View File

@@ -10,6 +10,7 @@ program DAMASK_grid
 #include <petsc/finclude/petscsys.h>
   use PETScsys
   use prec
+  use parallelization
   use DAMASK_interface
   use IO
   use config

View File

@@ -9,6 +9,7 @@ module discretization_grid
   use PETScsys
   use prec
+  use parallelization
   use system_routines
   use base64
   use zlib

View File

@@ -11,12 +11,13 @@ module grid_damage_spectral
   use PETScsnes
   use prec
+  use parallelization
   use IO
   use spectral_utilities
   use discretization_grid
   use damage_nonlocal
-  use config
   use YAML_types
+  use config
   implicit none
   private

View File

@@ -11,6 +11,7 @@ module grid_mech_FEM
   use PETScsnes
   use prec
+  use parallelization
   use DAMASK_interface
   use HDF5_utilities
   use math

View File

@@ -11,6 +11,7 @@ module grid_mech_spectral_basic
   use PETScsnes
   use prec
+  use parallelization
   use DAMASK_interface
   use HDF5_utilities
   use math

View File

@@ -11,6 +11,7 @@ module grid_mech_spectral_polarisation
   use PETScsnes
   use prec
+  use parallelization
   use DAMASK_interface
   use HDF5_utilities
   use math

View File

@@ -11,6 +11,7 @@ module grid_thermal_spectral
   use PETScsnes
   use prec
+  use parallelization
   use IO
   use spectral_utilities
   use discretization_grid

View File

@@ -10,11 +10,11 @@ module spectral_utilities
   use prec
   use DAMASK_interface
+  use parallelization
   use math
   use rotations
   use IO
   use discretization_grid
-  use config
   use discretization
   use homogenization

View File

@@ -11,6 +11,7 @@ program DAMASK_mesh
   use PetscDM
   use prec
   use DAMASK_interface
+  use parallelization
   use IO
   use math
   use CPFEM2

View File

@@ -13,6 +13,7 @@ module discretization_mesh
   use PETScis
   use DAMASK_interface
+  use parallelization
   use IO
   use config
   use discretization

View File

@@ -4,6 +4,7 @@
 !--------------------------------------------------------------------------------------------------
 module parallelization
   use prec
+  use, intrinsic :: iso_fortran_env
 #ifdef PETSc
 #include <petsc/finclude/petscsys.h>
@@ -14,6 +15,10 @@ module parallelization
   implicit none
   private
+  integer, protected, public :: &
+    worldrank = 0, &                                                                                !< MPI worldrank (/=0 for MPI simulations only)
+    worldsize = 1                                                                                   !< MPI worldsize (/=1 for MPI simulations only)
   public :: &
     parallelization_init
@@ -24,13 +29,56 @@ contains
 !--------------------------------------------------------------------------------------------------
 subroutine parallelization_init
-!$ integer :: got_env, DAMASK_NUM_THREADS
+  integer :: err, typeSize
+!$ integer :: got_env, DAMASK_NUM_THREADS, threadLevel
 !$ character(len=6) NumThreadsString
+#ifdef PETSc
+  PetscErrorCode :: petsc_err
+#else
   write(6,'(/,a)') ' <<<+- parallelization init -+>>>'; flush(6)
+#endif
-!$ call get_environment_variable(name='DAMASK_NUM_THREADS',value=NumThreadsString,STATUS=got_env)  ! get environment variable DAMASK_NUM_THREADS...
-!$ if(got_env /= 0) then                                                                           ! could not get number of threads, set it to 1
+#ifdef PETSc
+#ifdef _OPENMP
+  ! If openMP is enabled, check if the MPI library supports it and initialize accordingly.
+  ! Otherwise, the first call to PETSc will do the initialization.
+  call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err)
+  if (err /= 0)                        error stop 'MPI init failed'
+  if (threadLevel<MPI_THREAD_FUNNELED) error stop 'MPI library does not support OpenMP'
+#endif
+  call PETScInitializeNoArguments(petsc_err)                                                        ! first line in the code according to PETSc manual
+  CHKERRQ(petsc_err)
+  call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,err)
+  if (err /= 0) error stop 'Could not determine worldrank'
+  if (worldrank == 0) write(6,'(/,a)') ' <<<+- parallelization init -+>>>'; flush(6)
+  call MPI_Comm_size(PETSC_COMM_WORLD,worldsize,err)
+  if (err /= 0) error stop 'Could not determine worldsize'
+  call MPI_Type_size(MPI_INTEGER,typeSize,err)
+  if (err /= 0) error stop 'Could not determine MPI integer size'
+  if (typeSize*8 /= bit_size(0)) error stop 'Mismatch between MPI and DAMASK integer'
+  call MPI_Type_size(MPI_DOUBLE,typeSize,err)
+  if (err /= 0) error stop 'Could not determine MPI real size'
+  if (typeSize*8 /= storage_size(0.0_pReal)) error stop 'Mismatch between MPI and DAMASK real'
+#endif
+  mainProcess: if (worldrank == 0) then
+    if (output_unit /= 6) error stop 'STDOUT != 6'
+    if (error_unit /= 0)  error stop 'STDERR != 0'
+  else mainProcess
+    close(6)                                                                                        ! disable output for non-master processes (open 6 to rank specific file for debug)
+    open(6,file='/dev/null',status='replace')                                                       ! close(6) alone will leave some temp files in cwd
+  endif mainProcess
+!$ call get_environment_variable(name='DAMASK_NUM_THREADS',value=NumThreadsString,STATUS=got_env)
+!$ if(got_env /= 0) then
 !$   write(6,*) 'Could not determine value of $DAMASK_NUM_THREADS'
 !$   DAMASK_NUM_THREADS = 1_pI32
 !$ else
@@ -42,7 +90,7 @@ subroutine parallelization_init
 !$ endif
 !$ write(6,'(a,i8,/)') ' DAMASK_NUM_THREADS: ',DAMASK_NUM_THREADS
 !$ call omp_set_num_threads(DAMASK_NUM_THREADS)
 end subroutine parallelization_init
 end module parallelization
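
Since worldrank and worldsize are now protected, public variables of the parallelization module, a consumer module obtains them through use association instead of going through config. A hypothetical sketch (the module example_output and its write statement are illustrative only, not part of this commit):

module example_output
  use parallelization, only: worldrank                                                              ! previously provided by the config module

  implicit none

contains

subroutine example_output_init

  if (worldrank == 0) &                                                                             ! report only on the master process
    write(6,'(/,a)') ' <<<+- example_output init -+>>>'; flush(6)

end subroutine example_output_init

end module example_output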

View File

@@ -6,8 +6,8 @@
 !--------------------------------------------------------------------------------------------------
 module results
   use DAMASK_interface
+  use parallelization
   use rotations
-  use config
   use HDF5_utilities
 #ifdef PETSc
   use PETSC