only rank 0 reads file for MPI

commit b9d4eb23cc (parent 812b0f07f5)
Author: Martin Diehl
Date: 2021-07-27 08:54:17 +02:00

3 changed files with 44 additions and 12 deletions

src/config.f90

@@ -52,14 +52,14 @@ subroutine parse_material()
   inquire(file='material.yaml',exist=fileExists)
   if(.not. fileExists) call IO_error(100,ext_msg='material.yaml')
-  print*, 'reading material.yaml'; flush(IO_STDOUT)
-  fileContent = IO_read('material.yaml')
   if (worldrank == 0) then
+    print*, 'reading material.yaml'; flush(IO_STDOUT)
+    fileContent = IO_read('material.yaml')
     call results_openJobFile(parallel=.false.)
     call results_writeDataset_str(fileContent,'setup','material.yaml','DAMASK main configuration')
     call results_closeJobFile
   endif
+  call parallelization_bcast_str(fileContent)
   config_material => YAML_parse_str(fileContent)
@@ -80,14 +80,14 @@ subroutine parse_numerics()
   inquire(file='numerics.yaml', exist=fileExists)
   if (fileExists) then
-    print*, 'reading numerics.yaml'; flush(IO_STDOUT)
-    fileContent = IO_read('numerics.yaml')
     if (worldrank == 0) then
+      print*, 'reading numerics.yaml'; flush(IO_STDOUT)
+      fileContent = IO_read('numerics.yaml')
       call results_openJobFile(parallel=.false.)
       call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration (optional)')
       call results_closeJobFile
     endif
+    call parallelization_bcast_str(fileContent)
     config_numerics => YAML_parse_str(fileContent)
@@ -110,14 +110,14 @@ subroutine parse_debug()
   inquire(file='debug.yaml', exist=fileExists)
   if (fileExists) then
-    print*, 'reading debug.yaml'; flush(IO_STDOUT)
-    fileContent = IO_read('debug.yaml')
     if (worldrank == 0) then
+      print*, 'reading debug.yaml'; flush(IO_STDOUT)
+      fileContent = IO_read('debug.yaml')
       call results_openJobFile(parallel=.false.)
       call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration (optional)')
       call results_closeJobFile
     endif
+    call parallelization_bcast_str(fileContent)
     config_debug => YAML_parse_str(fileContent)
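All three parsers now share one pattern: only rank 0 touches the file system and writes the setup dataset, then the file content is broadcast so that every rank parses an identical string. A minimal, self-contained sketch of that pattern, illustrative rather than DAMASK code (the hard-coded string stands in for IO_read):

    ! Sketch only: read on rank 0, then broadcast to all ranks.
    program bcast_demo
      use mpi
      implicit none

      character(len=:), allocatable :: fileContent
      integer :: worldrank, strlen, ierr

      call MPI_Init(ierr)
      call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,ierr)

      if (worldrank == 0) then
        fileContent = 'grid: [2,2,2]'                    ! stands in for IO_read('material.yaml')
        strlen = len(fileContent)
      endif

      ! two-step broadcast: send the length first so receivers can allocate a matching buffer
      call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
      if (worldrank /= 0) allocate(character(len=strlen)::fileContent)
      call MPI_Bcast(fileContent,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD,ierr)

      print*, 'rank', worldrank, 'holds: ', fileContent  ! identical on every rank

      call MPI_Finalize(ierr)
    end program bcast_demo

Built with mpif90 and run under mpirun, every rank prints the same content although only rank 0 ever produced it; the parse step (YAML_parse_str in DAMASK) can then run identically everywhere.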

src/grid/DAMASK_grid.f90

@@ -129,12 +129,14 @@ program DAMASK_grid
   if (stagItMax < 0) call IO_error(301,ext_msg='maxStaggeredIter')
   if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')
-  fileContent = IO_read(interface_loadFile)
   if (worldrank == 0) then
+    fileContent = IO_read(interface_loadFile)
     call results_openJobFile(parallel=.false.)
     call results_writeDataset_str(fileContent,'setup',interface_loadFile,'load case definition (grid solver)')
     call results_closeJobFile
   endif
+  call parallelization_bcast_str(fileContent)
   config_load => YAML_parse_str(fileContent)
   solver => config_load%get('solver')

src/parallelization.f90

@@ -24,9 +24,18 @@ module parallelization
     worldrank = 0, &                               !< MPI worldrank (/=0 for MPI simulations only)
     worldsize = 1                                  !< MPI worldsize (/=1 for MPI simulations only)
-#ifdef PETSC
+#ifndef PETSC
+public :: parallelization_bcast_str
+
+contains
+subroutine parallelization_bcast_str(string)
+  character(len=:), allocatable, intent(inout) :: string
+end subroutine parallelization_bcast_str
+
+#else
   public :: &
-    parallelization_init
+    parallelization_init, &
+    parallelization_bcast_str
 contains
@@ -101,6 +110,27 @@ subroutine parallelization_init
 !$ call omp_set_num_threads(OMP_NUM_THREADS)
 end subroutine parallelization_init
+
+!--------------------------------------------------------------------------------------------------
+!> @brief Broadcast a string from process 0.
+!--------------------------------------------------------------------------------------------------
+subroutine parallelization_bcast_str(string)
+
+  character(len=:), allocatable, intent(inout) :: string
+
+  integer :: strlen, ierr                          ! pI64 for strlen not supported by MPI
+
+  if (worldrank == 0) strlen = len(string)
+  call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
+  if (worldrank /= 0) allocate(character(len=strlen)::string)
+
+  call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD,ierr)
+
+end subroutine parallelization_bcast_str
+
 #endif
 end module parallelization
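The #ifndef PETSC branch gives serial builds a no-op parallelization_bcast_str with the same interface, so call sites such as parse_material need no guards of their own. A stripped-down, hypothetical illustration of that stub-versus-MPI pattern (module and macro names are invented for this sketch; compile the .F90 file with or without -DPARALLEL):

    module broadcast
    #ifdef PARALLEL
      use mpi
    #endif
      implicit none
      public :: bcast_str
    contains
      subroutine bcast_str(string)
        character(len=:), allocatable, intent(inout) :: string
    #ifdef PARALLEL
        integer :: worldrank, strlen, ierr
        call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,ierr)
        if (worldrank == 0) strlen = len(string)
        call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
        if (worldrank /= 0) allocate(character(len=strlen)::string)
        call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD,ierr)
    #endif
        ! without MPI, the single process already holds the string, so no action is needed
      end subroutine bcast_str
    end module broadcast

DAMASK gets the same effect by compiling the MPI version only when PETSc (which brings MPI along) is available; the call sites stay identical in both builds.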