diff --git a/src/config.f90 b/src/config.f90
index b80658213..8f5680158 100644
--- a/src/config.f90
+++ b/src/config.f90
@@ -52,14 +52,14 @@ subroutine parse_material()
 
   inquire(file='material.yaml',exist=fileExists)
   if(.not. fileExists) call IO_error(100,ext_msg='material.yaml')
 
-  print*, 'reading material.yaml'; flush(IO_STDOUT)
-  fileContent = IO_read('material.yaml')
-
   if (worldrank == 0) then
+    print*, 'reading material.yaml'; flush(IO_STDOUT)
+    fileContent = IO_read('material.yaml')
     call results_openJobFile(parallel=.false.)
     call results_writeDataset_str(fileContent,'setup','material.yaml','DAMASK main configuration')
     call results_closeJobFile
   endif
+  call parallelization_bcast_str(fileContent)
 
   config_material => YAML_parse_str(fileContent)
 
@@ -80,14 +80,14 @@ subroutine parse_numerics()
 
   inquire(file='numerics.yaml', exist=fileExists)
   if (fileExists) then
 
-    print*, 'reading numerics.yaml'; flush(IO_STDOUT)
-    fileContent = IO_read('numerics.yaml')
-
     if (worldrank == 0) then
+      print*, 'reading numerics.yaml'; flush(IO_STDOUT)
+      fileContent = IO_read('numerics.yaml')
       call results_openJobFile(parallel=.false.)
       call results_writeDataset_str(fileContent,'setup','numerics.yaml','numerics configuration (optional)')
       call results_closeJobFile
     endif
+    call parallelization_bcast_str(fileContent)
 
     config_numerics => YAML_parse_str(fileContent)
 
@@ -110,14 +110,14 @@ subroutine parse_debug()
 
   inquire(file='debug.yaml', exist=fileExists)
   if (fileExists) then
 
-    print*, 'reading debug.yaml'; flush(IO_STDOUT)
-    fileContent = IO_read('debug.yaml')
-
     if (worldrank == 0) then
+      print*, 'reading debug.yaml'; flush(IO_STDOUT)
+      fileContent = IO_read('debug.yaml')
       call results_openJobFile(parallel=.false.)
       call results_writeDataset_str(fileContent,'setup','debug.yaml','debug configuration (optional)')
       call results_closeJobFile
     endif
+    call parallelization_bcast_str(fileContent)
 
     config_debug => YAML_parse_str(fileContent)
 
diff --git a/src/grid/DAMASK_grid.f90 b/src/grid/DAMASK_grid.f90
index 65c050b0c..89521a223 100644
--- a/src/grid/DAMASK_grid.f90
+++ b/src/grid/DAMASK_grid.f90
@@ -129,12 +129,14 @@ program DAMASK_grid
   if (stagItMax < 0)  call IO_error(301,ext_msg='maxStaggeredIter')
   if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')
 
-  fileContent = IO_read(interface_loadFile)
   if (worldrank == 0) then
+    fileContent = IO_read(interface_loadFile)
     call results_openJobFile(parallel=.false.)
     call results_writeDataset_str(fileContent,'setup',interface_loadFile,'load case definition (grid solver)')
     call results_closeJobFile
   endif
+
+  call parallelization_bcast_str(fileContent)
 
   config_load => YAML_parse_str(fileContent)
   solver => config_load%get('solver')
 
diff --git a/src/parallelization.f90 b/src/parallelization.f90
index 534478cef..31da0f969 100644
--- a/src/parallelization.f90
+++ b/src/parallelization.f90
@@ -24,9 +24,18 @@ module parallelization
     worldrank = 0, &                                                                                !< MPI worldrank (/=0 for MPI simulations only)
     worldsize = 1                                                                                   !< MPI worldsize (/=1 for MPI simulations only)
 
-#ifdef PETSC
+#ifndef PETSC
+public :: parallelization_bcast_str
+
+contains
+subroutine parallelization_bcast_str(string)
+  character(len=:), allocatable, intent(inout) :: string
+end subroutine parallelization_bcast_str
+
+#else
 public :: &
-  parallelization_init
+  parallelization_init, &
+  parallelization_bcast_str
 
 contains
 
@@ -101,6 +110,27 @@ subroutine parallelization_init
 !$ call omp_set_num_threads(OMP_NUM_THREADS)
 
 end subroutine parallelization_init
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief Broadcast a string from process 0.
+!--------------------------------------------------------------------------------------------------
+subroutine parallelization_bcast_str(string)
+
+  character(len=:), allocatable, intent(inout) :: string
+
+  integer :: strlen, ierr                                                                           ! pI64 for strlen not supported by MPI
+
+
+  if (worldrank == 0) strlen = len(string)
+  call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
+  if (worldrank /= 0) allocate(character(len=strlen)::string)
+
+  call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD,ierr)
+
+
+end subroutine parallelization_bcast_str
+
 #endif
 
 end module parallelization
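
The pattern this patch introduces is read-once/broadcast: only rank 0 touches the file system, and every other rank receives the contents over MPI, first the length (so receivers can allocate a matching buffer), then the payload. The standalone sketch below shows the same protocol outside of DAMASK, assuming an MPI build; the program name demo_bcast, the helper bcast_str, and the literal string are illustrative stand-ins for IO_read and the real call sites, not part of the patch.

program demo_bcast
  use MPI
  implicit none

  character(len=:), allocatable :: fileContent
  integer :: worldrank, ierr

  call MPI_Init(ierr)
  call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,ierr)

  if (worldrank == 0) fileContent = 'read on rank 0 only'                    ! stands in for IO_read(...)
  call bcast_str(fileContent)

  print*, 'rank', worldrank, 'holds: ', fileContent                          ! identical on every rank

  call MPI_Finalize(ierr)

contains

  ! same length-first protocol as parallelization_bcast_str above
  subroutine bcast_str(string)
    character(len=:), allocatable, intent(inout) :: string
    integer :: strlen, ierr

    if (worldrank == 0) strlen = len(string)                                 ! only rank 0 knows the length
    call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)               ! ... so broadcast it first
    if (worldrank /= 0) allocate(character(len=strlen)::string)              ! receivers allocate to match
    call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD,ierr)        ! then broadcast the payload
  end subroutine bcast_str

end program demo_bcast

Run with, e.g., mpirun -np 4 ./demo_bcast; every rank should print the same string. Note that MPI_Bcast is collective, which is why the patch moves the call outside the worldrank == 0 block: all ranks must reach it, not just the one that read the file.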