avoid repeated random sequences on different processes

We don't use random numbers much, and no problems have been reported yet.
Still, offsetting the seed by the MPI rank is safer.
Martin Diehl 2022-10-27 09:21:37 +02:00
parent 94849d9c38
commit ae3b049d30
2 changed files with 12 additions and 5 deletions
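
For context, a minimal standalone sketch of the technique this commit applies: offset the seed of Fortran's intrinsic generator by the MPI rank, so each process draws a distinct sequence. The program is illustrative only (plain mpi_f08, a default-kind rank, and the same arbitrary factor 42 as in the diff); DAMASK instead takes worldrank from its parallelization module, as the diffs below show.

program seed_per_rank
  use MPI_f08
  implicit none

  integer :: rank, n
  integer, allocatable :: seed(:)
  real :: r

  call MPI_Init()
  call MPI_Comm_rank(MPI_COMM_WORLD, rank)

  call random_seed(size = n)               ! length of the generator's seed array
  allocate(seed(n))
  call random_seed(get = seed)             ! start from the current seed
  call random_seed(put = seed + rank*42)   ! elementwise per-rank offset
  call random_number(r)
  print '(a,i0,a,f8.6)', 'rank ', rank, ': r = ', r

  call MPI_Finalize()
end program seed_per_rank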


@@ -10,8 +10,17 @@ module math
   use IO
   use config
   use YAML_types
+  use parallelization
   use LAPACK_interface
+#ifdef PETSC
+#include <petsc/finclude/petscsys.h>
+  use PETScSys
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
+  use MPI_f08
+#endif
+#endif
 
   implicit none(type,external)
   public
 #if __INTEL_COMPILER >= 1900
@@ -105,7 +114,7 @@ subroutine math_init()
     call random_seed(get = seed)
   end if
 
-  call random_seed(put = seed)
+  call random_seed(put = seed + worldrank*42_MPI_INTEGER_KIND)
   call random_number(randTest)
 
   print'(/,a,i2)', ' size of random seed: ', randSize
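
The offset is added elementwise to the whole seed array and vanishes on rank 0, so serial runs (and the first rank of parallel runs) reproduce the previous sequences. With hypothetical numbers:

! illustration only, seed values are made up (randSize = 4):
! base seed:                [11,  22,  33,  44]
! rank 0: seed + 0*42  ->   [11,  22,  33,  44]   (unchanged)
! rank 1: seed + 1*42  ->   [53,  64,  75,  86]
! rank 2: seed + 2*42  ->   [95, 106, 117, 128]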


@@ -27,15 +27,13 @@ module parallelization
 #ifndef PETSC
   integer, parameter, public :: &
-    MPI_INTEGER_KIND = pI64
+    MPI_INTEGER_KIND = pI64                           !< needed for MSC.Marc
   integer(MPI_INTEGER_KIND), parameter, public :: &
-    worldrank = 0_MPI_INTEGER_KIND, &                 !< MPI dummy worldrank
-    worldsize = 1_MPI_INTEGER_KIND                    !< MPI dummy worldsize
 #else
   integer(MPI_INTEGER_KIND), protected, public :: &
+#endif
     worldrank = 0_MPI_INTEGER_KIND, &                 !< MPI worldrank (/=0 for MPI simulations only)
     worldsize = 1_MPI_INTEGER_KIND                    !< MPI worldsize (/=1 for MPI simulations only)
-#endif
 
 #ifndef PETSC
   public :: parallelization_bcast_str
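
After this change, worldrank and worldsize are declared exactly once; the preprocessor only selects the attribute line (parameter for the MPI-less MSC.Marc build, protected for PETSc/MPI builds), so the initializers and doc comments are no longer duplicated. For orientation, a hedged sketch of how the protected pair is presumably set during initialization in the PETSc path (error handling condensed; the actual routine may differ):

subroutine parallelization_init()

  integer(MPI_INTEGER_KIND) :: err_MPI

  ! assumes MPI_INTEGER_KIND matches the kind expected by the mpi_f08 bindings
  call MPI_Comm_rank(MPI_COMM_WORLD, worldrank, err_MPI)   ! 0 .. worldsize-1
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI_Comm_rank failed'
  call MPI_Comm_size(MPI_COMM_WORLD, worldsize, err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI_Comm_size failed'

end subroutine parallelization_init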