polishing
MPI, HDF5, PETSc, and DAMASK might have different integer kinds ..
parent 3fb5bd459c
commit 8223dc7fa7
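The message points at the actual bug class: Fortran status arguments are plain integers, but HDF5, MPI, and PETSc each fix their own integer kind, so funnelling all of them through a single shared `integer :: error` (as quit.f90 did before this commit) risks kind mismatches. Below is a minimal standalone sketch of the corrected one-variable-per-library pattern, assuming an MPI library that provides MPI_INTEGER_KIND (MPI >= 4.0) and preprocessing for the PETSc header; the subroutine name cleanup_demo is hypothetical and not part of the commit:

#include <petsc/finclude/petscsys.h>
subroutine cleanup_demo()
  use PETScSys                             ! provides PetscErrorCode and CHKERRQ
  use MPI_f08                              ! provides MPI_INTEGER_KIND (MPI >= 4.0)
  use HDF5                                 ! h5close_f expects a default integer

  implicit none

  integer                   :: err_HDF5    ! default kind, as the HDF5 Fortran API expects
  integer(MPI_INTEGER_KIND) :: err_MPI     ! kind chosen by the MPI library
  PetscErrorCode            :: err_PETSc   ! kind chosen when PETSc was built

  call h5close_f(err_HDF5)                 ! a wrong-kind actual argument is a compile error here
  if (err_HDF5 /= 0) write(6,'(a,i5)') ' Error in h5close_f ',err_HDF5

  call PetscFinalize(err_PETSc)
  CHKERRQ(err_PETSc)

  call MPI_finalize(err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in MPI_finalize ',err_MPI

end subroutine cleanup_demo

The quit.f90 hunks below introduce exactly this pattern, one error variable per library.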
@@ -292,12 +292,12 @@ subroutine spectral_utilities_init
                                       tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
                                       tensorField_real, tensorField_fourier, &                    ! input data, output data
                                       PETSC_COMM_WORLD, FFTW_planner_flag)                        ! use all processors, planer precision
-  if (.not. C_ASSOCIATED(planTensorForth)) error stop 'FFTW error'
+  if (.not. c_associated(planTensorForth)) error stop 'FFTW error'
   planTensorBack = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], &         ! dimension, logical length in each dimension in reversed order
                                       tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
                                       tensorField_fourier,tensorField_real, &                     ! input data, output data
                                       PETSC_COMM_WORLD, FFTW_planner_flag)                        ! all processors, planer precision
-  if (.not. C_ASSOCIATED(planTensorBack)) error stop 'FFTW error'
+  if (.not. c_associated(planTensorBack)) error stop 'FFTW error'
 
 !--------------------------------------------------------------------------------------------------
 ! vector MPI fftw plans
@@ -50,7 +50,7 @@ subroutine parallelization_init
 !$ character(len=6) NumThreadsString
 
 
-  PetscErrorCode :: petsc_err
+  PetscErrorCode :: err_PETSc
 #ifdef _OPENMP
   ! If openMP is enabled, check if the MPI libary supports it and initialize accordingly.
   ! Otherwise, the first call to PETSc will do the initialization.
@@ -60,18 +60,18 @@ subroutine parallelization_init
 #endif
 
 #if defined(DEBUG)
-  call PetscInitialize(PETSC_NULL_CHARACTER,petsc_err)
+  call PetscInitialize(PETSC_NULL_CHARACTER,err_PETSc)
 #else
-  call PetscInitializeNoArguments(petsc_err)
+  call PetscInitializeNoArguments(err_PETSc)
 #endif
-  CHKERRQ(petsc_err)
+  CHKERRQ(err_PETSc)
 
 #if defined(DEBUG) && defined(__INTEL_COMPILER)
-  call PetscSetFPTrap(PETSC_FP_TRAP_ON,petsc_err)
+  call PetscSetFPTrap(PETSC_FP_TRAP_ON,err_PETSc)
 #else
-  call PetscSetFPTrap(PETSC_FP_TRAP_OFF,petsc_err)
+  call PetscSetFPTrap(PETSC_FP_TRAP_OFF,err_PETSc)
 #endif
-  CHKERRQ(petsc_err)
+  CHKERRQ(err_PETSc)
 
   call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err)
   if (err /= 0) error stop 'Could not determine worldrank'
src/quit.f90 (26 lines changed)
@@ -15,20 +15,21 @@ subroutine quit(stop_id)
   implicit none
   integer, intent(in) :: stop_id
   integer, dimension(8) :: dateAndTime
-  integer :: error
-  PetscErrorCode :: ierr = 0
+  integer :: err_HDF5
+  integer(MPI_INTEGER_KIND) :: err_MPI
+  PetscErrorCode :: err_PETSc
 
-  call h5open_f(error)
-  if (error /= 0) write(6,'(a,i5)') ' Error in h5open_f ',error ! prevents error if not opened yet
-  call h5close_f(error)
-  if (error /= 0) write(6,'(a,i5)') ' Error in h5close_f ',error
+  call h5open_f(err_HDF5)
+  if (err_HDF5 /= 0) write(6,'(a,i5)') ' Error in h5open_f ',err_HDF5 ! prevents error if not opened yet
+  call h5close_f(err_HDF5)
+  if (err_HDF5 /= 0) write(6,'(a,i5)') ' Error in h5close_f ',err_HDF5
 
-  call PetscFinalize(ierr)
-  CHKERRQ(ierr)
+  call PetscFinalize(err_PETSc)
+  CHKERRQ(err_PETSc)
 
 #ifdef _OPENMP
-  call MPI_finalize(error)
-  if (error /= 0) write(6,'(a,i5)') ' Error in MPI_finalize',error
+  call MPI_finalize(err_MPI)
+  if (err_MPI /= 0) write(6,'(a,i5)') ' Error in MPI_finalize',err_MPI
 #endif
 
   call date_and_time(values = dateAndTime)
@@ -40,7 +41,10 @@ subroutine quit(stop_id)
              dateAndTime(6),':',&
              dateAndTime(7)
 
-  if (stop_id == 0 .and. ierr == 0 .and. error == 0) stop 0 ! normal termination
+  if (stop_id == 0 .and. &
+      err_HDF5 == 0 .and. &
+      err_MPI == 0_MPI_INTEGER_KIND .and. &
+      err_PETSC == 0) stop 0 ! normal termination
   stop 1 ! error (message from IO_error)
 
 end subroutine quit