polishing

MPI, HDF5, PETSc, and DAMASK might have different integer kinds ..
Martin Diehl 2022-01-13 08:17:31 +01:00
parent 3fb5bd459c
commit 8223dc7fa7
3 changed files with 24 additions and 20 deletions
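The motivation: MPI, HDF5, and PETSc each prescribe their own integer kind for error/status arguments, so a single default-kind variable such as the old error/ierr is only correct when those kinds happen to coincide. In the hunks below, each library gets its own error variable of the matching kind. A minimal sketch of that convention, not part of this commit; it assumes a preprocessed .F90 source, the PETSc finclude headers, and an MPI module (or project-wide constant) that provides MPI_INTEGER_KIND as used in the quit() diff:

    program check_error_kinds
    #include <petsc/finclude/petscsys.h>
      use PETScSys
      use MPI
      use HDF5

      implicit none
      integer                   :: err_HDF5   ! HDF5 Fortran API reports status in a default integer
      integer(MPI_INTEGER_KIND) :: err_MPI    ! kind of the MPI bindings' integer arguments
      PetscErrorCode            :: err_PETSc  ! PETSc error codes use PETSc's own kind

      call PetscInitializeNoArguments(err_PETSc)          ! also initializes MPI when PETSc was built with it
      if (err_PETSc /= 0) error stop 'PETSc error'

      call MPI_Barrier(MPI_COMM_WORLD,err_MPI)
      if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

      call h5open_f(err_HDF5)
      if (err_HDF5 /= 0) error stop 'HDF5 error'
      call h5close_f(err_HDF5)

      call PetscFinalize(err_PETSc)
      if (err_PETSc /= 0) error stop 'PETSc error'

    end program check_error_kinds

With this split, a kind mismatch between a library's Fortran bindings and a default integer shows up at compile time instead of silently corrupting the status value.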


@@ -292,12 +292,12 @@ subroutine spectral_utilities_init
  tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
  tensorField_real, tensorField_fourier, & ! input data, output data
  PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planer precision
- if (.not. C_ASSOCIATED(planTensorForth)) error stop 'FFTW error'
+ if (.not. c_associated(planTensorForth)) error stop 'FFTW error'
  planTensorBack = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order
  tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
  tensorField_fourier,tensorField_real, & ! input data, output data
  PETSC_COMM_WORLD, FFTW_planner_flag) ! all processors, planer precision
- if (.not. C_ASSOCIATED(planTensorBack)) error stop 'FFTW error'
+ if (.not. c_associated(planTensorBack)) error stop 'FFTW error'
 !--------------------------------------------------------------------------------------------------
 ! vector MPI fftw plans


@@ -50,7 +50,7 @@ subroutine parallelization_init
  !$ character(len=6) NumThreadsString
- PetscErrorCode :: petsc_err
+ PetscErrorCode :: err_PETSc
  #ifdef _OPENMP
  ! If openMP is enabled, check if the MPI libary supports it and initialize accordingly.
  ! Otherwise, the first call to PETSc will do the initialization.
@@ -60,18 +60,18 @@ subroutine parallelization_init
  #endif
  #if defined(DEBUG)
- call PetscInitialize(PETSC_NULL_CHARACTER,petsc_err)
+ call PetscInitialize(PETSC_NULL_CHARACTER,err_PETSc)
  #else
- call PetscInitializeNoArguments(petsc_err)
+ call PetscInitializeNoArguments(err_PETSc)
  #endif
- CHKERRQ(petsc_err)
+ CHKERRQ(err_PETSc)
  #if defined(DEBUG) && defined(__INTEL_COMPILER)
- call PetscSetFPTrap(PETSC_FP_TRAP_ON,petsc_err)
+ call PetscSetFPTrap(PETSC_FP_TRAP_ON,err_PETSc)
  #else
- call PetscSetFPTrap(PETSC_FP_TRAP_OFF,petsc_err)
+ call PetscSetFPTrap(PETSC_FP_TRAP_OFF,err_PETSc)
  #endif
- CHKERRQ(petsc_err)
+ CHKERRQ(err_PETSc)
  call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err)
  if (err /= 0) error stop 'Could not determine worldrank'

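The comment in the hunk above relies on MPI being initialized with explicit thread support whenever OpenMP is active, before PETSc's own initialization runs. A hedged sketch of what such an initialization typically looks like, not taken from this commit: the helper name init_MPI_for_OpenMP is made up, MPI_Init_thread and MPI_THREAD_FUNNELED are standard MPI, and the error handling follows the err_MPI convention introduced here:

    subroutine init_MPI_for_OpenMP()
      use MPI
      implicit none
      integer(MPI_INTEGER_KIND) :: threadLevel, err_MPI

      call MPI_Init_thread(MPI_THREAD_FUNNELED,threadLevel,err_MPI)       ! funneled: only the master thread makes MPI calls
      if (err_MPI /= 0_MPI_INTEGER_KIND)     error stop 'MPI init failed'
      if (threadLevel < MPI_THREAD_FUNNELED) error stop 'MPI library does not support OpenMP'
    end subroutine init_MPI_for_OpenMP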

@@ -15,20 +15,21 @@ subroutine quit(stop_id)
  implicit none
  integer, intent(in) :: stop_id
  integer, dimension(8) :: dateAndTime
- integer :: error
- PetscErrorCode :: ierr = 0
+ integer :: err_HDF5
+ integer(MPI_INTEGER_KIND) :: err_MPI
+ PetscErrorCode :: err_PETSc
- call h5open_f(error)
- if (error /= 0) write(6,'(a,i5)') ' Error in h5open_f ',error ! prevents error if not opened yet
- call h5close_f(error)
- if (error /= 0) write(6,'(a,i5)') ' Error in h5close_f ',error
+ call h5open_f(err_HDF5)
+ if (err_HDF5 /= 0) write(6,'(a,i5)') ' Error in h5open_f ',err_HDF5 ! prevents error if not opened yet
+ call h5close_f(err_HDF5)
+ if (err_HDF5 /= 0) write(6,'(a,i5)') ' Error in h5close_f ',err_HDF5
- call PetscFinalize(ierr)
- CHKERRQ(ierr)
+ call PetscFinalize(err_PETSc)
+ CHKERRQ(err_PETSc)
  #ifdef _OPENMP
- call MPI_finalize(error)
- if (error /= 0) write(6,'(a,i5)') ' Error in MPI_finalize',error
+ call MPI_finalize(err_MPI)
+ if (err_MPI /= 0) write(6,'(a,i5)') ' Error in MPI_finalize',err_MPI
  #endif
  call date_and_time(values = dateAndTime)
@@ -40,7 +41,10 @@ subroutine quit(stop_id)
  dateAndTime(6),':',&
  dateAndTime(7)
- if (stop_id == 0 .and. ierr == 0 .and. error == 0) stop 0 ! normal termination
+ if (stop_id == 0 .and. &
+ err_HDF5 == 0 .and. &
+ err_MPI == 0_MPI_INTEGER_KIND .and. &
+ err_PETSC == 0) stop 0 ! normal termination
  stop 1 ! error (message from IO_error)
  end subroutine quit