From 4ca0ea6af238bd1aaef9339d8039fdf06d71328b Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Sat, 5 Feb 2022 18:32:53 +0100
Subject: [PATCH] avoid linking issues with gfortran+MPI

most likely related to the fact that HDF5 uses the old Fortran interface,
not MPI_f08 as DAMASK does
---
 src/grid/grid_mech_spectral_basic.f90        | 8 +++++++-
 src/grid/grid_mech_spectral_polarisation.f90 | 8 +++++++-
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/src/grid/grid_mech_spectral_basic.f90 b/src/grid/grid_mech_spectral_basic.f90
index fa2e17bd9..2f2b73f01 100644
--- a/src/grid/grid_mech_spectral_basic.f90
+++ b/src/grid/grid_mech_spectral_basic.f90
@@ -79,6 +79,12 @@ module grid_mechanical_spectral_basic
     err_BC, &                                          !< deviation from stress BC
     err_div                                            !< RMS of div of P
 
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
+  type(MPI_Status) :: status
+#else
+  integer, dimension(MPI_STATUS_SIZE) :: status
+#endif
+
   integer :: &
     totalIter = 0                                      !< total iteration in current increment
 
@@ -244,7 +250,7 @@ subroutine grid_mechanical_spectral_basic_init
   call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
                      MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-  call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
+  call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,status,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call MPI_File_close(fileUnit,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
diff --git a/src/grid/grid_mech_spectral_polarisation.f90 b/src/grid/grid_mech_spectral_polarisation.f90
index ee87c77a6..b72cc4232 100644
--- a/src/grid/grid_mech_spectral_polarisation.f90
+++ b/src/grid/grid_mech_spectral_polarisation.f90
@@ -90,6 +90,12 @@ module grid_mechanical_spectral_polarisation
     err_curl, &                                        !< RMS of curl of F
     err_div                                            !< RMS of div of P
 
+#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
+  type(MPI_Status) :: status
+#else
+  integer, dimension(MPI_STATUS_SIZE) :: status
+#endif
+
   integer :: &
     totalIter = 0                                      !< total iteration in current increment
 
@@ -270,7 +276,7 @@ subroutine grid_mechanical_spectral_polarisation_init
   call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
                      MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-  call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
+  call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,status,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call MPI_File_close(fileUnit,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
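
Note (illustration only, not part of the patch): the sketch below shows the same idea in
isolation, namely passing an explicit status variable to MPI_File_read instead of
MPI_STATUS_IGNORE. It assumes a plain mpi_f08 build; the file name 'example.C_ref', the
standalone program structure, and the default-kind integers are placeholders, and the
PETSc preprocessor guard from the hunks above (which falls back to
integer, dimension(MPI_STATUS_SIZE) when the mpi_f08 types are not visible) is left out
so the sketch compiles on its own.

! read_C_ref_sketch.f90 -- hedged sketch, not DAMASK code.
! Reads 81 doubles from a binary file via MPI I/O, passing an explicit
! status variable instead of MPI_STATUS_IGNORE, as in the patch above.
program read_C_ref_sketch
  use MPI_f08
  implicit none

  real(8), dimension(81) :: C_minMaxAvg                ! 3x3x3x3 stiffness, flattened
  type(MPI_File)         :: fileUnit
  type(MPI_Status)       :: status                     ! explicit status, not MPI_STATUS_IGNORE
  integer                :: err_MPI

  call MPI_Init(err_MPI)
  if (err_MPI /= 0) error stop 'MPI error'

  call MPI_File_open(MPI_COMM_WORLD,'example.C_ref', &
                     MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
  if (err_MPI /= 0) error stop 'MPI error'
  call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,status,err_MPI)
  if (err_MPI /= 0) error stop 'MPI error'
  call MPI_File_close(fileUnit,err_MPI)
  if (err_MPI /= 0) error stop 'MPI error'

  call MPI_Finalize(err_MPI)
end program read_C_ref_sketch

Compile with the MPI compiler wrapper of your implementation (e.g. mpifort or mpif90) and
run on a single rank against any file containing at least 81 doubles.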