use correct kind of constants for calls to MPI/PETSc

Martin Diehl 2022-01-13 13:50:30 +01:00
parent 91a3ea96ec
commit 29530da579
9 changed files with 57 additions and 56 deletions
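The pattern applied throughout the diff below is uniform: literal count, root, and tag arguments to MPI routines now carry the kind MPI_INTEGER_KIND exposed by the MPI module, and literal PETSc integer arguments carry the PetscInt kind (spelled pPETSCINT in this code base), so the constants no longer rely on the default integer kind matching the library interfaces. A minimal before/after sketch of that pattern, reusing names from this commit (err_MPI, geomSize, MPI_DOUBLE); the declarations are assumptions for illustration, not part of the diff:

integer(MPI_INTEGER_KIND) :: err_MPI
real(pReal), dimension(3) :: geomSize

! before: default-kind literals for count and root
call MPI_Bcast(geomSize,3,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
! after: the literals carry the kind of the MPI integer arguments
call MPI_Bcast(geomSize,3_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'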

View File

@@ -88,12 +88,12 @@ subroutine discretization_grid_init(restart)
end if
call MPI_Bcast(grid,3,MPI_INTEGER,0,MPI_COMM_WORLD, err_MPI)
call MPI_Bcast(grid,3_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (grid(1) < 2) call IO_error(844, ext_msg='cells(1) must be larger than 1')
call MPI_Bcast(geomSize,3,MPI_DOUBLE,0,MPI_COMM_WORLD, err_MPI)
call MPI_Bcast(geomSize,3_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(origin,3,MPI_DOUBLE,0,MPI_COMM_WORLD, err_MPI)
call MPI_Bcast(origin,3_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
print'(/,1x,a,3(i12,1x))', 'cells a b c: ', grid

View File

@@ -194,10 +194,10 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
solution%iterationsNeeded = totalIter
end if
stagNorm = maxval(abs(phi_current - phi_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi_current))
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
phi_stagInc = phi_current

View File

@@ -241,16 +241,16 @@ subroutine grid_mechanical_FEM_init
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(F_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(F_aim_lastInc,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')

View File

@@ -194,16 +194,16 @@ subroutine grid_mechanical_spectral_basic_init
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(F_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(F_aim_lastInc,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
@@ -223,10 +223,10 @@ subroutine grid_mechanical_spectral_basic_init
restartRead2: if (interface_restartInc > 0) then
print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(C_volAvgLastInc,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_closeGroup(groupHandle)
@@ -235,7 +235,7 @@ subroutine grid_mechanical_spectral_basic_init
call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
@@ -503,7 +503,7 @@ subroutine formResidual(in, F, &
call utilities_constitutiveResponse(residuum, & ! "residuum" gets field of first PK stress (to save memory)
P_av,C_volAvg,C_minMaxAvg, &
F,params%Delta_t,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
!--------------------------------------------------------------------------------------------------

View File

@@ -216,16 +216,16 @@ subroutine grid_mechanical_spectral_polarisation_init
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(F_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(F_aim_lastInc,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
@@ -250,10 +250,10 @@ subroutine grid_mechanical_spectral_polarisation_init
restartRead2: if (interface_restartInc > 0) then
print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(C_volAvgLastInc,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_closeGroup(groupHandle)
@@ -262,7 +262,7 @@ subroutine grid_mechanical_spectral_polarisation_init
call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
@@ -564,7 +564,7 @@ subroutine formResidual(in, FandF_tau, &
X_RANGE, Y_RANGE, Z_RANGE)
F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,F_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,F_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call SNESGetNumberFunctionEvals(snes,nfuncs,err_PETSc); CHKERRQ(err_PETSc)
@@ -609,7 +609,7 @@ subroutine formResidual(in, FandF_tau, &
call utilities_constitutiveResponse(residual_F, & ! "residuum" gets field of first PK stress (to save memory)
P_av,C_volAvg,C_minMaxAvg, &
F - residual_F_tau/num%beta,params%Delta_t,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
!--------------------------------------------------------------------------------------------------
! stress BC handling

View File

@@ -192,10 +192,10 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
solution%iterationsNeeded = totalIter
end if
stagNorm = maxval(abs(T_current - T_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current))
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
T_stagInc = T_current
@@ -326,10 +326,10 @@ subroutine updateReference()
end do
K_ref = K_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
mu_ref = mu_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end subroutine updateReference

View File

@@ -590,7 +590,7 @@ real(pReal) function utilities_divergenceRMS()
conjg(-xi1st(1:3,grid1Red,j,k))*rescaledGeom))**2)
enddo; enddo
if (grid(1) == 1) utilities_divergenceRMS = utilities_divergenceRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
call MPI_Allreduce(MPI_IN_PLACE,utilities_divergenceRMS,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,utilities_divergenceRMS,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
utilities_divergenceRMS = sqrt(utilities_divergenceRMS) * wgt ! RMS in real space calculated with Parsevals theorem from Fourier space
@@ -651,7 +651,7 @@ real(pReal) function utilities_curlRMS()
+ sum(curl_fourier%re**2 + curl_fourier%im**2) ! this layer (Nyquist) does not have a conjugate complex counterpart (if grid(1) /= 1)
enddo; enddo
call MPI_Allreduce(MPI_IN_PLACE,utilities_curlRMS,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,utilities_curlRMS,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
utilities_curlRMS = sqrt(utilities_curlRMS) * wgt
if (grid(1) == 1) utilities_curlRMS = utilities_curlRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
@@ -820,7 +820,7 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
P = reshape(homogenization_P, [3,3,grid(1),grid(2),grid3])
P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,P_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (debugRotation) print'(/,1x,a,/,2(3(2x,f12.4,1x)/),3(2x,f12.4,1x))', &
'Piola--Kirchhoff stress (lab) / MPa =', transpose(P_av)*1.e-6_pReal
@@ -845,21 +845,21 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
enddo
valueAndRank = [dPdF_norm_max,real(worldrank,pReal)]
call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1, MPI_2DOUBLE_PRECISION, MPI_MAXLOC, MPI_COMM_WORLD, err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1_MPI_INTEGER_KIND,MPI_2DOUBLE_PRECISION,MPI_MAXLOC,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(dPdF_max,81,MPI_DOUBLE,int(valueAndRank(2)),MPI_COMM_WORLD, err_MPI)
call MPI_Bcast(dPdF_max,81_MPI_INTEGER_KIND,MPI_DOUBLE,int(valueAndRank(2),MPI_INTEGER_KIND),MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
valueAndRank = [dPdF_norm_min,real(worldrank,pReal)]
call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1, MPI_2DOUBLE_PRECISION, MPI_MINLOC, MPI_COMM_WORLD, err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1_MPI_INTEGER_KIND,MPI_2DOUBLE_PRECISION,MPI_MINLOC,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(dPdF_min,81,MPI_DOUBLE,int(valueAndRank(2)),MPI_COMM_WORLD, err_MPI)
call MPI_Bcast(dPdF_min,81_MPI_INTEGER_KIND,MPI_DOUBLE,int(valueAndRank(2),MPI_INTEGER_KIND),MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
C_minmaxAvg = 0.5_pReal*(dPdF_max + dPdF_min)
C_volAvg = sum(homogenization_dPdF,dim=5)
call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
C_volAvg = C_volAvg * wgt
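One non-mechanical spot in the hunk above: the MPI_MAXLOC/MPI_MINLOC reductions return the owning rank in valueAndRank(2) as a real, and that value is now cast with int(valueAndRank(2),MPI_INTEGER_KIND) before being used as the root of the following broadcast. A minimal sketch of the sequence, reusing names from this hunk (declarations assumed for illustration):

real(pReal), dimension(2) :: valueAndRank
integer(MPI_INTEGER_KIND) :: err_MPI

valueAndRank = [dPdF_norm_max,real(worldrank,pReal)]
call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1_MPI_INTEGER_KIND,MPI_2DOUBLE_PRECISION,MPI_MAXLOC,MPI_COMM_WORLD,err_MPI)
! the winning rank arrives as a real; convert it to the MPI integer kind for the root argument
call MPI_Bcast(dPdF_max,81_MPI_INTEGER_KIND,MPI_DOUBLE,int(valueAndRank(2),MPI_INTEGER_KIND),MPI_COMM_WORLD,err_MPI)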
@@ -914,7 +914,7 @@ function utilities_forwardField(Delta_t,field_lastInc,rate,aim)
utilities_forwardField = field_lastInc + rate*Delta_t
if (present(aim)) then !< correct to match average
fieldDiff = sum(sum(sum(utilities_forwardField,dim=5),dim=4),dim=3)*wgt
call MPI_Allreduce(MPI_IN_PLACE,fieldDiff,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,fieldDiff,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
fieldDiff = fieldDiff - aim
utilities_forwardField = utilities_forwardField - &
@@ -985,8 +985,9 @@ subroutine utilities_updateCoords(F)
real(pReal), dimension(3, grid(1)+1,grid(2)+1,grid3+1) :: nodeCoords
integer :: &
i,j,k,n, &
rank_t, rank_b, &
c
integer(MPI_INTEGER_KIND) :: &
rank_t, rank_b
integer(MPI_INTEGER_KIND) :: err_MPI
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
type(MPI_Request), dimension(4) :: request
@@ -1029,26 +1030,26 @@ subroutine utilities_updateCoords(F)
!--------------------------------------------------------------------------------------------------
! average F
if (grid3Offset == 0) Favg = real(tensorField_fourier(1:3,1:3,1,1,1),pReal)*wgt
call MPI_Bcast(Favg,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(Favg,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
!--------------------------------------------------------------------------------------------------
! pad cell center fluctuations along z-direction (needed when running MPI simulation)
IPfluct_padded(1:3,1:grid(1),1:grid(2),2:grid3+1) = vectorField_real(1:3,1:grid(1),1:grid(2),1:grid3)
c = product(shape(IPfluct_padded(:,:,:,1))) !< amount of data to transfer
rank_t = modulo(worldrank+1,worldsize)
rank_b = modulo(worldrank-1,worldsize)
rank_t = modulo(worldrank+1_MPI_INTEGER_KIND,worldsize)
rank_b = modulo(worldrank-1_MPI_INTEGER_KIND,worldsize)
! send bottom layer to process below
call MPI_Isend(IPfluct_padded(:,:,:,2), c,MPI_DOUBLE,rank_b,0,MPI_COMM_WORLD,request(1),err_MPI)
call MPI_Isend(IPfluct_padded(:,:,:,2), c,MPI_DOUBLE,rank_b,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(1),err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Irecv(IPfluct_padded(:,:,:,grid3+2),c,MPI_DOUBLE,rank_t,0,MPI_COMM_WORLD,request(2),err_MPI)
call MPI_Irecv(IPfluct_padded(:,:,:,grid3+2),c,MPI_DOUBLE,rank_t,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(2),err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
! send top layer to process above
call MPI_Isend(IPfluct_padded(:,:,:,grid3+1),c,MPI_DOUBLE,rank_t,1,MPI_COMM_WORLD,request(3),err_MPI)
call MPI_Isend(IPfluct_padded(:,:,:,grid3+1),c,MPI_DOUBLE,rank_t,1_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(3),err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Irecv(IPfluct_padded(:,:,:,1), c,MPI_DOUBLE,rank_b,1,MPI_COMM_WORLD,request(4),err_MPI)
call MPI_Irecv(IPfluct_padded(:,:,:,1), c,MPI_DOUBLE,rank_b,1_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(4),err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Waitall(4,request,status,err_MPI)
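The halo exchange above also touches the rank and tag arguments: rank_t and rank_b are now declared integer(MPI_INTEGER_KIND) and computed with kind-suffixed literals, which presumes worldrank and worldsize carry that kind as well, and the message tags 0 and 1 likewise get the suffix. A minimal sketch of the neighbour-rank computation, with the declarations assumed for illustration:

integer(MPI_INTEGER_KIND) :: rank_t, rank_b, worldrank, worldsize

rank_t = modulo(worldrank+1_MPI_INTEGER_KIND,worldsize)  ! process holding the layer above
rank_b = modulo(worldrank-1_MPI_INTEGER_KIND,worldsize)  ! process holding the layer below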

View File

@@ -156,7 +156,7 @@ subroutine utilities_constitutiveResponse(timeinc,P_av,forwardData)
cutBack = .false.
P_av = sum(homogenization_P,dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
call MPI_Allreduce(MPI_IN_PLACE,P_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

View File

@@ -114,17 +114,17 @@ subroutine discretization_mesh_init(restart)
! get number of IDs in face sets (for boundary conditions?)
call DMGetLabelSize(globalMesh,'Face Sets',mesh_Nboundaries,err_PETSc)
CHKERRQ(err_PETSc)
call MPI_Bcast(mesh_Nboundaries,1,MPI_INTEGER,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(mesh_Nboundaries,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(mesh_NcpElemsGlobal,1,MPI_INTEGER,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(mesh_NcpElemsGlobal,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(dimPlex,1,MPI_INTEGER,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(dimPlex,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (worldrank == 0) then
call DMClone(globalMesh,geomMesh,err_PETSc)
else
call DMPlexDistribute(globalMesh,0,sf,geomMesh,err_PETSc)
call DMPlexDistribute(globalMesh,0_pPETSCINT,sf,geomMesh,err_PETSc)
endif
CHKERRQ(err_PETSc)
@@ -140,14 +140,14 @@ subroutine discretization_mesh_init(restart)
CHKERRQ(err_PETSc)
call ISRestoreIndicesF90(faceSetIS,pFaceSets,err_PETSc)
endif
call MPI_Bcast(mesh_boundaries,mesh_Nboundaries,MPI_INTEGER,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(mesh_boundaries,mesh_Nboundaries,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDestroy(globalMesh,err_PETSc); CHKERRQ(err_PETSc)
call DMGetStratumSize(geomMesh,'depth',dimPlex,mesh_NcpElems,err_PETSc)
CHKERRQ(err_PETSc)
call DMGetStratumSize(geomMesh,'depth',0,mesh_Nnodes,err_PETSc)
call DMGetStratumSize(geomMesh,'depth',0_pPETSCINT,mesh_Nnodes,err_PETSc)
CHKERRQ(err_PETSc)
! Get initial nodal coordinates
@@ -197,7 +197,7 @@ subroutine mesh_FEM_build_ipVolumes(dimPlex)
allocate(mesh_ipVolume(mesh_maxNips,mesh_NcpElems),source=0.0_pReal)
call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,err_PETSc)
call DMPlexGetHeightStratum(geomMesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)
allocate(pCent(dimPlex))
allocate(pNorm(dimPlex))
@@ -229,7 +229,7 @@ subroutine mesh_FEM_build_ipCoordinates(dimPlex,qPoints)
allocate(pV0(dimPlex))
allocatE(pCellJ(dimPlex**2))
allocatE(pinvCellJ(dimPlex**2))
call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,err_PETSc)
call DMPlexGetHeightStratum(geomMesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)
do cell = cellStart, cellEnd-1 !< loop over all elements
call DMPlexComputeCellGeometryAffineFEM(geomMesh,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
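
The PETSc-facing calls in this last file follow the same idea: literal integer arguments such as the height/stratum value 0 now carry the PetscInt kind, written pPETSCINT in this code base. A minimal sketch, reusing geomMesh and err_PETSc from the hunks above (declarations assumed, not part of the commit):

PetscInt :: cellStart, cellEnd
PetscErrorCode :: err_PETSc

! the literal 0 gets the PetscInt kind so it matches the DMPlex interface
call DMPlexGetHeightStratum(geomMesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)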