final MPI-DAMASK integer kind decoupling

bugfix: set error for OpenMP calculations
Martin Diehl 2022-01-13 12:41:11 +01:00
parent d7dbb6ffc2
commit 91a3ea96ec
3 changed files with 114 additions and 84 deletions
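
In short: MPI error codes now live in a variable of MPI's own integer kind (integer(MPI_INTEGER_KIND) :: err_MPI), PETSc calls get a dedicated PetscErrorCode :: err_PETSc instead of the shared default-kind ierr, and every MPI return code is checked with error stop. A minimal, self-contained sketch of the pattern (an illustration, not DAMASK source; MPI_IK is a hypothetical stand-in for the MPI_INTEGER_KIND constant used in the diff, which the MPI-4 Fortran bindings export, and kind(0) is only the right value when the MPI library was built with default-kind INTEGERs):

program err_kind_demo
  use mpi
  implicit none

  integer, parameter :: MPI_IK = kind(0)      ! stand-in for MPI_INTEGER_KIND (assumption: MPI built with default-kind INTEGER)
  integer(MPI_IK)    :: err_MPI               ! MPI error code: MPI's kind, not DAMASK's or PETSc's
  real(kind(1.0d0))  :: mu_ref

  call MPI_Init(err_MPI)
  if (err_MPI /= 0_MPI_IK) error stop 'MPI error'

  mu_ref = 1.0d0
  ! counts and other integer arguments carry the MPI kind explicitly, too
  call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1_MPI_IK,MPI_DOUBLE_PRECISION,MPI_SUM,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_IK) error stop 'MPI error'

  call MPI_Finalize(err_MPI)
  if (err_MPI /= 0_MPI_IK) error stop 'MPI error'

end program err_kind_demo

The point of the decoupling: a PETSc built with 64-bit indices and an MPI library built with 4-byte Fortran integers no longer share one ierr declaration that silently assumes all kinds agree.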


@@ -69,7 +69,8 @@ subroutine grid_damage_spectral_init()
PetscInt, dimension(0:worldsize-1) :: localK
DM :: damage_grid
Vec :: uBound, lBound
PetscErrorCode :: ierr
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
class(tNode), pointer :: &
num_grid, &
num_generic
@@ -99,18 +100,19 @@ subroutine grid_damage_spectral_init()
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-damage_snes_type newtonls -damage_snes_mf &
&-damage_snes_ksp_ew -damage_ksp_type fgmres',ierr)
CHKERRQ(ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
CHKERRQ(ierr)
&-damage_snes_ksp_ew -damage_ksp_type fgmres',err_PETSc)
CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
CHKERRQ(err_PETSc)
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,damage_snes,ierr); CHKERRQ(ierr)
call SNESSetOptionsPrefix(damage_snes,'damage_',ierr);CHKERRQ(ierr)
call SNESCreate(PETSC_COMM_WORLD,damage_snes,err_PETSc); CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(damage_snes,'damage_',err_PETSc);CHKERRQ(err_PETSc)
localK = 0_pPetscInt
localK(worldrank) = int(grid3,pPetscInt)
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3D(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
@@ -118,31 +120,31 @@ subroutine grid_damage_spectral_init()
1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
1_pPetscInt, 0_pPetscInt, & ! #dof (phi, scalar), ghost boundary width (domain overlap)
[int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
damage_grid,ierr) ! handle, error
CHKERRQ(ierr)
call SNESSetDM(damage_snes,damage_grid,ierr); CHKERRQ(ierr) ! connect snes to da
call DMsetFromOptions(damage_grid,ierr); CHKERRQ(ierr)
call DMsetUp(damage_grid,ierr); CHKERRQ(ierr)
call DMCreateGlobalVector(damage_grid,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 1, i.e. every def grad tensor)
call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector
CHKERRQ(ierr)
call SNESSetFromOptions(damage_snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments
call SNESGetType(damage_snes,snes_type,ierr); CHKERRQ(ierr)
damage_grid,err_PETSc) ! handle, error
CHKERRQ(err_PETSc)
call SNESSetDM(damage_snes,damage_grid,err_PETSc); CHKERRQ(err_PETSc) ! connect snes to da
call DMsetFromOptions(damage_grid,err_PETSc); CHKERRQ(err_PETSc)
call DMsetUp(damage_grid,err_PETSc); CHKERRQ(err_PETSc)
call DMCreateGlobalVector(damage_grid,solution_vec,err_PETSc); CHKERRQ(err_PETSc) ! global solution vector (grid x 1, i.e. every def grad tensor)
call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
CHKERRQ(err_PETSc)
call SNESSetFromOptions(damage_snes,err_PETSc); CHKERRQ(err_PETSc) ! pull it all together with additional CLI arguments
call SNESGetType(damage_snes,snes_type,err_PETSc); CHKERRQ(err_PETSc)
if (trim(snes_type) == 'vinewtonrsls' .or. &
trim(snes_type) == 'vinewtonssls') then
call DMGetGlobalVector(damage_grid,lBound,ierr); CHKERRQ(ierr)
call DMGetGlobalVector(damage_grid,uBound,ierr); CHKERRQ(ierr)
call VecSet(lBound,0.0_pReal,ierr); CHKERRQ(ierr)
call VecSet(uBound,1.0_pReal,ierr); CHKERRQ(ierr)
call SNESVISetVariableBounds(damage_snes,lBound,uBound,ierr) ! variable bounds for variational inequalities like contact mechanics, damage etc.
call DMRestoreGlobalVector(damage_grid,lBound,ierr); CHKERRQ(ierr)
call DMRestoreGlobalVector(damage_grid,uBound,ierr); CHKERRQ(ierr)
call DMGetGlobalVector(damage_grid,lBound,err_PETSc); CHKERRQ(err_PETSc)
call DMGetGlobalVector(damage_grid,uBound,err_PETSc); CHKERRQ(err_PETSc)
call VecSet(lBound,0.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
call VecSet(uBound,1.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
call SNESVISetVariableBounds(damage_snes,lBound,uBound,err_PETSc) ! variable bounds for variational inequalities like contact mechanics, damage etc.
call DMRestoreGlobalVector(damage_grid,lBound,err_PETSc); CHKERRQ(err_PETSc)
call DMRestoreGlobalVector(damage_grid,uBound,err_PETSc); CHKERRQ(err_PETSc)
end if
!--------------------------------------------------------------------------------------------------
! init fields
call DMDAGetCorners(damage_grid,xstart,ystart,zstart,xend,yend,zend,ierr)
CHKERRQ(ierr)
call DMDAGetCorners(damage_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc)
CHKERRQ(err_PETSc)
xend = xstart + xend - 1
yend = ystart + yend - 1
zend = zstart + zend - 1
@@ -150,7 +152,7 @@ subroutine grid_damage_spectral_init()
allocate(phi_lastInc(grid(1),grid(2),grid3), source=1.0_pReal)
allocate(phi_stagInc(grid(1),grid(2),grid3), source=1.0_pReal)
call VecSet(solution_vec,1.0_pReal,ierr); CHKERRQ(ierr)
call VecSet(solution_vec,1.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
call updateReference
@@ -169,7 +171,8 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
PetscInt :: devNull
PetscReal :: phi_min, phi_max, stagNorm
PetscErrorCode :: ierr
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason
solution%converged =.false.
@@ -178,8 +181,10 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
! set module wide available data
params%Delta_t = Delta_t
call SNESSolve(damage_snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)
call SNESGetConvergedReason(damage_snes,reason,ierr); CHKERRQ(ierr)
call SNESSolve(damage_snes,PETSC_NULL_VEC,solution_vec,err_PETSc)
CHKERRQ(err_PETSc)
call SNESGetConvergedReason(damage_snes,reason,err_PETSc)
CHKERRQ(err_PETSc)
if (reason < 1) then
solution%converged = .false.
@@ -189,9 +194,11 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
solution%iterationsNeeded = totalIter
end if
stagNorm = maxval(abs(phi_current - phi_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi_current))
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
phi_stagInc = phi_current
!--------------------------------------------------------------------------------------------------
@@ -202,8 +209,8 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
call homogenization_set_phi(phi_current(i,j,k),ce)
end do; end do; end do
call VecMin(solution_vec,devNull,phi_min,ierr); CHKERRQ(ierr)
call VecMax(solution_vec,devNull,phi_max,ierr); CHKERRQ(ierr)
call VecMin(solution_vec,devNull,phi_min,err_PETSc); CHKERRQ(err_PETSc)
call VecMax(solution_vec,devNull,phi_max,err_PETSc); CHKERRQ(err_PETSc)
if (solution%converged) &
print'(/,1x,a)', '... nonlocal damage converged .....................................'
print'(/,1x,a,f8.6,2x,f8.6,2x,e11.4)', 'Minimum|Maximum|Delta Damage = ', phi_min, phi_max, stagNorm
@@ -222,17 +229,19 @@ subroutine grid_damage_spectral_forward(cutBack)
integer :: i, j, k, ce
DM :: dm_local
PetscScalar, dimension(:,:,:), pointer :: x_scal
PetscErrorCode :: ierr
PetscErrorCode :: err_PETSc
if (cutBack) then
phi_current = phi_lastInc
phi_stagInc = phi_lastInc
!--------------------------------------------------------------------------------------------------
! reverting damage field state
call SNESGetDM(damage_snes,dm_local,ierr); CHKERRQ(ierr)
call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr) !< get the data out of PETSc to work with
call SNESGetDM(damage_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,err_PETSc) !< get the data out of PETSc to work with
CHKERRQ(err_PETSc)
x_scal(xstart:xend,ystart:yend,zstart:zend) = phi_current
call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr)
call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
ce = 0
do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
ce = ce + 1
@@ -249,7 +258,7 @@ end subroutine grid_damage_spectral_forward
!--------------------------------------------------------------------------------------------------
!> @brief forms the spectral damage residual vector
!--------------------------------------------------------------------------------------------------
subroutine formResidual(in,x_scal,f_scal,dummy,ierr)
subroutine formResidual(in,x_scal,f_scal,dummy,dummy_err)
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: &
in
@@ -260,7 +269,7 @@ subroutine formResidual(in,x_scal,f_scal,dummy,ierr)
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
f_scal
PetscObject :: dummy
PetscErrorCode :: ierr
PetscErrorCode :: dummy_err
integer :: i, j, k, ce
@@ -311,7 +320,8 @@ end subroutine formResidual
!--------------------------------------------------------------------------------------------------
subroutine updateReference()
integer :: ce,ierr
integer :: ce
integer(MPI_INTEGER_KIND) :: err_MPI
K_ref = 0.0_pReal
@@ -322,9 +332,11 @@ subroutine updateReference()
end do
K_ref = K_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
mu_ref = mu_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end subroutine updateReference


@@ -71,7 +71,8 @@ subroutine grid_thermal_spectral_init(T_0)
integer :: i, j, k, ce
DM :: thermal_grid
PetscScalar, dimension(:,:,:), pointer :: T_PETSc
PetscErrorCode :: ierr
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
class(tNode), pointer :: &
num_grid
@@ -94,18 +95,19 @@ subroutine grid_thermal_spectral_init(T_0)
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-thermal_snes_type newtonls -thermal_snes_mf &
&-thermal_snes_ksp_ew -thermal_ksp_type fgmres',ierr)
CHKERRQ(ierr)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
CHKERRQ(ierr)
&-thermal_snes_ksp_ew -thermal_ksp_type fgmres',err_PETSc)
CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
CHKERRQ(err_PETSc)
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,thermal_snes,ierr); CHKERRQ(ierr)
call SNESSetOptionsPrefix(thermal_snes,'thermal_',ierr);CHKERRQ(ierr)
call SNESCreate(PETSC_COMM_WORLD,thermal_snes,err_PETSc); CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(thermal_snes,'thermal_',err_PETSc);CHKERRQ(err_PETSc)
localK = 0_pPetscInt
localK(worldrank) = int(grid3,pPetscInt)
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3D(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
@@ -113,20 +115,21 @@ subroutine grid_thermal_spectral_init(T_0)
1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
1_pPetscInt, 0_pPetscInt, & ! #dof (T, scalar), ghost boundary width (domain overlap)
[int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
thermal_grid,ierr) ! handle, error
CHKERRQ(ierr)
call SNESSetDM(thermal_snes,thermal_grid,ierr); CHKERRQ(ierr) ! connect snes to da
call DMsetFromOptions(thermal_grid,ierr); CHKERRQ(ierr)
call DMsetUp(thermal_grid,ierr); CHKERRQ(ierr)
call DMCreateGlobalVector(thermal_grid,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 1, i.e. every def grad tensor)
call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector
CHKERRQ(ierr)
call SNESSetFromOptions(thermal_snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments
thermal_grid,err_PETSc) ! handle, error
CHKERRQ(err_PETSc)
call SNESSetDM(thermal_snes,thermal_grid,err_PETSc); CHKERRQ(err_PETSc) ! connect snes to da
call DMsetFromOptions(thermal_grid,err_PETSc); CHKERRQ(err_PETSc)
call DMsetUp(thermal_grid,err_PETSc); CHKERRQ(err_PETSc)
call DMCreateGlobalVector(thermal_grid,solution_vec,err_PETSc) ! global solution vector (grid x 1, i.e. every def grad tensor)
CHKERRQ(err_PETSc)
call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
CHKERRQ(err_PETSc)
call SNESSetFromOptions(thermal_snes,err_PETSc); CHKERRQ(err_PETSc) ! pull it all together with additional CLI arguments
!--------------------------------------------------------------------------------------------------
! init fields
call DMDAGetCorners(thermal_grid,xstart,ystart,zstart,xend,yend,zend,ierr)
CHKERRQ(ierr)
call DMDAGetCorners(thermal_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc)
CHKERRQ(err_PETSc)
xend = xstart + xend - 1
yend = ystart + yend - 1
zend = zstart + zend - 1
@@ -143,9 +146,11 @@ subroutine grid_thermal_spectral_init(T_0)
call homogenization_thermal_setField(T_0,0.0_pReal,ce)
end do; end do; end do
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,ierr); CHKERRQ(ierr)
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
T_PETSc(xstart:xend,ystart:yend,zstart:zend) = T_current
call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,ierr); CHKERRQ(ierr)
call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
call updateReference
@@ -164,7 +169,8 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
PetscInt :: devNull
PetscReal :: T_min, T_max, stagNorm
PetscErrorCode :: ierr
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason
solution%converged =.false.
@@ -173,8 +179,10 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
! set module wide available data
params%Delta_t = Delta_t
call SNESSolve(thermal_snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)
call SNESGetConvergedReason(thermal_snes,reason,ierr); CHKERRQ(ierr)
call SNESSolve(thermal_snes,PETSC_NULL_VEC,solution_vec,err_PETSc)
CHKERRQ(err_PETSc)
call SNESGetConvergedReason(thermal_snes,reason,err_PETSc)
CHKERRQ(err_PETSc)
if (reason < 1) then
solution%converged = .false.
@@ -184,9 +192,11 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
solution%iterationsNeeded = totalIter
end if
stagNorm = maxval(abs(T_current - T_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current))
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
T_stagInc = T_current
!--------------------------------------------------------------------------------------------------
@@ -197,8 +207,8 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
end do; end do; end do
call VecMin(solution_vec,devNull,T_min,ierr); CHKERRQ(ierr)
call VecMax(solution_vec,devNull,T_max,ierr); CHKERRQ(ierr)
call VecMin(solution_vec,devNull,T_min,err_PETSc); CHKERRQ(err_PETSc)
call VecMax(solution_vec,devNull,T_max,err_PETSc); CHKERRQ(err_PETSc)
if (solution%converged) &
print'(/,1x,a)', '... thermal conduction converged ..................................'
print'(/,1x,a,f8.4,2x,f8.4,2x,f8.4)', 'Minimum|Maximum|Delta Temperature / K = ', T_min, T_max, stagNorm
@@ -217,7 +227,7 @@ subroutine grid_thermal_spectral_forward(cutBack)
integer :: i, j, k, ce
DM :: dm_local
PetscScalar, dimension(:,:,:), pointer :: x_scal
PetscErrorCode :: ierr
PetscErrorCode :: err_PETSc
if (cutBack) then
T_current = T_lastInc
@@ -225,10 +235,13 @@ subroutine grid_thermal_spectral_forward(cutBack)
!--------------------------------------------------------------------------------------------------
! reverting thermal field state
call SNESGetDM(thermal_snes,dm_local,ierr); CHKERRQ(ierr)
call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr) !< get the data out of PETSc to work with
call SNESGetDM(thermal_snes,dm_local,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,err_PETSc) !< get the data out of PETSc to work with
CHKERRQ(err_PETSc)
x_scal(xstart:xend,ystart:yend,zstart:zend) = T_current
call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr)
call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
ce = 0
do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
ce = ce + 1
@@ -245,7 +258,7 @@ end subroutine grid_thermal_spectral_forward
!--------------------------------------------------------------------------------------------------
!> @brief forms the spectral thermal residual vector
!--------------------------------------------------------------------------------------------------
subroutine formResidual(in,x_scal,f_scal,dummy,ierr)
subroutine formResidual(in,x_scal,f_scal,dummy,dummy_err)
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: &
in
@@ -256,7 +269,7 @@ subroutine formResidual(in,x_scal,f_scal,dummy,ierr)
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
f_scal
PetscObject :: dummy
PetscErrorCode :: ierr
PetscErrorCode :: dummy_err
integer :: i, j, k, ce
T_current = x_scal
@@ -301,7 +314,8 @@ end subroutine formResidual
!--------------------------------------------------------------------------------------------------
subroutine updateReference()
integer :: ce,ierr
integer :: ce
integer(MPI_INTEGER_KIND) :: err_MPI
K_ref = 0.0_pReal
@@ -312,9 +326,11 @@ subroutine updateReference()
end do
K_ref = K_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
mu_ref = mu_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end subroutine updateReference


@@ -20,16 +20,18 @@ subroutine quit(stop_id)
PetscErrorCode :: err_PETSc
call h5open_f(err_HDF5)
if (err_HDF5 /= 0) write(6,'(a,i5)') ' Error in h5open_f ',err_HDF5 ! prevents error if not opened yet
if (err_HDF5 /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in h5open_f ',err_HDF5 ! prevents error if not opened yet
call h5close_f(err_HDF5)
if (err_HDF5 /= 0) write(6,'(a,i5)') ' Error in h5close_f ',err_HDF5
if (err_HDF5 /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in h5close_f ',err_HDF5
call PetscFinalize(err_PETSc)
CHKERRQ(err_PETSc)
#ifdef _OPENMP
call MPI_finalize(err_MPI)
if (err_MPI /= 0) write(6,'(a,i5)') ' Error in MPI_finalize',err_MPI
if (err_MPI /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in MPI_finalize',err_MPI
#else
err_MPI = 0_MPI_INTEGER_KIND
#endif
call date_and_time(values = dateAndTime)
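
The quit() hunk above is the bugfix named in the commit message. A hedged reading, since the surrounding source is not part of the diff: DAMASK appears to call MPI_finalize itself only in OpenMP builds (where it also initializes MPI rather than leaving that to PETSc), so in any other build err_MPI would remain undefined although it is still evaluated later in the subroutine; the new #else branch therefore assigns an explicit success value. The guard, annotated under that assumption:

#ifdef _OPENMP
call MPI_finalize(err_MPI)                                                     ! DAMASK owns MPI here: finalize and keep the real return code
if (err_MPI /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in MPI_finalize',err_MPI
#else
err_MPI = 0_MPI_INTEGER_KIND                                                   ! MPI_finalize is not called: give err_MPI a defined success value for later checks
#endif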