avoiding double storage of phi

general adjustments of damage solver to follow thermal solver
Martin Diehl 2023-07-16 13:17:57 +02:00
parent b54cf03d6d
commit 8682df0e86
2 changed files with 81 additions and 78 deletions
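What the change amounts to: the damage solver used to keep its own module-level allocatable `phi` next to the PETSc solution vector `solution_vec` and shuttle data between the two. After this commit the field is stored once, in the PETSc Vec (renamed `phi_PETSc`), and every routine that needs it borrows a pointer into that storage via DMDAVecGetArrayF90/DMDAVecRestoreArrayF90, the same pattern the thermal solver already uses for `T`. A minimal sketch of that access pattern, not part of the commit (the subroutine name and the clamping operation are invented for illustration; the PETSc calls and types are the ones appearing in the diff below):

subroutine sketch_inplace_access(DM_damage,phi_PETSc)
#include <petsc/finclude/petscdmda.h>
  use petscdmda
  implicit none

  DM,  intent(in) :: DM_damage                                  ! DMDA describing the grid
  Vec, intent(in) :: phi_PETSc                                  ! global solution vector

  PetscScalar, dimension(:,:,:), pointer :: phi                 ! view into Vec storage, 0-indexed locally
  PetscErrorCode :: err_PETSc


  call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)    ! borrow pointer into Vec storage, no copy
  CHKERRQ(err_PETSc)
  phi = min(max(phi,0.0d0),1.0d0)                               ! operate directly on solver data
  call DMDAVecRestoreArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)! hand pointer back, no copy-back needed
  CHKERRQ(err_PETSc)

end subroutine sketch_inplace_access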

src/grid/grid_damage_spectral.f90

@@ -47,9 +47,8 @@ module grid_damage_spectral
 !--------------------------------------------------------------------------------------------------
 ! PETSc data
   SNES :: SNES_damage
-  Vec :: solution_vec
+  Vec :: phi_PETSc
   real(pREAL), dimension(:,:,:), allocatable :: &
-    phi, &                                                      !< field of current damage
     phi_lastInc, &                                              !< field of previous damage
     phi_stagInc                                                 !< field of staggered damage
@@ -74,8 +73,8 @@ subroutine grid_damage_spectral_init()
   integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global
   integer :: i, j, k, ce
-  DM :: damage_grid
-  real(pREAL), dimension(:,:,:), pointer :: phi_PETSc
+  DM :: DM_damage
+  real(pREAL), dimension(:,:,:), pointer :: phi                 ! 0-indexed
   Vec :: uBound, lBound
   integer(MPI_INTEGER_KIND) :: err_MPI
   PetscErrorCode :: err_PETSc
@@ -87,6 +86,7 @@ subroutine grid_damage_spectral_init()
   character(len=pSTRLEN) :: &
     snes_type

+
   print'(/,1x,a)', '<<<+- grid_spectral_damage init -+>>>'
   print'(/,1x,a)', 'P. Shanthraj et al., Handbook of Mechanics of Materials, 2019'
@@ -117,12 +117,6 @@ subroutine grid_damage_spectral_init()
   call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asStr('petsc_options',defaultVal=''),err_PETSc)
   CHKERRQ(err_PETSc)

-!--------------------------------------------------------------------------------------------------
-! init fields
-  phi = discretization_grid_getInitialCondition('phi')
-  phi_lastInc = phi
-  phi_stagInc = phi
-
 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,SNES_damage,err_PETSc)
@@ -139,17 +133,17 @@ subroutine grid_damage_spectral_init()
          1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), &
          1_pPETSCINT, 0_pPETSCINT, &                            ! #dof (phi, scalar), ghost boundary width (domain overlap)
          [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],int(cells3_global,pPETSCINT), & ! local cells
-         damage_grid,err_PETSc)                                 ! handle, error
+         DM_damage,err_PETSc)                                   ! handle, error
   CHKERRQ(err_PETSc)
-  call DMsetFromOptions(damage_grid,err_PETSc)
+  call DMsetFromOptions(DM_damage,err_PETSc)
   CHKERRQ(err_PETSc)
-  call DMsetUp(damage_grid,err_PETSc)
+  call DMsetUp(DM_damage,err_PETSc)
   CHKERRQ(err_PETSc)
-  call DMCreateGlobalVector(damage_grid,solution_vec,err_PETSc) ! global solution vector (cells x 1, i.e. every def grad tensor)
+  call DMCreateGlobalVector(DM_damage,phi_PETSc,err_PETSc)      ! global solution vector (cells x 1, i.e. every def grad tensor)
   CHKERRQ(err_PETSc)
-  call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
+  call DMDASNESSetFunctionLocal(DM_damage,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
   CHKERRQ(err_PETSc)
-  call SNESSetDM(SNES_damage,damage_grid,err_PETSc)
+  call SNESSetDM(SNES_damage,DM_damage,err_PETSc)
   CHKERRQ(err_PETSc)
   call SNESSetFromOptions(SNES_damage,err_PETSc)                ! pull it all together with additional CLI arguments
   CHKERRQ(err_PETSc)
@ -157,9 +151,9 @@ subroutine grid_damage_spectral_init()
CHKERRQ(err_PETSc) CHKERRQ(err_PETSc)
if (trim(snes_type) == 'vinewtonrsls' .or. & if (trim(snes_type) == 'vinewtonrsls' .or. &
trim(snes_type) == 'vinewtonssls') then trim(snes_type) == 'vinewtonssls') then
call DMGetGlobalVector(damage_grid,lBound,err_PETSc) call DMGetGlobalVector(DM_damage,lBound,err_PETSc)
CHKERRQ(err_PETSc) CHKERRQ(err_PETSc)
call DMGetGlobalVector(damage_grid,uBound,err_PETSc) call DMGetGlobalVector(DM_damage,uBound,err_PETSc)
CHKERRQ(err_PETSc) CHKERRQ(err_PETSc)
call VecSet(lBound,0.0_pREAL,err_PETSc) call VecSet(lBound,0.0_pREAL,err_PETSc)
CHKERRQ(err_PETSc) CHKERRQ(err_PETSc)
@@ -167,12 +161,15 @@ subroutine grid_damage_spectral_init()
     CHKERRQ(err_PETSc)
     call SNESVISetVariableBounds(SNES_damage,lBound,uBound,err_PETSc) ! variable bounds for variational inequalities
     CHKERRQ(err_PETSc)
-    call DMRestoreGlobalVector(damage_grid,lBound,err_PETSc)
+    call DMRestoreGlobalVector(DM_damage,lBound,err_PETSc)
     CHKERRQ(err_PETSc)
-    call DMRestoreGlobalVector(damage_grid,uBound,err_PETSc)
+    call DMRestoreGlobalVector(DM_damage,uBound,err_PETSc)
     CHKERRQ(err_PETSc)
   end if

+  call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)    ! returns 0-indexed phi
+  CHKERRQ(err_PETSc)
+
   restartRead: if (CLI_restartInc > 0) then
     print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc
@@ -183,18 +180,20 @@ subroutine grid_damage_spectral_init()
     phi = reshape(tempN,[cells(1),cells(2),cells3])
     call HDF5_read(tempN,groupHandle,'phi_lastInc',.false.)
     phi_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
+    phi_stagInc = phi_lastInc
+  else
+    phi = discretization_grid_getInitialCondition('phi')
+    phi_lastInc = phi(0:,0:,0:)
+    phi_stagInc = phi_lastInc
   end if restartRead

   ce = 0
-  do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
+  do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0, cells(1)-1
     ce = ce + 1
     call homogenization_set_phi(phi(i,j,k),ce)
   end do; end do; end do
-  call DMDAVecGetArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
-  CHKERRQ(err_PETSc)
-  phi_PETSc = phi
-  call DMDAVecRestoreArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
+  call DMDAVecRestoreArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)
   CHKERRQ(err_PETSc)

   call updateReference()
@@ -209,37 +208,43 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
   real(pREAL), intent(in) :: &
     Delta_t                                                     !< increment in time for current solution

   integer :: i, j, k, ce
   type(tSolutionState) :: solution
   PetscInt :: devNull
   PetscReal :: phi_min, phi_max, stagNorm
+  DM :: DM_damage
+  real(pREAL), dimension(:,:,:), pointer :: phi                 ! 0-indexed
   integer(MPI_INTEGER_KIND) :: err_MPI
   PetscErrorCode :: err_PETSc
   SNESConvergedReason :: reason

   solution%converged = .false.

 !--------------------------------------------------------------------------------------------------
 ! set module wide availabe data
   params%Delta_t = Delta_t

-  call SNESSolve(SNES_damage,PETSC_NULL_VEC,solution_vec,err_PETSc)
+  call SNESSolve(SNES_damage,PETSC_NULL_VEC,phi_PETSc,err_PETSc)
   CHKERRQ(err_PETSc)
   call SNESGetConvergedReason(SNES_damage,reason,err_PETSc)
   CHKERRQ(err_PETSc)

-  if (reason < 1) then
-    solution%converged = .false.
-    solution%iterationsNeeded = num%itmax
-  else
-    solution%converged = .true.
-    solution%iterationsNeeded = totalIter
-  end if
+  solution%converged = reason > 0
+  solution%iterationsNeeded = merge(totalIter,num%itmax,solution%converged)
+
+  call SNESGetDM(SNES_damage,DM_damage,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)    ! returns 0-indexed phi
+  CHKERRQ(err_PETSc)
+
+  phi_min = minval(phi)
+  phi_max = maxval(phi)
   stagNorm = maxval(abs(phi - phi_stagInc))
   call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-  solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi))
+  solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*phi_max)
   call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   phi_stagInc = phi
@@ -247,15 +252,14 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
 !--------------------------------------------------------------------------------------------------
 ! updating damage state
   ce = 0
-  do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
+  do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0,cells(1)-1
     ce = ce + 1
     call homogenization_set_phi(phi(i,j,k),ce)
   end do; end do; end do
-  call VecMin(solution_vec,devNull,phi_min,err_PETSc)
-  CHKERRQ(err_PETSc)
-  call VecMax(solution_vec,devNull,phi_max,err_PETSc)
+  call DMDAVecRestoreArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)
   CHKERRQ(err_PETSc)
+
   if (solution%converged) &
     print'(/,1x,a)', '... nonlocal damage converged .....................................'
   print'(/,1x,a,f8.6,2x,f8.6,2x,e11.4)', 'Minimum|Maximum|Delta Damage = ', phi_min, phi_max, stagNorm
@@ -266,35 +270,31 @@ end function grid_damage_spectral_solution
 !--------------------------------------------------------------------------------------------------
-!> @brief spectral damage forwarding routine
+!> @brief Set DAMASK data to current solver status.
 !--------------------------------------------------------------------------------------------------
 subroutine grid_damage_spectral_forward(cutBack)

   logical, intent(in) :: cutBack

   integer :: i, j, k, ce
-  DM :: dm_local
-  real(pREAL), dimension(:,:,:), pointer :: phi_PETSc
+  DM :: DM_damage
+  real(pREAL), dimension(:,:,:), pointer :: phi                 ! 0-indexed
   PetscErrorCode :: err_PETSc

+  call SNESGetDM(SNES_damage,DM_damage,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)    ! returns 0-indexed T
+  CHKERRQ(err_PETSc)
+
   if (cutBack) then
-    phi = phi_lastInc
-    phi_stagInc = phi_lastInc
-
-!--------------------------------------------------------------------------------------------------
-! reverting damage field state
-    call SNESGetDM(SNES_damage,dm_local,err_PETSc)
-    CHKERRQ(err_PETSc)
-    call DMDAVecGetArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc) !< get the data out of PETSc to work with
-    CHKERRQ(err_PETSc)
-    phi_PETSc = phi
-    call DMDAVecRestoreArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc)
-    CHKERRQ(err_PETSc)
     ce = 0
     do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
       ce = ce + 1
-      call homogenization_set_phi(phi(i,j,k),ce)
+      call homogenization_set_phi(phi_lastInc(i,j,k),ce)
     end do; end do; end do
+    phi = phi_lastInc
+    phi_stagInc = phi_lastInc
   else
     phi_lastInc = phi
     call updateReference()
@@ -309,13 +309,14 @@ end subroutine grid_damage_spectral_forward
 subroutine grid_damage_spectral_restartWrite()

   PetscErrorCode :: err_PETSc
-  DM :: dm_local
+  DM :: DM_damage
   integer(HID_T) :: fileHandle, groupHandle
-  PetscScalar, dimension(:,:,:), pointer :: phi
+  real(pREAL), dimension(:,:,:), pointer :: phi                 ! 0-indexed

-  call SNESGetDM(SNES_damage,dm_local,err_PETSc);
+
+  call SNESGetDM(SNES_damage,DM_damage,err_PETSc)
   CHKERRQ(err_PETSc)
-  call DMDAVecGetArrayReadF90(dm_local,solution_vec,phi,err_PETSc);
+  call DMDAVecGetArrayReadF90(DM_damage,phi_PETSc,phi,err_PETSc) ! returns 0-indexed T
   CHKERRQ(err_PETSc)

   print'(1x,a)', 'saving damage solver data required for restart'; flush(IO_STDOUT)
@@ -327,7 +328,7 @@ subroutine grid_damage_spectral_restartWrite()
   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)

-  call DMDAVecRestoreArrayReadF90(dm_local,solution_vec,phi,err_PETSc);
+  call DMDAVecRestoreArrayReadF90(DM_damage,phi_PETSc,phi,err_PETSc);
   CHKERRQ(err_PETSc)

 end subroutine grid_damage_spectral_restartWrite
@@ -351,7 +352,7 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc)
   real(pREAL), dimension(3,cells(1),cells(2),cells3) :: vectorField

-  phi = x_scal
+  associate(phi => x_scal)
   vectorField = utilities_ScalarGradient(phi)
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
@@ -369,6 +370,7 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc)
   r = max(min(utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t),phi_lastInc),num%phi_min) &
     - phi
+  end associate
   err_PETSc = 0

 end subroutine formResidual
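A side note on formResidual: the old version copied the SNES solution argument into the module array on every residual evaluation (`phi = x_scal`); with the module array gone, `associate(phi => x_scal)` keeps the rest of the body textually unchanged while introducing only an alias, no allocation or copy. The thermal file gets the matching touch-up below, moving `err_PETSc = 0` after `end associate` so both residual routines end the same way. A standalone sketch of the associate idiom (names invented for illustration, not from the commit):

subroutine sketch_alias(x_scal)

  implicit none
  real(kind(1.0d0)), dimension(:,:,:), intent(in) :: x_scal


  associate(phi => x_scal)                                      ! alias, no allocation, no copy
    print'(1x,a,2(1x,es12.5))', 'min/max of field:', minval(phi), maxval(phi)
  end associate                                                 ! alias ends with the block

end subroutine sketch_alias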

src/grid/grid_thermal_spectral.f90

@@ -81,6 +81,7 @@ subroutine grid_thermal_spectral_init()
   type(tDict), pointer :: &
     num_grid

+
   print'(/,1x,a)', '<<<+- grid_thermal_spectral init -+>>>'
   print'(/,1x,a)', 'P. Shanthraj et al., Handbook of Mechanics of Materials, 2019'
@@ -291,6 +292,7 @@ subroutine grid_thermal_spectral_restartWrite()
   integer(HID_T) :: fileHandle, groupHandle
   real(pREAL), dimension(:,:,:), pointer :: T                   ! 0-indexed

+
   call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc)
   CHKERRQ(err_PETSc)
   call DMDAVecGetArrayReadF90(DM_thermal,T_PETSc,T,err_PETSc)   ! returns 0-indexed T
@@ -312,7 +314,6 @@ subroutine grid_thermal_spectral_restartWrite()
 end subroutine grid_thermal_spectral_restartWrite

-
 !--------------------------------------------------------------------------------------------------
 !> @brief Construct the residual vector.
 !--------------------------------------------------------------------------------------------------
@@ -349,8 +350,8 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc)
   r = T &
     - utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t)
-  err_PETSc = 0
   end associate
+  err_PETSc = 0

 end subroutine formResidual