!--------------------------------------------------------------------------------------------------
!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
!> @author Pratheek Shanthraj, Max-Planck-Institut für Eisenforschung GmbH
!> @author Shaokang Zhang, Max-Planck-Institut für Eisenforschung GmbH
!> @brief Spectral solver for thermal conduction
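!> @details Solves mu_T * dT/dt = div(K_T grad T) + f_T on the regular grid by a spectral
!> (FFT-based) method built around a homogeneous reference medium (K_ref, mu_ref).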
!--------------------------------------------------------------------------------------------------
module grid_thermal_spectral
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
use PETScDMDA
use PETScSNES
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
use MPI_f08
#endif
use prec
use parallelization
use IO
use misc
use CLI
use HDF5_utilities
use HDF5
use spectral_utilities
use discretization_grid
use homogenization
use YAML_types
use config
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
implicit none(type,external)
#else
implicit none
#endif
private
type :: tNumerics
integer :: &
itmax !< maximum number of iterations
real(pREAL) :: &
eps_thermal_atol, & !< absolute tolerance for thermal equilibrium
eps_thermal_rtol !< relative tolerance for thermal equilibrium
end type tNumerics
type(tNumerics) :: num
!--------------------------------------------------------------------------------------------------
! PETSc data
SNES :: SNES_thermal
Vec :: T_PETSc
real(pREAL), dimension(:,:,:), allocatable :: &
T_lastInc, & !< field of previous temperature
T_stagInc, & !< field of staggered temperature
dotT_lastInc
!--------------------------------------------------------------------------------------------------
! reference diffusion tensor, mobility etc.
integer :: totalIter = 0 !< total number of iterations in current increment
real(pREAL), dimension(3,3) :: K_ref
real(pREAL) :: mu_ref, Delta_t_
public :: &
grid_thermal_spectral_init, &
grid_thermal_spectral_solution, &
grid_thermal_spectral_restartWrite, &
grid_thermal_spectral_forward
contains
!--------------------------------------------------------------------------------------------------
!> @brief Allocate all necessary fields and fill them with data, potentially from restart info.
!--------------------------------------------------------------------------------------------------
subroutine grid_thermal_spectral_init(num_grid)
type(tDict), pointer, intent(in) :: num_grid
integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global
integer :: ce
DM :: DM_thermal
real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle
real(pREAL), dimension(1,product(cells(1:2))*cells3) :: tempN
type(tDict), pointer :: &
num_grid_thermal
character(len=:), allocatable :: &
extmsg, &
petsc_options
print'(/,1x,a)', '<<<+- grid_thermal_spectral init -+>>>'
print'(/,1x,a)', 'P. Shanthraj et al., Handbook of Mechanics of Materials, 2019'
print'( 1x,a)', 'https://doi.org/10.1007/978-981-10-6855-3_80'
if (.not. homogenization_thermal_active()) call IO_error(501,ext_msg='thermal')
!-------------------------------------------------------------------------------------------------
! read numerical parameters and do sanity checks
num_grid_thermal => num_grid%get_dict('thermal',defaultVal=emptyDict)
num%itmax = num_grid_thermal%get_asInt('N_iter_max', defaultVal=100)
num%eps_thermal_atol = num_grid_thermal%get_asReal('eps_abs_T', defaultVal=1.0e-2_pREAL)
num%eps_thermal_rtol = num_grid_thermal%get_asReal('eps_rel_T', defaultVal=1.0e-6_pREAL)
extmsg = ''
if (num%eps_thermal_atol <= 0.0_pREAL) extmsg = trim(extmsg)//' eps_abs_T'
if (num%eps_thermal_rtol <= 0.0_pREAL) extmsg = trim(extmsg)//' eps_rel_T'
if (num%itmax < 1) extmsg = trim(extmsg)//' N_iter_max'
if (extmsg /= '') call IO_error(301,ext_msg=trim(extmsg))
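! Illustrative numerics snippet matching the keys read above (defaults shown); how the
! 'thermal' dict is embedded in the numerics file is determined by the caller providing num_grid:
!   thermal:
!     N_iter_max: 100
!     eps_abs_T: 1.0e-2
!     eps_rel_T: 1.0e-6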
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
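! defaults: matrix-free Newton line search (-snes_mf) with Eisenstat-Walker forcing (-snes_ksp_ew)
! and flexible GMRES; all options receive the 'thermal_' prefix, user-supplied PETSc_options are
! appended and therefore take precedence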
petsc_options = misc_prefixOptions('-snes_type newtonls -snes_mf -snes_ksp_ew -ksp_type fgmres '// &
num_grid_thermal%get_asStr('PETSc_options',defaultVal=''), 'thermal_')
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,petsc_options,err_PETSc)
CHKERRQ(err_PETSc)
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,SNES_thermal,err_PETSc)
CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(SNES_thermal,'thermal_',err_PETSc)
CHKERRQ(err_PETSc)
call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,&
cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3D(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells
1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), &
1_pPETSCINT, 0_pPETSCINT, & ! #dof (T, scalar), ghost boundary width (domain overlap)
[int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells
DM_thermal,err_PETSc) ! handle, error
CHKERRQ(err_PETSc)
call DMsetFromOptions(DM_thermal,err_PETSc)
CHKERRQ(err_PETSc)
call DMsetUp(DM_thermal,err_PETSc)
CHKERRQ(err_PETSc)
call DMCreateGlobalVector(DM_thermal,T_PETSc,err_PETSc) ! global solution vector (cells x 1, i.e. one temperature value per cell)
CHKERRQ(err_PETSc)
call DMDASNESSetFunctionLocal(DM_thermal,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
CHKERRQ(err_PETSc)
call SNESSetDM(SNES_thermal,DM_thermal,err_PETSc)
CHKERRQ(err_PETSc)
call SNESSetFromOptions(SNES_thermal,err_PETSc) ! pull it all together with additional CLI arguments
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T
CHKERRQ(err_PETSc)
restartRead: if (CLI_restartInc > 0) then
print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(tempN,groupHandle,'T',.false.)
T = reshape(tempN,[cells(1),cells(2),cells3])
call HDF5_read(tempN,groupHandle,'T_lastInc',.false.)
T_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
T_stagInc = T_lastInc
call HDF5_read(tempN,groupHandle,'dotT_lastInc',.false.)
dotT_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
else
T = discretization_grid_getInitialCondition('T')
T_lastInc = T(0:,0:,0:)
T_stagInc = T_lastInc
dotT_lastInc = 0.0_pREAL * T_lastInc
end if restartRead
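! hand the initial temperature over to the homogenization layer; the associated rate dot_T is
! zero here and gets updated by grid_thermal_spectral_solution/forward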
call homogenization_thermal_setField(reshape(T,[product(cells(1:2))*cells3]), &
[(0.0_pREAL, ce = 1,product(cells(1:2))*cells3)])
call DMDAVecRestoreArrayF90(DM_thermal,T_PETSc,T,err_PETSc)
CHKERRQ(err_PETSc)
call updateReference()
end subroutine grid_thermal_spectral_init
!--------------------------------------------------------------------------------------------------
!> @brief Solution for the spectral thermal scheme with internal iterations.
!--------------------------------------------------------------------------------------------------
function grid_thermal_spectral_solution(Delta_t) result(solution)
real(pREAL), intent(in) :: Delta_t !< increment in time for current solution
type(tSolutionState) :: solution
PetscInt :: devNull
PetscReal :: T_min, T_max, stagNorm
DM :: DM_thermal
real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason
!--------------------------------------------------------------------------------------------------
! set module-wide available data
Delta_t_ = Delta_t
call SNESSolve(SNES_thermal,PETSC_NULL_VEC,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
call SNESGetConvergedReason(SNES_thermal,reason,err_PETSc)
CHKERRQ(err_PETSc)
solution%converged = reason > 0
solution%iterationsNeeded = merge(totalIter,num%itmax,solution%converged)
call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T
CHKERRQ(err_PETSc)
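! staggered-loop convergence: the largest temperature change since the previous staggered
! iteration (reduced over all ranks) must fall below the absolute or relative tolerance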
T_min = minval(T)
T_max = maxval(T)
stagNorm = maxval(abs(T - T_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*T_max)
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
T_stagInc = T
call homogenization_thermal_setField(reshape(T,[product(cells(1:2))*cells3]), &
reshape(T-T_lastInc,[product(cells(1:2))*cells3])/Delta_t_)
call DMDAVecRestoreArrayF90(DM_thermal,T_PETSc,T,err_PETSc)
CHKERRQ(err_PETSc)
if (solution%converged) &
print'(/,1x,a)', '... thermal conduction converged ..................................'
print'(/,1x,a,f8.4,2x,f8.4,2x,f8.4)', 'Minimum|Maximum|Delta Temperature / K = ', T_min, T_max, stagNorm
print'(/,1x,a)', '==========================================================================='
flush(IO_STDOUT)
end function grid_thermal_spectral_solution
!--------------------------------------------------------------------------------------------------
!> @brief Set DAMASK data to current solver status.
!--------------------------------------------------------------------------------------------------
subroutine grid_thermal_spectral_forward(cutBack)
logical, intent(in) :: cutBack
DM :: DM_thermal
real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed
PetscErrorCode :: err_PETSc
call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T
CHKERRQ(err_PETSc)
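! cutback: discard the failed increment and restore temperature and rate of the last converged
! increment; otherwise accept the solution, update the rate, and refresh the reference properties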
if (cutBack) then
call homogenization_thermal_setField(reshape(T_lastInc,[product(cells(1:2))*cells3]), &
reshape(dotT_lastInc,[product(cells(1:2))*cells3]))
T = T_lastInc
T_stagInc = T_lastInc
else
dotT_lastInc = (T - T_lastInc)/Delta_t_
T_lastInc = T
call updateReference()
end if
call DMDAVecRestoreArrayF90(DM_thermal,T_PETSc,T,err_PETSc)
CHKERRQ(err_PETSc)
end subroutine grid_thermal_spectral_forward
!--------------------------------------------------------------------------------------------------
!> @brief Write current solver and constitutive data for restart to file.
!--------------------------------------------------------------------------------------------------
subroutine grid_thermal_spectral_restartWrite()
PetscErrorCode :: err_PETSc
DM :: DM_thermal
integer(HID_T) :: fileHandle, groupHandle
real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed
call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayReadF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T
CHKERRQ(err_PETSc)
print'(1x,a)', 'saving thermal solver data required for restart'; flush(IO_STDOUT)
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_write(reshape(T,[1,product(cells(1:2))*cells3]),groupHandle,'T')
call HDF5_write(reshape(T_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'T_lastInc')
call HDF5_write(reshape(dotT_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'dotT_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
call DMDAVecRestoreArrayReadF90(DM_thermal,T_PETSc,T,err_PETSc)
CHKERRQ(err_PETSc)
end subroutine grid_thermal_spectral_restartWrite
!--------------------------------------------------------------------------------------------------
!> @brief Construct the residual vector.
!--------------------------------------------------------------------------------------------------
subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc)
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: &
residual_subdomain
real(pREAL), dimension(cells(1),cells(2),cells3), intent(in) :: &
x_scal
real(pREAL), dimension(cells(1),cells(2),cells3), intent(out) :: &
r !< residual
PetscObject :: dummy
PetscErrorCode, intent(out) :: err_PETSc
integer :: i, j, k, ce
real(pREAL), dimension(3,cells(1),cells(2),cells3) :: vectorField
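! residual of the preconditioned fixed-point form of mu_T * dT/dt = div(K_T grad T) + f_T:
!   r = T - G_ref[ Delta_t*(div((K_T - K_ref) grad T) + f_T) + mu_T*(T_lastInc - T) + mu_ref*T ]
! with G_ref the Green operator of the homogeneous reference medium (K_ref, mu_ref)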
associate(T => x_scal)
vectorField = utilities_ScalarGradient(T)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
vectorField(1:3,i,j,k) = matmul(homogenization_K_T(ce) - K_ref, vectorField(1:3,i,j,k))
end do; end do; end do
r = utilities_VectorDivergence(vectorField)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
r(i,j,k) = Delta_t_*(r(i,j,k) + homogenization_f_T(ce)) &
+ homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T(i,j,k)) &
+ mu_ref*T(i,j,k)
end do; end do; end do
r = T &
- utilities_GreenConvolution(r, K_ref, mu_ref, Delta_t_)
end associate
err_PETSc = 0
end subroutine formResidual
!--------------------------------------------------------------------------------------------------
!> @brief Update reference viscosity and conductivity.
!--------------------------------------------------------------------------------------------------
subroutine updateReference()
integer :: ce
integer(MPI_INTEGER_KIND) :: err_MPI
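! volume average of conductivity and mobility over all cells: wgt = 1/(total number of cells),
! the MPI_Allreduce completes the sum over all ranks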
K_ref = 0.0_pREAL
mu_ref = 0.0_pREAL
do ce = 1, product(cells(1:2))*cells3
K_ref = K_ref + homogenization_K_T(ce)
mu_ref = mu_ref + homogenization_mu_T(ce)
end do
K_ref = K_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
mu_ref = mu_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end subroutine updateReference
end module grid_thermal_spectral