! file: src/grid/grid_thermal_spectral.f90
!--------------------------------------------------------------------------------------------------
!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
!> @author Pratheek Shanthraj, Max-Planck-Institut für Eisenforschung GmbH
!> @author Shaokang Zhang, Max-Planck-Institut für Eisenforschung GmbH
!> @brief Spectral solver for thermal conduction
!--------------------------------------------------------------------------------------------------
module grid_thermal_spectral
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
  use PETScDMDA
  use PETScSNES
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
  use MPI_f08
#endif

  use prec
  use parallelization
  use IO
  use spectral_utilities
  use discretization_grid
  use homogenization
  use YAML_types
  use config

  implicit none
  private

  !> numerical parameters controlling the staggered/SNES iteration
  type :: tNumerics
    integer :: &
      itmax                                                                                         !< maximum number of iterations
    real(pReal) :: &
      eps_thermal_atol, &                                                                           !< absolute tolerance for thermal equilibrium
      eps_thermal_rtol                                                                              !< relative tolerance for thermal equilibrium
  end type tNumerics

  type(tNumerics) :: num

  type(tSolutionParams) :: params                                                                   !< solution parameters (time increment) shared with the residual callback

!--------------------------------------------------------------------------------------------------
! PETSc data
  SNES :: SNES_thermal
  Vec  :: solution_vec

  real(pReal), dimension(:,:,:), allocatable :: &
    T_current, &                                                                                    !< field of current temperature
    T_lastInc, &                                                                                    !< field of previous temperature
    T_stagInc                                                                                       !< field of staggered temperature

!--------------------------------------------------------------------------------------------------
! reference diffusion tensor, mobility etc.
  integer :: totalIter = 0                                                                          !< total iteration in current increment
  real(pReal), dimension(3,3) :: K_ref                                                              !< average thermal conductivity, used in the Green operator
  real(pReal) :: mu_ref                                                                             !< average thermal mobility (heat capacity density)

  public :: &
    grid_thermal_spectral_init, &
    grid_thermal_spectral_solution, &
    grid_thermal_spectral_forward

contains
!--------------------------------------------------------------------------------------------------
!> @brief Allocate all necessary fields and fill them with data.
!> @param[in] T_0 initial temperature, assigned homogeneously to all grid points
! ToDo: Restart not implemented
!--------------------------------------------------------------------------------------------------
subroutine grid_thermal_spectral_init(T_0)

  real(pReal), intent(in) :: T_0

  PetscInt, dimension(0:worldsize-1) :: localK                                                      ! z-extent of the grid slab owned by each MPI rank
  integer :: i, j, k, ce
  DM :: thermal_grid
  PetscScalar, dimension(:,:,:), pointer :: T_PETSc
  integer(MPI_INTEGER_KIND) :: err_MPI
  PetscErrorCode :: err_PETSc
  class(tNode), pointer :: &
    num_grid


  print'(/,1x,a)', '<<<+- grid_thermal_spectral init -+>>>'

  print'(/,1x,a)', 'P. Shanthraj et al., Handbook of Mechanics of Materials, 2019'
  print'( 1x,a)',  'https://doi.org/10.1007/978-981-10-6855-3_80'

!-------------------------------------------------------------------------------------------------
! read numerical parameters and do sanity checks
  num_grid => config_numerics%get('grid',defaultVal=emptyDict)
  num%itmax            = num_grid%get_asInt   ('itmax',           defaultVal=250)
  num%eps_thermal_atol = num_grid%get_asFloat ('eps_thermal_atol',defaultVal=1.0e-2_pReal)
  num%eps_thermal_rtol = num_grid%get_asFloat ('eps_thermal_rtol',defaultVal=1.0e-6_pReal)

  if (num%itmax <= 1)                    call IO_error(301,ext_msg='itmax')
  if (num%eps_thermal_atol <= 0.0_pReal) call IO_error(301,ext_msg='eps_thermal_atol')
  if (num%eps_thermal_rtol <= 0.0_pReal) call IO_error(301,ext_msg='eps_thermal_rtol')

!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-thermal_snes_type newtonls -thermal_snes_mf &
                               &-thermal_snes_ksp_ew -thermal_ksp_type fgmres',err_PETSc)
  CHKERRQ(err_PETSc)
  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
  CHKERRQ(err_PETSc)

!--------------------------------------------------------------------------------------------------
! init fields
  allocate(T_current(grid(1),grid(2),grid3), source=T_0)
  allocate(T_lastInc(grid(1),grid(2),grid3), source=T_0)
  allocate(T_stagInc(grid(1),grid(2),grid3), source=T_0)

  ce = 0
  do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
    ce = ce + 1
    call homogenization_thermal_setField(T_0,0.0_pReal,ce)                                          ! initial temperature, zero rate
  end do; end do; end do

!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
  call SNESCreate(PETSC_COMM_WORLD,SNES_thermal,err_PETSc); CHKERRQ(err_PETSc)
  call SNESSetOptionsPrefix(SNES_thermal,'thermal_',err_PETSc);CHKERRQ(err_PETSc)
  localK            = 0_pPetscInt
  localK(worldrank) = int(grid3,pPetscInt)
  call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)      ! gather slab sizes of all ranks
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  call DMDACreate3D(PETSC_COMM_WORLD, &
         DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                                    ! cut off stencil at boundary
         DMDA_STENCIL_BOX, &                                                                        ! Moore (26) neighborhood around central point
         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                    ! global grid
         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
         1_pPetscInt, 0_pPetscInt, &                                                                ! #dof (T, scalar), ghost boundary width (domain overlap)
         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &                                ! local grid
         thermal_grid,err_PETSc)                                                                    ! handle, error
  CHKERRQ(err_PETSc)
  call DMsetFromOptions(thermal_grid,err_PETSc); CHKERRQ(err_PETSc)
  call DMsetUp(thermal_grid,err_PETSc); CHKERRQ(err_PETSc)
  call DMCreateGlobalVector(thermal_grid,solution_vec,err_PETSc)                                    ! global solution vector (grid x 1, i.e. every def grad tensor)
  CHKERRQ(err_PETSc)
  call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc)  ! residual vector of same shape as solution vector
  CHKERRQ(err_PETSc)
  call SNESSetDM(SNES_thermal,thermal_grid,err_PETSc); CHKERRQ(err_PETSc)                           ! connect snes to da
  call SNESSetFromOptions(SNES_thermal,err_PETSc); CHKERRQ(err_PETSc)                               ! pull it all together with additional CLI arguments

!--------------------------------------------------------------------------------------------------
! copy initial temperature field into the PETSc solution vector
  call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
  CHKERRQ(err_PETSc)
  T_PETSc = T_current
  call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
  CHKERRQ(err_PETSc)

  call updateReference()

end subroutine grid_thermal_spectral_init

!--------------------------------------------------------------------------------------------------
!> @brief Solution for the spectral thermal scheme with internal iterations.
!> @param[in] Delta_t increment in time for current solution
!> @return solution state (SNES convergence, staggered-loop convergence, iteration count)
!--------------------------------------------------------------------------------------------------
function grid_thermal_spectral_solution(Delta_t) result(solution)

  real(pReal), intent(in) :: &
    Delta_t                                                                                         !< increment in time for current solution

  integer :: i, j, k, ce
  type(tSolutionState) :: solution
  PetscInt  :: devNull
  PetscReal :: T_min, T_max, stagNorm
  integer(MPI_INTEGER_KIND) :: err_MPI
  PetscErrorCode :: err_PETSc
  SNESConvergedReason :: reason


  solution%converged = .false.

!--------------------------------------------------------------------------------------------------
! set module wide available data
  params%Delta_t = Delta_t                                                                          ! formResidual reads the time increment from here

  call SNESSolve(SNES_thermal,PETSC_NULL_VEC,solution_vec,err_PETSc)
  CHKERRQ(err_PETSc)
  call SNESGetConvergedReason(SNES_thermal,reason,err_PETSc)
  CHKERRQ(err_PETSc)

  if (reason < 1) then                                                                              ! reason < 1: SNES did not converge
    solution%converged = .false.
    solution%iterationsNeeded = num%itmax
  else
    solution%converged = .true.
    solution%iterationsNeeded = totalIter
  end if

!--------------------------------------------------------------------------------------------------
! check convergence of the staggered (thermo-mechanical) loop: change since last staggered iteration
  stagNorm = maxval(abs(T_current - T_stagInc))
  call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current))
  call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  T_stagInc = T_current

!--------------------------------------------------------------------------------------------------
! updating thermal state
  ce = 0
  do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
    ce = ce + 1
    call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
  end do; end do; end do

  call VecMin(solution_vec,devNull,T_min,err_PETSc); CHKERRQ(err_PETSc)
  call VecMax(solution_vec,devNull,T_max,err_PETSc); CHKERRQ(err_PETSc)
  if (solution%converged) &
    print'(/,1x,a)', '... thermal conduction converged ..................................'
  print'(/,1x,a,f8.4,2x,f8.4,2x,f8.4)', 'Minimum|Maximum|Delta Temperature / K = ', T_min, T_max, stagNorm
  print'(/,1x,a)', '==========================================================================='
  flush(IO_STDOUT)

end function grid_thermal_spectral_solution

!--------------------------------------------------------------------------------------------------
!> @brief Forward the thermal field to the next increment, or revert it on cutback.
!> @param[in] cutBack if .true., restore the state of the last converged increment;
!>                    otherwise accept the current state and update the reference properties
!--------------------------------------------------------------------------------------------------
subroutine grid_thermal_spectral_forward(cutBack)

  logical, intent(in) :: cutBack

  integer :: i, j, k, ce
  DM :: dm_local
  PetscScalar, dimension(:,:,:), pointer :: T_PETSc
  PetscErrorCode :: err_PETSc


  if (cutBack) then
    T_current = T_lastInc
    T_stagInc = T_lastInc

!--------------------------------------------------------------------------------------------------
! reverting thermal field state
    call SNESGetDM(SNES_thermal,dm_local,err_PETSc)
    CHKERRQ(err_PETSc)
    call DMDAVecGetArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc)                                !< get the data out of PETSc to work with
    CHKERRQ(err_PETSc)
    T_PETSc = T_current
    call DMDAVecRestoreArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc)
    CHKERRQ(err_PETSc)

    ce = 0
    do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
      ce = ce + 1
      call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
    end do; end do; end do
  else
    T_lastInc = T_current
    call updateReference()                                                                          ! conductivity/mobility may have changed with the new state
  end if

end subroutine grid_thermal_spectral_forward

!--------------------------------------------------------------------------------------------------
!> @brief Form the spectral thermal residual vector (called by PETSc SNES).
!> @details Fixed-point form: the heterogeneity of conductivity (K - K_ref) and mobility
!> (mu - mu_ref) is treated as a polarization field; the reference problem is inverted
!> exactly in Fourier space by the Green operator.
!--------------------------------------------------------------------------------------------------
subroutine formResidual(in,x_scal,f_scal,dummy,dummy_err)

  DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: &
    in                                                                                              !< DMDA local info (unused here, required by interface)
  PetscScalar, dimension( &
    XG_RANGE,YG_RANGE,ZG_RANGE), intent(in) :: &
    x_scal                                                                                          !< current temperature iterate (with ghost range)
  PetscScalar, dimension( &
    X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
    f_scal                                                                                          !< residual
  PetscObject :: dummy
  PetscErrorCode :: dummy_err

  integer :: i, j, k, ce


  T_current = x_scal

!--------------------------------------------------------------------------------------------------
! evaluate polarization field
  scalarField_real = 0.0_pReal
  scalarField_real(1:grid(1),1:grid(2),1:grid3) = T_current
  call utilities_FFTscalarForward
  call utilities_fourierScalarGradient                                                              !< calculate gradient of temperature field
  call utilities_FFTvectorBackward

  ce = 0
  do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
    ce = ce + 1
    vectorField_real(1:3,i,j,k) = matmul(homogenization_K_T(ce) - K_ref, vectorField_real(1:3,i,j,k))
  end do; end do; end do

  call utilities_FFTvectorForward
  call utilities_fourierVectorDivergence                                                            !< calculate temperature divergence in fourier field
  call utilities_FFTscalarBackward

  ce = 0
  do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
    ce = ce + 1
    scalarField_real(i,j,k) = params%Delta_t*(scalarField_real(i,j,k) + homogenization_f_T(ce)) &   ! divergence of polarization flux + heat source
                            + homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T_current(i,j,k)) &     ! storage term with local mobility
                            + mu_ref*T_current(i,j,k)                                               ! reference-mobility contribution, inverted by the Green operator
  end do; end do; end do

!--------------------------------------------------------------------------------------------------
! convolution of temperature field with green operator
  call utilities_FFTscalarForward
  call utilities_fourierGreenConvolution(K_ref, mu_ref, params%Delta_t)
  call utilities_FFTscalarBackward

!--------------------------------------------------------------------------------------------------
! constructing residual
  f_scal = T_current - scalarField_real(1:grid(1),1:grid(2),1:grid3)

end subroutine formResidual

!--------------------------------------------------------------------------------------------------
!> @brief Update reference viscosity and conductivity.
!> @details K_ref and mu_ref are the volume averages (over all MPI ranks) of the local
!> conductivity and mobility; they define the homogeneous reference problem used in
!> the Green-operator convolution of formResidual.
!--------------------------------------------------------------------------------------------------
subroutine updateReference()

  integer :: ce
  integer(MPI_INTEGER_KIND) :: err_MPI


  K_ref  = 0.0_pReal
  mu_ref = 0.0_pReal
  do ce = 1, product(grid(1:2))*grid3                                                               ! accumulate over local cells
    K_ref  = K_ref  + homogenization_K_T(ce)
    mu_ref = mu_ref + homogenization_mu_T(ce)
  end do

  K_ref = K_ref*wgt                                                                                 ! wgt = 1/(number of global grid points)
  call MPI_Allreduce(MPI_IN_PLACE,K_ref,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  mu_ref = mu_ref*wgt
  call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

end subroutine updateReference

end module grid_thermal_spectral