DAMASK_EICMD/src/grid/grid_mech_spectral_polarisa...

636 lines
33 KiB
Fortran
Raw Normal View History

2013-07-24 18:36:16 +05:30
!--------------------------------------------------------------------------------------------------
!> @author Pratheek Shanthraj, Max-Planck-Institut für Eisenforschung GmbH
!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
!> @author Philip Eisenlohr, Max-Planck-Institut für Eisenforschung GmbH
!> @brief Grid solver for mechanics: Spectral Polarisation
!--------------------------------------------------------------------------------------------------
module grid_mechanical_spectral_polarisation
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
  use PETScDMDA
  use PETScSNES
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
  use MPI_f08
#endif

  use prec
  use parallelization
  use DAMASK_interface
  use IO
  use HDF5
  use HDF5_utilities
  use math
  use rotations
  use spectral_utilities
  use config
  use homogenization
  use discretization_grid

  implicit none
  private

  ! solution parameters (time increment, BC masks, rotation) shared between forward and residual
  type(tSolutionParams) :: params

  !> numerical tuning parameters read from the 'grid' section of the numerics configuration
  type :: tNumerics
    logical :: update_gamma                                                                         !< update gamma operator with current stiffness
    integer :: &
      itmin, &                                                                                      !< minimum number of iterations
      itmax                                                                                         !< maximum number of iterations
    real(pReal) :: &
      eps_div_atol, &                                                                               !< absolute tolerance for equilibrium
      eps_div_rtol, &                                                                               !< relative tolerance for equilibrium
      eps_curl_atol, &                                                                              !< absolute tolerance for compatibility
      eps_curl_rtol, &                                                                              !< relative tolerance for compatibility
      eps_stress_atol, &                                                                            !< absolute tolerance for fullfillment of stress BC
      eps_stress_rtol                                                                               !< relative tolerance for fullfillment of stress BC
    real(pReal) :: &
      alpha, &                                                                                      !< polarization scheme parameter 0.0 < alpha < 2.0. alpha = 1.0 ==> AL scheme, alpha = 2.0 ==> accelerated scheme
      beta                                                                                          !< polarization scheme parameter 0.0 < beta < 2.0. beta = 1.0 ==> AL scheme, beta = 2.0 ==> accelerated scheme
  end type tNumerics

  type(tNumerics) :: num                                                                            ! numerics parameters. Better name?

  logical :: debugRotation                                                                          ! report the rotated (lab frame) deformation gradient aim during iteration

!--------------------------------------------------------------------------------------------------
! PETSc data
  DM :: da                                                                                          ! distributed array describing the 18-dof-per-point grid layout
  SNES :: snes                                                                                      ! nonlinear solver operating on (F, F_tau)
  Vec :: solution_vec                                                                               ! global solution vector holding F (dofs 0:8) and F_tau (dofs 9:17)

!--------------------------------------------------------------------------------------------------
! common pointwise data
  real(pReal), dimension(:,:,:,:,:), allocatable :: &
    F_lastInc, &                                                                                    !< field of previous compatible deformation gradients
    F_tau_lastInc, &                                                                                !< field of previous incompatible deformation gradient
    Fdot, &                                                                                         !< field of assumed rate of compatible deformation gradient
    F_tauDot                                                                                        !< field of assumed rate of incopatible deformation gradient

!--------------------------------------------------------------------------------------------------
! stress, stiffness and compliance average etc.
  real(pReal), dimension(3,3) :: &
    F_aimDot = 0.0_pReal, &                                                                         !< assumed rate of average deformation gradient
    F_aim = math_I3, &                                                                              !< current prescribed deformation gradient
    F_aim_lastInc = math_I3, &                                                                      !< previous average deformation gradient
    F_av = 0.0_pReal, &                                                                             !< average incompatible def grad field
    P_av = 0.0_pReal, &                                                                             !< average 1st Piola--Kirchhoff stress
    P_aim = 0.0_pReal                                                                               !< prescribed average 1st Piola--Kirchhoff stress

  character(len=:), allocatable :: incInfo                                                          !< time and increment information

  real(pReal), dimension(3,3,3,3) :: &
    C_volAvg = 0.0_pReal, &                                                                         !< current volume average stiffness
    C_volAvgLastInc = 0.0_pReal, &                                                                  !< previous volume average stiffness
    C_minMaxAvg = 0.0_pReal, &                                                                      !< current (min+max)/2 stiffness
    C_minMaxAvgLastInc = 0.0_pReal, &                                                               !< previous (min+max)/2 stiffness
    S = 0.0_pReal, &                                                                                !< current compliance (filled up with zeros)
    C_scale = 0.0_pReal, &                                                                          ! reference stiffness used in the polarization mapping
    S_scale = 0.0_pReal                                                                             ! inverse of C_scale

  real(pReal) :: &
    err_BC, &                                                                                       !< deviation from stress BC
    err_curl, &                                                                                     !< RMS of curl of F
    err_div                                                                                         !< RMS of div of P

  integer :: &
    totalIter = 0                                                                                   !< total iteration in current increment

  public :: &
    grid_mechanical_spectral_polarisation_init, &
    grid_mechanical_spectral_polarisation_solution, &
    grid_mechanical_spectral_polarisation_forward, &
    grid_mechanical_spectral_polarisation_updateCoords, &
    grid_mechanical_spectral_polarisation_restartWrite
contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields and fills them with data, potentially from restart info
!--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_init

  real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P
  PetscErrorCode :: ierr
  PetscScalar, pointer, dimension(:,:,:,:) :: &
    FandF_tau, &                                                                                    ! overall pointer to solution data
    F, &                                                                                            ! specific (sub)pointer
    F_tau                                                                                           ! specific (sub)pointer
  PetscInt, dimension(0:worldsize-1) :: localK                                                      ! number of z-layers owned by each MPI rank
  integer(HID_T) :: fileHandle, groupHandle
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
  type(MPI_File) :: fileUnit
#else
  integer :: fileUnit
#endif
  class (tNode), pointer :: &
    num_grid, &
    debug_grid

  print'(/,1x,a)', '<<<+- grid_mechanical_spectral_polarization init -+>>>'; flush(IO_STDOUT)

  print'(/,1x,a)', 'P. Shanthraj et al., International Journal of Plasticity 66:3145, 2015'
  print'( 1x,a)', 'https://doi.org/10.1016/j.ijplas.2014.02.006'

!-------------------------------------------------------------------------------------------------
! debugging options
  debug_grid => config_debug%get('grid',defaultVal=emptyList)
  debugRotation = debug_grid%contains('rotation')

!-------------------------------------------------------------------------------------------------
! read numerical parameters and do sanity checks
  num_grid => config_numerics%get('grid',defaultVal=emptyDict)

  num%update_gamma    = num_grid%get_asBool ('update_gamma',   defaultVal=.false.)
  num%eps_div_atol    = num_grid%get_asFloat('eps_div_atol',   defaultVal=1.0e-4_pReal)
  num%eps_div_rtol    = num_grid%get_asFloat('eps_div_rtol',   defaultVal=5.0e-4_pReal)
  num%eps_curl_atol   = num_grid%get_asFloat('eps_curl_atol',  defaultVal=1.0e-10_pReal)
  num%eps_curl_rtol   = num_grid%get_asFloat('eps_curl_rtol',  defaultVal=5.0e-4_pReal)
  num%eps_stress_atol = num_grid%get_asFloat('eps_stress_atol',defaultVal=1.0e3_pReal)
  num%eps_stress_rtol = num_grid%get_asFloat('eps_stress_rtol',defaultVal=1.0e-3_pReal)
  num%itmin           = num_grid%get_asInt  ('itmin',          defaultVal=1)
  num%itmax           = num_grid%get_asInt  ('itmax',          defaultVal=250)
  num%alpha           = num_grid%get_asFloat('alpha',          defaultVal=1.0_pReal)
  num%beta            = num_grid%get_asFloat('beta',           defaultVal=1.0_pReal)

  if (num%eps_div_atol <= 0.0_pReal)    call IO_error(301,ext_msg='eps_div_atol')
  if (num%eps_div_rtol < 0.0_pReal)     call IO_error(301,ext_msg='eps_div_rtol')
  if (num%eps_curl_atol <= 0.0_pReal)   call IO_error(301,ext_msg='eps_curl_atol')
  if (num%eps_curl_rtol < 0.0_pReal)    call IO_error(301,ext_msg='eps_curl_rtol')
  if (num%eps_stress_atol <= 0.0_pReal) call IO_error(301,ext_msg='eps_stress_atol')
  if (num%eps_stress_rtol < 0.0_pReal)  call IO_error(301,ext_msg='eps_stress_rtol')
  if (num%itmax <= 1)                   call IO_error(301,ext_msg='itmax')
  if (num%itmin > num%itmax .or. num%itmin < 1) call IO_error(301,ext_msg='itmin')
  if (num%alpha <= 0.0_pReal .or. num%alpha >  2.0_pReal) call IO_error(301,ext_msg='alpha')
  if (num%beta < 0.0_pReal .or. num%beta > 2.0_pReal)     call IO_error(301,ext_msg='beta')

!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',ierr)
  CHKERRQ(ierr)
  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
  CHKERRQ(ierr)

!--------------------------------------------------------------------------------------------------
! allocate global fields
  allocate(F_lastInc    (3,3,grid(1),grid(2),grid3),source = 0.0_pReal)
  allocate(Fdot         (3,3,grid(1),grid(2),grid3),source = 0.0_pReal)
  allocate(F_tau_lastInc(3,3,grid(1),grid(2),grid3),source = 0.0_pReal)
  allocate(F_tauDot     (3,3,grid(1),grid(2),grid3),source = 0.0_pReal)

!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
  call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr)
  call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr)
  ! gather the z-decomposition of all ranks so the DMDA knows every rank's local slab
  localK            = 0_pPetscInt
  localK(worldrank) = int(grid3,pPetscInt)
  call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
  call DMDACreate3d(PETSC_COMM_WORLD, &
         DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &                                    ! cut off stencil at boundary
         DMDA_STENCIL_BOX, &                                                                        ! Moore (26) neighborhood around central point
         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                    ! global grid
         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
         18_pPetscInt, 0_pPetscInt, &                                                               ! #dof (2xtensor), ghost boundary width (domain overlap)
         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &                                ! local grid
         da,ierr)                                                                                   ! handle, error
  CHKERRQ(ierr)
  call SNESSetDM(snes,da,ierr); CHKERRQ(ierr)                                                       ! connect snes to da
  call DMsetFromOptions(da,ierr); CHKERRQ(ierr)
  call DMsetUp(da,ierr); CHKERRQ(ierr)
  call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr)                                    ! global solution vector (grid x 18, i.e. every def grad tensor)
  call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr)                 ! residual vector of same shape as solution vector
  CHKERRQ(ierr)
  call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr)              ! specify custom convergence check function "converged"
  CHKERRQ(ierr)
  call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr)                                                 ! pull it all together with additional CLI arguments

!--------------------------------------------------------------------------------------------------
! init fields
  call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)                            ! places pointer on PETSc data
  F     => FandF_tau(0: 8,:,:,:)                                                                    ! first 9 dofs per point: compatible deformation gradient
  F_tau => FandF_tau(9:17,:,:,:)                                                                    ! last 9 dofs per point: polarization field

  restartRead: if (interface_restartInc > 0) then
    print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'

    fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
    groupHandle = HDF5_openGroup(fileHandle,'solver')

    ! scalar/tensor aims are stored by rank 0 only; broadcast after reading
    call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
    call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
    if (ierr /=0) error stop 'MPI error'
    call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
    call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
    if (ierr /=0) error stop 'MPI error'
    call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
    call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
    if (ierr /=0) error stop 'MPI error'
    call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
    call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
    if (ierr /=0) error stop 'MPI error'
    call HDF5_read(F,groupHandle,'F')
    call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
    call HDF5_read(F_tau,groupHandle,'F_tau')
    call HDF5_read(F_tau_lastInc,groupHandle,'F_tau_lastInc')

  elseif (interface_restartInc == 0) then restartRead
    F_lastInc = spread(spread(spread(math_I3,3,grid(1)),4,grid(2)),5,grid3)                         ! initialize to identity
    F = reshape(F_lastInc,[9,grid(1),grid(2),grid3])
    F_tau = 2.0_pReal*F
    F_tau_lastInc = 2.0_pReal*F_lastInc
  end if restartRead

  homogenization_F0 = reshape(F_lastInc, [3,3,product(grid(1:2))*grid3])                            ! set starting condition for homogenization_mechanical_response
  call utilities_updateCoords(reshape(F,shape(F_lastInc)))
  call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, &                                ! stress field, stress avg, global average of stiffness and (min+max)/2
                                      reshape(F,shape(F_lastInc)), &                                ! target F
                                      0.0_pReal)                                                    ! time increment
  call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)                        ! deassociate pointer

  restartRead2: if (interface_restartInc > 0) then
    print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
    call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
    call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
    if (ierr /=0) error stop 'MPI error'
    call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
    call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
    if (ierr /=0) error stop 'MPI error'

    call HDF5_closeGroup(groupHandle)
    call HDF5_closeFile(fileHandle)

    ! reference stiffness is stored in a flat binary sidecar file, read collectively via MPI-IO
    call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
                       MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,ierr)
    call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,ierr)
    call MPI_File_close(fileUnit,ierr)
  end if restartRead2

  call utilities_updateGamma(C_minMaxAvg)
  call utilities_saveReferenceStiffness
  C_scale = C_minMaxAvg
  S_scale = math_invSym3333(C_minMaxAvg)

end subroutine grid_mechanical_spectral_polarisation_init
2013-07-24 18:36:16 +05:30
!--------------------------------------------------------------------------------------------------
!> @brief solution for the Polarisation scheme with internal iterations
!--------------------------------------------------------------------------------------------------
function grid_mechanical_spectral_polarisation_solution(incInfoIn) result(solution)

!--------------------------------------------------------------------------------------------------
! input data for solution
  character(len=*), intent(in) :: &
    incInfoIn                                                                                       !< time and increment information for reporting
  type(tSolutionState) :: &
    solution

!--------------------------------------------------------------------------------------------------
! PETSc Data
  PetscErrorCode :: ierr
  SNESConvergedReason :: reason

  incInfo = incInfoIn

!--------------------------------------------------------------------------------------------------
! update stiffness (and gamma operator)
  S = utilities_maskedCompliance(params%rotation_BC,params%stress_mask,C_volAvg)
  if (num%update_gamma) then
    call utilities_updateGamma(C_minMaxAvg)
    C_scale = C_minMaxAvg
    S_scale = math_invSym3333(C_minMaxAvg)
  end if

!--------------------------------------------------------------------------------------------------
! solve BVP
  call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)

!--------------------------------------------------------------------------------------------------
! check convergence
  call SNESGetConvergedReason(snes,reason,ierr); CHKERRQ(ierr)

  solution%converged = reason > 0
  solution%iterationsNeeded = totalIter
  solution%termIll = terminallyIll
  terminallyIll = .false.
  P_aim = merge(P_av,P_aim,params%stress_mask)                                                      ! update stress aim where stress is prescribed (masked components)

end function grid_mechanical_spectral_polarisation_solution
2013-07-24 18:36:16 +05:30
!--------------------------------------------------------------------------------------------------
!> @brief forwarding routine
!> @details find new boundary conditions and best F estimate for end of current timestep
!--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_forward(cutBack,guess,Delta_t,Delta_t_old,t_remaining,&
                                                         deformation_BC,stress_BC,rotation_BC)

  logical, intent(in) :: &
    cutBack, &                                                                                      !< restore to last converged state (time step was cut back)
    guess                                                                                           !< extrapolate the field from the previous rate
  real(pReal), intent(in) :: &
    Delta_t_old, &
    Delta_t, &
    t_remaining                                                                                     !< remaining time of current load case
  type(tBoundaryCondition), intent(in) :: &
    stress_BC, &
    deformation_BC
  type(rotation), intent(in) :: &
    rotation_BC
  PetscErrorCode :: ierr
  PetscScalar, pointer, dimension(:,:,:,:) :: FandF_tau, F, F_tau
  integer :: i, j, k
  real(pReal), dimension(3,3) :: F_lambda33

  call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
  F     => FandF_tau(0: 8,:,:,:)
  F_tau => FandF_tau(9:17,:,:,:)

  if (cutBack) then
    ! roll stiffness back to the last converged increment; fields are restored by the caller
    C_volAvg    = C_volAvgLastInc
    C_minMaxAvg = C_minMaxAvgLastInc
  else
    C_volAvgLastInc    = C_volAvg
    C_minMaxAvgLastInc = C_minMaxAvg

    F_aimDot = merge(merge(.0_pReal,(F_aim-F_aim_lastInc)/Delta_t_old,stress_BC%mask),.0_pReal,guess) ! estimate deformation rate for prescribed stress components
    F_aim_lastInc = F_aim

    !-----------------------------------------------------------------------------------------------
    ! calculate rate for aim
    if     (deformation_BC%myType=='L') then                                                        ! calculate F_aimDot from given L and current F
      F_aimDot = F_aimDot &
               + matmul(merge(.0_pReal,deformation_BC%values,deformation_BC%mask),F_aim_lastInc)
    elseif (deformation_BC%myType=='dot_F') then                                                    ! F_aimDot is prescribed
      F_aimDot = F_aimDot &
               + merge(.0_pReal,deformation_BC%values,deformation_BC%mask)
    elseif (deformation_BC%myType=='F') then                                                        ! aim at end of load case is prescribed
      F_aimDot = F_aimDot &
               + merge(.0_pReal,(deformation_BC%values - F_aim_lastInc)/t_remaining,deformation_BC%mask)
    end if

    Fdot     = utilities_calculateRate(guess, &
                                       F_lastInc,reshape(F,[3,3,grid(1),grid(2),grid3]),Delta_t_old, &
                                       rotation_BC%rotate(F_aimDot,active=.true.))
    F_tauDot = utilities_calculateRate(guess, &
                                       F_tau_lastInc,reshape(F_tau,[3,3,grid(1),grid(2),grid3]), Delta_t_old, &
                                       rotation_BC%rotate(F_aimDot,active=.true.))
    F_lastInc     = reshape(F,    [3,3,grid(1),grid(2),grid3])
    F_tau_lastInc = reshape(F_tau,[3,3,grid(1),grid(2),grid3])

    homogenization_F0 = reshape(F,[3,3,product(grid(1:2))*grid3])
  end if

!--------------------------------------------------------------------------------------------------
! update average and local deformation gradients
  F_aim = F_aim_lastInc + F_aimDot * Delta_t
  ! update stress aim: 'P' targets a value at end of load case, 'dot_P' prescribes a rate
  if (stress_BC%myType=='P')     P_aim = P_aim &
                                       + merge(.0_pReal,(stress_BC%values - P_aim)/t_remaining,stress_BC%mask)*Delta_t
  if (stress_BC%myType=='dot_P') P_aim = P_aim &
                                       + merge(.0_pReal,stress_BC%values,stress_BC%mask)*Delta_t

  F = reshape(utilities_forwardField(Delta_t,F_lastInc,Fdot, &                                      ! estimate of F at end of time+Delta_t that matches rotated F_aim on average
              rotation_BC%rotate(F_aim,active=.true.)),&
              [9,grid(1),grid(2),grid3])
  if (guess) then
    F_tau = reshape(Utilities_forwardField(Delta_t,F_tau_lastInc,F_taudot), &
                    [9,grid(1),grid(2),grid3])                                                      ! does not have any average value as boundary condition
  else
    ! no rate information available: re-initialize the polarization field pointwise from F
    do k = 1, grid3; do j = 1, grid(2); do i = 1, grid(1)
      F_lambda33 = reshape(F_tau(1:9,i,j,k)-F(1:9,i,j,k),[3,3])
      F_lambda33 = math_I3 &
                 + math_mul3333xx33(S_scale,0.5_pReal*matmul(F_lambda33, &
                   math_mul3333xx33(C_scale,matmul(transpose(F_lambda33),F_lambda33)-math_I3)))
      F_tau(1:9,i,j,k) = reshape(F_lambda33,[9])+F(1:9,i,j,k)
    end do; end do; end do
  end if

  call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)

!--------------------------------------------------------------------------------------------------
! set module wide available data
  params%stress_mask = stress_BC%mask
  params%rotation_BC = rotation_BC
  params%Delta_t     = Delta_t

end subroutine grid_mechanical_spectral_polarisation_forward
!--------------------------------------------------------------------------------------------------
!> @brief Update coordinates from the current compatible deformation gradient field.
!--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_updateCoords

  PetscErrorCode :: ierr
  PetscScalar, dimension(:,:,:,:), pointer :: solution_data

  ! borrow the PETSc solution vector; dofs 0:8 per point hold F (9:17 hold F_tau)
  call DMDAVecGetArrayF90(da,solution_vec,solution_data,ierr)
  CHKERRQ(ierr)
  call utilities_updateCoords(solution_data(0:8,:,:,:))
  call DMDAVecRestoreArrayF90(da,solution_vec,solution_data,ierr)
  CHKERRQ(ierr)

end subroutine grid_mechanical_spectral_polarisation_updateCoords
2019-10-24 17:16:36 +05:30
!--------------------------------------------------------------------------------------------------
!> @brief Write current solver and constitutive data for restart to file.
!> @details Distributed field data (F, F_tau, and their last-increment values) are written
!> collectively by all ranks; rank-replicated aims and stiffness averages are appended by rank 0.
!--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_restartWrite

  PetscErrorCode :: ierr
  integer(HID_T) :: fileHandle, groupHandle
  PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau

  call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
  F     => FandF_tau(0: 8,:,:,:)
  F_tau => FandF_tau(9:17,:,:,:)

  print'(1x,a)', 'writing solver data required for restart to file'; flush(IO_STDOUT)

  ! distributed datasets: every rank contributes its local slab
  fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
  groupHandle = HDF5_addGroup(fileHandle,'solver')
  call HDF5_write(F,groupHandle,'F')
  call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
  call HDF5_write(F_tau,groupHandle,'F_tau')
  call HDF5_write(F_tau_lastInc,groupHandle,'F_tau_lastInc')
  call HDF5_closeGroup(groupHandle)
  call HDF5_closeFile(fileHandle)

  ! rank-replicated data: written serially by rank 0 only (restart read broadcasts them)
  if (worldrank == 0) then
    fileHandle  = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a',.false.)
    groupHandle = HDF5_openGroup(fileHandle,'solver')
    call HDF5_write(P_aim,groupHandle,'P_aim',.false.)                                              ! BUGFIX: was writing F_aim under the name 'P_aim'; restart read expects the stress aim here
    call HDF5_write(F_aim,groupHandle,'F_aim',.false.)
    call HDF5_write(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
    call HDF5_write(F_aimDot,groupHandle,'F_aimDot',.false.)
    call HDF5_write(C_volAvg,groupHandle,'C_volAvg',.false.)
    call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
    call HDF5_closeGroup(groupHandle)
    call HDF5_closeFile(fileHandle)
  end if

  if (num%update_gamma) call utilities_saveReferenceStiffness

  call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)

end subroutine grid_mechanical_spectral_polarisation_restartWrite
2019-10-24 17:16:36 +05:30
!--------------------------------------------------------------------------------------------------
!> @brief Custom convergence check: equilibrium (div P), compatibility (curl F), and stress BC.
!--------------------------------------------------------------------------------------------------
subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,ierr)

  SNES :: snes_local
  PetscInt, intent(in) :: PETScIter
  PetscReal, intent(in) :: &
    devNull1, &
    devNull2, &
    devNull3
  SNESConvergedReason :: reason
  PetscObject :: dummy
  PetscErrorCode :: ierr

  real(pReal) :: tol_curl, tol_div, tol_BC
  logical :: within_tol

  ! each tolerance is the relative part scaled by the current field magnitude,
  ! floored by the corresponding absolute tolerance
  tol_curl = max(maxval(abs(F_aim-math_I3))*num%eps_curl_rtol,   num%eps_curl_atol)
  tol_div  = max(maxval(abs(P_av))         *num%eps_div_rtol,    num%eps_div_atol)
  tol_BC   = max(maxval(abs(P_av))         *num%eps_stress_rtol, num%eps_stress_atol)

  within_tol =       err_div /tol_div  < 1.0_pReal &
             .and. err_curl/tol_curl < 1.0_pReal &
             .and. err_BC  /tol_BC   < 1.0_pReal

  if (terminallyIll .or. (totalIter >= num%itmin .and. within_tol)) then
    reason = 1                                                                                      ! converged (terminally ill states also stop iterating)
  elseif (totalIter >= num%itmax) then
    reason = -1                                                                                     ! iteration budget exhausted
  else
    reason = 0                                                                                      ! keep iterating
  end if

  print'(/,1x,a)', '... reporting .............................................................'
  print'(/,1x,a,f12.2,a,es8.2,a,es9.2,a)', 'error divergence = ', &
          err_div/tol_div, ' (',err_div, ' / m, tol = ',tol_div,')'
  print '(1x,a,f12.2,a,es8.2,a,es9.2,a)', 'error curl = ', &
          err_curl/tol_curl,' (',err_curl,' -, tol = ',tol_curl,')'
  print '(1x,a,f12.2,a,es8.2,a,es9.2,a)', 'error stress BC = ', &
          err_BC/tol_BC, ' (',err_BC, ' Pa, tol = ',tol_BC,')'
  print'(/,1x,a)', '==========================================================================='
  flush(IO_STDOUT)

end subroutine converged
!--------------------------------------------------------------------------------------------------
!> @brief forms the residual vector
!--------------------------------------------------------------------------------------------------
subroutine formResidual(in, FandF_tau, &
                        residuum, dummy,ierr)

  DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in                                              !< DMDA info (needs to be named "in" for macros like XRANGE to work)
  PetscScalar, dimension(3,3,2,XG_RANGE,YG_RANGE,ZG_RANGE), &
    target, intent(in) :: FandF_tau                                                                 ! combined (F, F_tau) solution field
  PetscScalar, dimension(3,3,2,X_RANGE,Y_RANGE,Z_RANGE),&
    target, intent(out) :: residuum                                                                 !< residuum field
  PetscScalar, pointer, dimension(:,:,:,:,:) :: &
    F, &
    F_tau, &
    residual_F, &
    residual_F_tau
  PetscInt :: &
    PETScIter, &
    nfuncs
  PetscObject :: dummy
  PetscErrorCode :: ierr
  integer :: &
    i, j, k, e

!---------------------------------------------------------------------------------------------------
  ! slice the combined field: component 1 is F, component 2 is F_tau (same for the residual)
  F              => FandF_tau(1:3,1:3,1,&
                              XG_RANGE,YG_RANGE,ZG_RANGE)
  F_tau          => FandF_tau(1:3,1:3,2,&
                              XG_RANGE,YG_RANGE,ZG_RANGE)
  residual_F     => residuum(1:3,1:3,1,&
                             X_RANGE, Y_RANGE, Z_RANGE)
  residual_F_tau => residuum(1:3,1:3,2,&
                             X_RANGE, Y_RANGE, Z_RANGE)

  F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt                                                   ! local average, completed by the Allreduce below
  call MPI_Allreduce(MPI_IN_PLACE,F_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)

  call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr)
  call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr)

  if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1                                              ! new increment

!--------------------------------------------------------------------------------------------------
! begin of new iteration
  newIteration: if (totalIter <= PETScIter) then
    totalIter = totalIter + 1
    print'(1x,a,3(a,i0))', trim(incInfo), ' @ Iteration ', num%itmin, '≤',totalIter, '≤', num%itmax
    if (debugRotation) print'(/,1x,a,/,2(3(f12.7,1x)/),3(f12.7,1x))', &
      'deformation gradient aim (lab) =', transpose(params%rotation_BC%rotate(F_aim,active=.true.))
    print'(/,1x,a,/,2(3(f12.7,1x)/),3(f12.7,1x))', &
      'deformation gradient aim       =', transpose(F_aim)
    flush(IO_STDOUT)
  end if newIteration

!--------------------------------------------------------------------------------------------------
! build the polarization field (input to the gamma convolution) pointwise
  tensorField_real = 0.0_pReal
  do k = 1, grid3; do j = 1, grid(2); do i = 1, grid(1)
    tensorField_real(1:3,1:3,i,j,k) = &
      num%beta*math_mul3333xx33(C_scale,F(1:3,1:3,i,j,k) - math_I3) -&
      num%alpha*matmul(F(1:3,1:3,i,j,k), &
                       math_mul3333xx33(C_scale,F_tau(1:3,1:3,i,j,k) - F(1:3,1:3,i,j,k) - math_I3))
  end do; end do; end do

!--------------------------------------------------------------------------------------------------
! doing convolution in Fourier space
  call utilities_FFTtensorForward
  call utilities_fourierGammaConvolution(params%rotation_BC%rotate(num%beta*F_aim,active=.true.))
  call utilities_FFTtensorBackward

!--------------------------------------------------------------------------------------------------
! constructing residual
  residual_F_tau = num%beta*F - tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3)

!--------------------------------------------------------------------------------------------------
! evaluate constitutive response
  call utilities_constitutiveResponse(residual_F, &                                                 ! "residuum" gets field of first PK stress (to save memory)
                                      P_av,C_volAvg,C_minMaxAvg, &
                                      F - residual_F_tau/num%beta,params%Delta_t,params%rotation_BC)
  call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)

!--------------------------------------------------------------------------------------------------
! stress BC handling
  F_aim = F_aim - math_mul3333xx33(S, P_av - P_aim)                                                 ! S = 0.0 for no bc
  err_BC = maxval(abs(merge(math_mul3333xx33(C_scale,F_aim-params%rotation_BC%rotate(F_av)), &
                            P_av-P_aim, &
                            params%stress_mask)))

! calculate divergence
  tensorField_real = 0.0_pReal
  tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) = residual_F                                !< stress field in disguise
  call utilities_FFTtensorForward
  err_div = utilities_divergenceRMS()                                                              !< root mean squared error in divergence of stress

!--------------------------------------------------------------------------------------------------
! constructing residual
  ! NOTE: residual_F still holds the stress field here; it is overwritten in place below
  e = 0
  do k = 1, grid3; do j = 1, grid(2); do i = 1, grid(1)
    e = e + 1
    residual_F(1:3,1:3,i,j,k) = &
      math_mul3333xx33(math_invSym3333(homogenization_dPdF(1:3,1:3,1:3,1:3,e) + C_scale), &
                       residual_F(1:3,1:3,i,j,k) - matmul(F(1:3,1:3,i,j,k), &
                       math_mul3333xx33(C_scale,F_tau(1:3,1:3,i,j,k) - F(1:3,1:3,i,j,k) - math_I3))) &
      + residual_F_tau(1:3,1:3,i,j,k)
  end do; end do; end do

!--------------------------------------------------------------------------------------------------
! calculating curl
  tensorField_real = 0.0_pReal
  tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) = F
  call utilities_FFTtensorForward
  err_curl = utilities_curlRMS()

end subroutine formResidual
2021-02-09 03:51:53 +05:30
end module grid_mechanical_spectral_polarisation