Merge remote-tracking branch 'remotes/origin/Grid-FEM-2' into development

Franz Roters 2019-03-27 16:57:20 +01:00
commit 1e20e94792
19 changed files with 1810 additions and 1126 deletions


@ -4,8 +4,8 @@ stages:
- preprocessing
- postprocessing
- compilePETSc
- prepareSpectral
- spectral
- prepareGrid
- grid
- compileMarc
- marc
- compileAbaqus
@ -141,9 +141,9 @@ Pre_General:
- master
- release
Spectral_geometryPacking:
grid_geometryPacking:
stage: preprocessing
script: Spectral_geometryPacking/test.py
script: grid_geometryPacking/test.py
except:
- master
- release
@ -172,7 +172,7 @@ Post_General:
Post_GeometryReconstruction:
stage: postprocessing
script: Spectral_geometryReconstruction/test.py
script: spectral_geometryReconstruction/test.py
except:
- master
- release
@ -215,12 +215,12 @@ Post_OrientationConversion:
- release
###################################################################################################
Compile_Spectral_Intel:
grid_mech_compile_Intel:
stage: compilePETSc
script:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
- cp -r SpectralAll_compile SpectralAll_compile_Intel
- SpectralAll_compile_Intel/test.py
- cp -r grid_mech_compile grid_mech_compile_Intel
- grid_mech_compile_Intel/test.py
except:
- master
- release
@ -235,12 +235,12 @@ Compile_FEM_Intel:
- master
- release
Compile_Spectral_GNU:
grid_mech_compile_GNU:
stage: compilePETSc
script:
- module load $GNUCompiler $MPICH_GNU $PETSc_MPICH_GNU
- cp -r SpectralAll_compile SpectralAll_compile_GNU
- SpectralAll_compile_GNU/test.py
- cp -r grid_mech_compile grid_mech_compile_GNU
- grid_mech_compile_GNU/test.py
except:
- master
- release
@ -257,134 +257,134 @@ Compile_FEM_GNU:
###################################################################################################
Compile_Intel_Prepare:
stage: prepareSpectral
stage: prepareGrid
script:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
- cd $DAMASKROOT
- make clean spectral processing
- make clean grid processing
except:
- master
- release
###################################################################################################
Thermal:
stage: spectral
stage: grid
script: Thermal/test.py
except:
- master
- release
Spectral_PackedGeometry:
stage: spectral
script: Spectral_PackedGeometry/test.py
grid_packedGeometry:
stage: grid
script: grid_packedGeometry/test.py
except:
- master
- release
Spectral_parsingArguments:
stage: spectral
script: Spectral_parsingArguments/test.py
grid_parsingArguments:
stage: grid
script: grid_parsingArguments/test.py
except:
- master
- release
StateIntegration_compareVariants:
stage: spectral
stage: grid
script: StateIntegration_compareVariants/test.py
except:
- master
- release
nonlocal_densityConservation:
stage: spectral
stage: grid
script: nonlocal_densityConservation/test.py
except:
- master
- release
Spectral_ipNeighborhood:
stage: spectral
stage: grid
script: Spectral_ipNeighborhood/test.py
except:
- master
- release
RGC_DetectChanges:
stage: spectral
stage: grid
script: RGC_DetectChanges/test.py
except:
- master
- release
Nonlocal_Damage_DetectChanges:
stage: spectral
stage: grid
script: Nonlocal_Damage_DetectChanges/test.py
except:
- master
- release
SpectralAll_restart:
stage: spectral
script: SpectralAll_restart/test.py
grid_all_restart:
stage: grid
script: grid_all_restart/test.py
except:
- master
- release
SpectralAll_parsingLoadCase:
stage: spectral
script: SpectralAll_parsingLoadCase/test.py
grid_parsingLoadCase:
stage: grid
script: grid_parsingLoadCase/test.py
except:
- master
- release
SpectralBasic_loadCaseRotation:
stage: spectral
script: SpectralBasic_loadCaseRotation/test.py
grid_all_loadCaseRotation:
stage: grid
script: grid_all_loadCaseRotation/test.py
except:
- master
- release
Spectral_MPI:
stage: spectral
grid_mech_MPI:
stage: grid
script:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
- Spectral_MPI/test.py
- grid_mech_MPI/test.py
except:
- master
- release
SpectralAll_restartMPI:
stage: spectral
grid_all_restartMPI:
stage: grid
script:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
- SpectralAll_restartMPI/test.py
- grid_all_restartMPI/test.py
except:
- master
- release
Plasticity_DetectChanges:
stage: spectral
stage: grid
script: Plasticity_DetectChanges/test.py
except:
- master
- release
Homogenization:
stage: spectral
stage: grid
script: Homogenization/test.py
except:
- master
- release
Phenopowerlaw_singleSlip:
stage: spectral
stage: grid
script: Phenopowerlaw_singleSlip/test.py
except:
- master
- release
TextureComponents:
stage: spectral
stage: grid
script: TextureComponents/test.py
except:
- master
@ -451,9 +451,9 @@ Abaqus_compile:
- release
###################################################################################################
SpectralExample:
grid_all_example:
stage: example
script: SpectralAll_example/test.py
script: grid_all_example/test.py
only:
- development
@ -463,7 +463,7 @@ SpectralRuntime:
script:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel
- cd $DAMASKROOT
- make clean spectral processing OPTIMIZATION=AGGRESSIVE
- make clean grid processing OPTIMIZATION=AGGRESSIVE
- cd $TESTROOT/performance # location of old results
- git checkout . # undo any changes (i.e. run time data from non-development branch)
- cd $DAMASKROOT/PRIVATE/testing
@ -501,7 +501,7 @@ Marc:
- master
- release
Spectral:
GridSolver:
stage: createDocumentation
script:
- module load $IntelCompiler $MPICH_Intel $PETSc_MPICH_Intel $Doxygen


@ -105,9 +105,9 @@ set (CMAKE_C_COMPILER "${PETSC_MPICC}")
# Now start to care about DAMASK
# DAMASK solver defines project to build
if (DAMASK_SOLVER STREQUAL "SPECTRAL")
if (DAMASK_SOLVER STREQUAL "GRID")
project (DAMASK_spectral Fortran C)
add_definitions (-DSpectral)
add_definitions (-DGrid)
message ("Building Spectral Solver\n")
elseif (DAMASK_SOLVER STREQUAL "FEM")
project (DAMASK_FEM Fortran C)


@ -3,20 +3,24 @@ SHELL = /bin/sh
# Makefile for the installation of DAMASK
########################################################################################
.PHONY: all
all: spectral FEM processing
all: grid FEM processing
.PHONY: grid
grid: build/grid
@(cd build/grid;make -j4 --no-print-directory -ws all install;)
.PHONY: spectral
spectral: build/spectral
@(cd build/spectral;make -j4 --no-print-directory -ws all install;)
spectral: build/grid
@(cd build/grid;make -j4 --no-print-directory -ws all install;)
.PHONY: FEM
FEM: build/FEM
@(cd build/FEM; make -j4 --no-print-directory -ws all install;)
.PHONY: build/spectral
build/spectral:
@mkdir -p build/spectral
@(cd build/spectral; cmake -Wno-dev -DDAMASK_SOLVER=SPECTRAL -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
.PHONY: build/grid
build/grid:
@mkdir -p build/grid
@(cd build/grid; cmake -Wno-dev -DDAMASK_SOLVER=GRID -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DBUILDCMD_POST=${BUILDCMD_POST} -DBUILDCMD_PRE=${BUILDCMD_PRE} -DOPTIMIZATION=${OPTIMIZATION} -DOPENMP=${OPENMP} ../../;)
.PHONY: build/FEM
build/FEM:

@ -1 +1 @@
Subproject commit d81a446bdfaa2bc3c939e802c50a5fd8f2fb38e3
Subproject commit 397d9265ef677966610831bbf4d1358d879a4ac2


@ -11,11 +11,8 @@
# Compile_cpp and link_exe for Abaqus make utility.
#
import os, re, glob, driverUtils
from damask import version as DAMASKVERSION
from damask import Environment
myEnv = Environment()
if myEnv.options['DAMASK_HDF5'] == 'ON':
if False:
# use hdf5 compiler wrapper in $PATH
fortCmd = os.popen('h5fc -shlib -show').read().replace('\n','') # complicated way needed to pass in DAMASKVERSION string
link_sl += fortCmd.split()[1:]
@ -44,7 +41,7 @@ compile_fortran = (fortCmd + " -c -fPIC -auto -shared-intel " +
"-implicitnone -standard-semantics " +
"-assume nostd_mod_proc_name " +
"-real-size 64 " +
'-DDAMASKVERSION=\\\"%s\\\"'%DAMASKVERSION)
'-DDAMASKVERSION=\\\"n/a\\\"')
# Abaqus/CAE will generate an input file without parts and assemblies.
cae_no_parts_input_file=ON
@ -57,6 +54,3 @@ ask_delete=OFF
# Remove the temporary names from the namespace
del fortCmd
del Environment
del myEnv
del DAMASKVERSION


@ -11,11 +11,8 @@
# Compile_cpp and link_exe for Abaqus make utility.
#
import os, re, glob, driverUtils
from damask import version as DAMASKVERSION
from damask import Environment
myEnv = Environment()
if myEnv.options['DAMASK_HDF5'] == 'ON':
if False:
# use hdf5 compiler wrapper in $PATH
fortCmd = os.popen('h5fc -shlib -show').read().replace('\n','') # complicated way needed to pass in DAMASKVERSION string
link_sl += fortCmd.split()[1:]
@ -49,7 +46,7 @@ compile_fortran = (fortCmd + " -c -fPIC -auto -shared-intel " +
"-check bounds,format,output_conversion,uninit " +
"-ftrapuv -fpe-all0 " +
"-g -traceback -gen-interfaces -fp-stack-check -fp-model strict " +
'-DDAMASKVERSION=\\\"%s\\\"'%DAMASKVERSION)
'-DDAMASKVERSION=\\\"n/a\\\"')
# Abaqus/CAE will generate an input file without parts and assemblies.
cae_no_parts_input_file=ON
@ -62,6 +59,3 @@ ask_delete=OFF
# Remove the temporary names from the namespace
del fortCmd
del Environment
del myEnv
del DAMASKVERSION


@ -176,6 +176,7 @@ if (PROJECT_NAME STREQUAL "DAMASK_spectral")
add_library(SPECTRAL_SOLVER OBJECT
"grid_thermal_spectral.f90"
"grid_damage_spectral.f90"
"grid_mech_FEM.f90"
"grid_mech_spectral_basic.f90"
"grid_mech_spectral_polarisation.f90")
add_dependencies(SPECTRAL_SOLVER SPECTRAL_UTILITIES)


@ -31,6 +31,8 @@ program DAMASK_spectral
IO_lc, &
IO_intOut, &
IO_warning
use config, only: &
config_numerics
use debug, only: &
debug_level, &
debug_spectral, &
@ -50,7 +52,6 @@ program DAMASK_spectral
worldsize, &
stagItMax, &
maxCutBack, &
spectral_solver, &
continueCalculation
use homogenization, only: &
materialpoint_sizeResults, &
@ -73,6 +74,7 @@ program DAMASK_spectral
FIELD_DAMAGE_ID
use grid_mech_spectral_basic
use grid_mech_spectral_polarisation
use grid_mech_FEM
use grid_damage_spectral
use grid_thermal_spectral
use results
@ -165,21 +167,28 @@ program DAMASK_spectral
!--------------------------------------------------------------------------------------------------
! assign mechanics solver depending on selected type
select case (spectral_solver)
case (GRID_MECH_SPECTRAL_BASIC_LABEL)
select case (trim(config_numerics%getString('spectral_solver',defaultVal='basic')))
case ('basic')
mech_init => grid_mech_spectral_basic_init
mech_forward => grid_mech_spectral_basic_forward
mech_solution => grid_mech_spectral_basic_solution
case (GRID_MECH_SPECTRAL_POLARISATION_LABEL)
case ('polarisation')
if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0) &
call IO_warning(42_pInt, ext_msg='debug Divergence')
mech_init => grid_mech_spectral_polarisation_init
mech_forward => grid_mech_spectral_polarisation_forward
mech_solution => grid_mech_spectral_polarisation_solution
case ('fem')
if(iand(debug_level(debug_spectral),debug_levelBasic)/= 0) &
call IO_warning(42_pInt, ext_msg='debug Divergence')
mech_init => grid_mech_FEM_init
mech_forward => grid_mech_FEM_forward
mech_solution => grid_mech_FEM_solution
case default
call IO_error(error_ID = 891_pInt, ext_msg = trim(spectral_solver))
call IO_error(error_ID = 891_pInt, ext_msg = config_numerics%getString('spectral_solver'))
end select
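For reference, the three cases above assign procedure pointers that the driver later calls without knowing which mechanics solver is active. A minimal sketch of the matching declarations, assuming the usual DAMASK pattern (the actual declarations sit earlier in DAMASK_spectral.f90 and are not part of this hunk):

  procedure(grid_mech_spectral_basic_init),     pointer :: mech_init
  procedure(grid_mech_spectral_basic_forward),  pointer :: mech_forward
  procedure(grid_mech_spectral_basic_solution), pointer :: mech_solution

Given the getString call above, the new solver is selected by setting spectral_solver to fem in the numerics configuration; basic remains the default and polarisation is unchanged.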


@ -72,7 +72,7 @@ subroutine FE_init
modelName = getSolverJobName()
#if defined(Spectral) || defined(FEM)
#if defined(Grid) || defined(FEM)
restartInc = interface_RestartInc
if(restartInc < 0_pInt) then


@ -823,8 +823,6 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
msg = 'microstructure count mismatch'
case (846_pInt)
msg = 'rotation for load case rotation ill-defined (R:RT != I)'
case (847_pInt)
msg = 'update of gamma operator not possible when pre-calculated'
case (880_pInt)
msg = 'mismatch of microstructure count and a*b*c in geom file'
case (891_pInt)


@ -608,7 +608,7 @@ character(len=65536) function getString(this,key,defaultVal,raw)
implicit none
class(tPartitionedStringList), target, intent(in) :: this
character(len=*), intent(in) :: key
character(len=65536), intent(in), optional :: defaultVal
character(len=*), intent(in), optional :: defaultVal
logical, intent(in), optional :: raw
type(tPartitionedStringList), pointer :: item
logical :: found, &
@ -622,7 +622,7 @@ character(len=65536) function getString(this,key,defaultVal,raw)
found = present(defaultVal)
if (found) then
getString = trim(defaultVal)
if (len_trim(getString) /= len_trim(defaultVal)) call IO_error(0,ext_msg='getString')
!if (len_trim(getString) /= len_trim(defaultVal)) call IO_error(0,ext_msg='getString')
endif
item => this
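For context, the previous declaration character(len=65536) forced the actual argument to be at least that long, so a short literal default such as 'basic' was not a conforming argument; with character(len=*) the dummy takes the length of whatever is passed, which is what allows calls like the one introduced in DAMASK_spectral.f90 (the variable name below is illustrative only):

  character(len=65536) :: solver
  solver = config_numerics%getString('spectral_solver', defaultVal='basic')  ! short literal default is now legal

The disabled check above guarded against truncation when copying defaultVal into the fixed-length result; with an assumed-length dummy it is evidently considered unnecessary here.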


@ -170,7 +170,7 @@ function grid_damage_spectral_solution(timeinc,timeinc_old,loadCaseTime) result(
loadCaseTime !< remaining time of current load case
integer :: i, j, k, cell
type(tSolutionState) :: solution
PetscInt ::position
PetscInt :: devNull
PetscReal :: minDamage, maxDamage, stagNorm, solnNorm
PetscErrorCode :: ierr
@ -208,8 +208,8 @@ function grid_damage_spectral_solution(timeinc,timeinc_old,loadCaseTime) result(
call damage_nonlocal_putNonLocalDamage(damage_current(i,j,k),1,cell)
enddo; enddo; enddo
call VecMin(solution_vec,position,minDamage,ierr); CHKERRQ(ierr)
call VecMax(solution_vec,position,maxDamage,ierr); CHKERRQ(ierr)
call VecMin(solution_vec,devNull,minDamage,ierr); CHKERRQ(ierr)
call VecMax(solution_vec,devNull,maxDamage,ierr); CHKERRQ(ierr)
if (solution%converged) &
write(6,'(/,a)') ' ... nonlocal damage converged .....................................'
write(6,'(/,a,f8.6,2x,f8.6,2x,f8.6,/)',advance='no') ' Minimum|Maximum|Delta Damage = ',&

src/grid_mech_FEM.f90 (new file, 737 lines)

@ -0,0 +1,737 @@
!--------------------------------------------------------------------------------------------------
!> @author Arko Jyoti Bhattacharjee, Max-Planck-Institut für Eisenforschung GmbH
!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
!> @author Pratheek Shanthraj, Max-Planck-Institut für Eisenforschung GmbH
!> @brief Grid solver for mechanics: FEM
!--------------------------------------------------------------------------------------------------
module grid_mech_FEM
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
use PETScdmda
use PETScsnes
use prec, only: &
pInt, &
pReal
use math, only: &
math_I3
use spectral_utilities, only: &
tSolutionState, &
tSolutionParams
implicit none
private
!--------------------------------------------------------------------------------------------------
! derived types
type(tSolutionParams), private :: params
!--------------------------------------------------------------------------------------------------
! PETSc data
DM, private :: mech_grid
SNES, private :: mech_snes
Vec, private :: solution_current, solution_lastInc, solution_rate
!--------------------------------------------------------------------------------------------------
! common pointwise data
real(pReal), private, dimension(:,:,:,:,:), allocatable :: F, P_current, F_lastInc
real(pReal), private :: detJ
real(pReal), private, dimension(3) :: delta
real(pReal), private, dimension(3,8) :: BMat
real(pReal), private, dimension(8,8) :: HGMat
PetscInt, private :: xstart,ystart,zstart,xend,yend,zend
!--------------------------------------------------------------------------------------------------
! stress, stiffness and compliance average etc.
real(pReal), private, dimension(3,3) :: &
F_aimDot = 0.0_pReal, & !< assumed rate of average deformation gradient
F_aim = math_I3, & !< current prescribed deformation gradient
F_aim_lastIter = math_I3, &
F_aim_lastInc = math_I3, & !< previous average deformation gradient
P_av = 0.0_pReal !< average 1st Piola--Kirchhoff stress
character(len=1024), private :: incInfo !< time and increment information
real(pReal), private, dimension(3,3,3,3) :: &
C_volAvg = 0.0_pReal, & !< current volume average stiffness
C_volAvgLastInc = 0.0_pReal, & !< previous volume average stiffness
S = 0.0_pReal !< current compliance (filled up with zeros)
real(pReal), private :: &
err_BC !< deviation from stress BC
integer(pInt), private :: &
totalIter = 0_pInt !< total iteration in current increment
public :: &
grid_mech_FEM_init, &
grid_mech_FEM_solution, &
grid_mech_FEM_forward
contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields and fills them with data, potentially from restart info
!--------------------------------------------------------------------------------------------------
subroutine grid_mech_FEM_init
use IO, only: &
IO_intOut, &
IO_error, &
IO_open_jobFile_binary
use FEsolving, only: &
restartInc
use numerics, only: &
worldrank, &
worldsize, &
petsc_options
use homogenization, only: &
materialpoint_F0
use DAMASK_interface, only: &
getSolverJobName
use spectral_utilities, only: &
utilities_constitutiveResponse, &
utilities_updateIPcoords, &
wgt
use mesh, only: &
geomSize, &
grid, &
grid3
use math, only: &
math_invSym3333
implicit none
real(pReal) :: HGCoeff = 0e-2_pReal
PetscInt, dimension(:), allocatable :: localK
real(pReal), dimension(3,3) :: &
temp33_Real = 0.0_pReal
real(pReal), dimension(4,8) :: &
HGcomp = reshape([ 1.0_pReal, 1.0_pReal, 1.0_pReal,-1.0_pReal, &
1.0_pReal,-1.0_pReal,-1.0_pReal, 1.0_pReal, &
-1.0_pReal, 1.0_pReal,-1.0_pReal, 1.0_pReal, &
-1.0_pReal,-1.0_pReal, 1.0_pReal,-1.0_pReal, &
-1.0_pReal,-1.0_pReal, 1.0_pReal, 1.0_pReal, &
-1.0_pReal, 1.0_pReal,-1.0_pReal,-1.0_pReal, &
1.0_pReal,-1.0_pReal,-1.0_pReal,-1.0_pReal, &
1.0_pReal, 1.0_pReal, 1.0_pReal, 1.0_pReal], [4,8])
PetscErrorCode :: ierr
integer(pInt) :: rank
integer :: fileUnit
character(len=1024) :: rankStr
real(pReal), dimension(3,3,3,3) :: devNull
PetscScalar, pointer, dimension(:,:,:,:) :: &
u_current,u_lastInc
write(6,'(/,a)') ' <<<+- grid_mech_FEM init -+>>>'
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,'-mech_snes_type newtonls -mech_ksp_type fgmres &
&-mech_ksp_max_it 25 -mech_pc_type ml -mech_mg_levels_ksp_type chebyshev',ierr)
CHKERRQ(ierr)
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_options),ierr)
CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------
! allocate global fields
allocate (F (3,3,grid(1),grid(2),grid3),source = 0.0_pReal)
allocate (P_current (3,3,grid(1),grid(2),grid3),source = 0.0_pReal)
allocate (F_lastInc (3,3,grid(1),grid(2),grid3),source = 0.0_pReal)
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,mech_snes,ierr); CHKERRQ(ierr)
call SNESSetOptionsPrefix(mech_snes,'mech_',ierr);CHKERRQ(ierr)
allocate(localK(worldsize), source = 0); localK(worldrank+1) = grid3
do rank = 1, worldsize
call MPI_Bcast(localK(rank),1,MPI_INTEGER,rank-1,PETSC_COMM_WORLD,ierr)
enddo
call DMDACreate3d(PETSC_COMM_WORLD, &
DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, &
DMDA_STENCIL_BOX, &
grid(1),grid(2),grid(3), &
1, 1, worldsize, &
3, 1, &
[grid(1)],[grid(2)],localK, &
mech_grid,ierr)
CHKERRQ(ierr)
call DMDASetUniformCoordinates(mech_grid,0.0,geomSize(1),0.0,geomSize(2),0.0,geomSize(3),ierr)
CHKERRQ(ierr)
call SNESSetDM(mech_snes,mech_grid,ierr); CHKERRQ(ierr)
call DMsetFromOptions(mech_grid,ierr); CHKERRQ(ierr)
call DMsetUp(mech_grid,ierr); CHKERRQ(ierr)
call DMCreateGlobalVector(mech_grid,solution_current,ierr); CHKERRQ(ierr)
call DMCreateGlobalVector(mech_grid,solution_lastInc,ierr); CHKERRQ(ierr)
call DMCreateGlobalVector(mech_grid,solution_rate ,ierr); CHKERRQ(ierr)
call DMSNESSetFunctionLocal(mech_grid,formResidual,PETSC_NULL_SNES,ierr)
CHKERRQ(ierr)
call DMSNESSetJacobianLocal(mech_grid,formJacobian,PETSC_NULL_SNES,ierr)
CHKERRQ(ierr)
call SNESSetConvergenceTest(mech_snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr)
CHKERRQ(ierr) ! specify custom convergence check function "_converged"
call SNESSetMaxLinearSolveFailures(mech_snes, huge(1), ierr); CHKERRQ(ierr) ! ignore linear solve failures
call SNESSetFromOptions(mech_snes,ierr); CHKERRQ(ierr) ! pull it all together with additional cli arguments
!--------------------------------------------------------------------------------------------------
! init fields
call VecSet(solution_current,0.0,ierr);CHKERRQ(ierr)
call VecSet(solution_lastInc,0.0,ierr);CHKERRQ(ierr)
call VecSet(solution_rate ,0.0,ierr);CHKERRQ(ierr)
call DMDAVecGetArrayF90(mech_grid,solution_current,u_current,ierr); CHKERRQ(ierr)
call DMDAVecGetArrayF90(mech_grid,solution_lastInc,u_lastInc,ierr); CHKERRQ(ierr)
call DMDAGetCorners(mech_grid,xstart,ystart,zstart,xend,yend,zend,ierr) ! local grid extent
CHKERRQ(ierr)
xend = xstart+xend-1
yend = ystart+yend-1
zend = zstart+zend-1
delta = geomSize/real(grid,pReal) ! grid spacing
detJ = product(delta) ! cell volume
BMat = reshape(real([-1.0_pReal/delta(1),-1.0_pReal/delta(2),-1.0_pReal/delta(3), &
1.0_pReal/delta(1),-1.0_pReal/delta(2),-1.0_pReal/delta(3), &
-1.0_pReal/delta(1), 1.0_pReal/delta(2),-1.0_pReal/delta(3), &
1.0_pReal/delta(1), 1.0_pReal/delta(2),-1.0_pReal/delta(3), &
-1.0_pReal/delta(1),-1.0_pReal/delta(2), 1.0_pReal/delta(3), &
1.0_pReal/delta(1),-1.0_pReal/delta(2), 1.0_pReal/delta(3), &
-1.0_pReal/delta(1), 1.0_pReal/delta(2), 1.0_pReal/delta(3), &
1.0_pReal/delta(1), 1.0_pReal/delta(2), 1.0_pReal/delta(3)],pReal), [3,8])/4.0_pReal ! shape function derivative matrix
HGMat = matmul(transpose(HGcomp),HGcomp) &
* HGCoeff*(delta(1)*delta(2) + delta(2)*delta(3) + delta(3)*delta(1))/16.0_pReal ! hourglass stabilization matrix
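! note: the +-1/(4 delta) entries of BMat are the derivatives of the standard trilinear
!       shape functions N_a = (1+xi_a xi)(1+eta_a eta)(1+zeta_a zeta)/8 evaluated at the
!       cell centre (one-point quadrature); HGMat penalizes the four hourglass modes
!       (the rows of HGcomp) that such a single integration point cannot resolve,
!       scaled by HGCoeff and the cell dimensions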
!--------------------------------------------------------------------------------------------------
! init fields
restart: if (restartInc > 0) then
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') 'reading values of increment ', restartInc, ' from file'
fileUnit = IO_open_jobFile_binary('F_aim')
read(fileUnit) F_aim; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aim_lastInc')
read(fileUnit) F_aim_lastInc; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aimDot')
read(fileUnit) F_aimDot; close(fileUnit)
write(rankStr,'(a1,i0)')'_',worldrank
fileUnit = IO_open_jobFile_binary('F'//trim(rankStr))
read(fileUnit) F; close (fileUnit)
fileUnit = IO_open_jobFile_binary('F_lastInc'//trim(rankStr))
read(fileUnit) F_lastInc; close (fileUnit)
fileUnit = IO_open_jobFile_binary('u'//trim(rankStr))
read(fileUnit) u_current; close (fileUnit)
fileUnit = IO_open_jobFile_binary('u_lastInc'//trim(rankStr))
read(fileUnit) u_lastInc; close (fileUnit)
elseif (restartInc == 0) then restart
F_lastInc = spread(spread(spread(math_I3,3,grid(1)),4,grid(2)),5,grid3) ! initialize to identity
F = spread(spread(spread(math_I3,3,grid(1)),4,grid(2)),5,grid3)
endif restart
materialpoint_F0 = reshape(F_lastInc, [3,3,1,product(grid(1:2))*grid3]) ! set starting condition for materialpoint_stressAndItsTangent
call Utilities_updateIPcoords(F)
call Utilities_constitutiveResponse(P_current,temp33_Real,C_volAvg,devNull, & ! stress field, stress avg, global average of stiffness and (min+max)/2
F, & ! target F
0.0_pReal, & ! time increment
math_I3) ! no rotation of boundary condition
call DMDAVecRestoreArrayF90(mech_grid,solution_current,u_current,ierr)
CHKERRQ(ierr)
call DMDAVecRestoreArrayF90(mech_grid,solution_lastInc,u_lastInc,ierr)
CHKERRQ(ierr)
restartRead: if (restartInc > 0_pInt) then
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') 'reading more values of increment ', restartInc, ' from file'
fileUnit = IO_open_jobFile_binary('C_volAvg')
read(fileUnit) C_volAvg; close(fileUnit)
fileUnit = IO_open_jobFile_binary('C_volAvgLastInv')
read(fileUnit) C_volAvgLastInc; close(fileUnit)
endif restartRead
end subroutine grid_mech_FEM_init
!--------------------------------------------------------------------------------------------------
!> @brief solution for the FEM scheme with internal iterations
!--------------------------------------------------------------------------------------------------
function grid_mech_FEM_solution(incInfoIn,timeinc,timeinc_old,stress_BC,rotation_BC) result(solution)
use IO, only: &
IO_error
use spectral_utilities, only: &
tBoundaryCondition, &
utilities_maskedCompliance
use FEsolving, only: &
restartWrite, &
terminallyIll
implicit none
!--------------------------------------------------------------------------------------------------
! input data for solution
character(len=*), intent(in) :: &
incInfoIn
real(pReal), intent(in) :: &
timeinc, & !< time increment of current solution
timeinc_old !< time increment of last successful increment
type(tBoundaryCondition), intent(in) :: &
stress_BC
real(pReal), dimension(3,3), intent(in) :: rotation_BC
type(tSolutionState) :: &
solution
!--------------------------------------------------------------------------------------------------
! PETSc Data
PetscErrorCode :: ierr
SNESConvergedReason :: reason
incInfo = incInfoIn
!--------------------------------------------------------------------------------------------------
! update stiffness (and gamma operator)
S = Utilities_maskedCompliance(rotation_BC,stress_BC%maskLogical,C_volAvg)
!--------------------------------------------------------------------------------------------------
! set module wide available data
params%stress_mask = stress_BC%maskFloat
params%stress_BC = stress_BC%values
params%rotation_BC = rotation_BC
params%timeinc = timeinc
params%timeincOld = timeinc_old
!--------------------------------------------------------------------------------------------------
! solve BVP
call SNESsolve(mech_snes,PETSC_NULL_VEC,solution_current,ierr);CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------
! check convergence
call SNESGetConvergedReason(mech_snes,reason,ierr);CHKERRQ(ierr)
solution%converged = reason > 0
solution%iterationsNeeded = totalIter
solution%termIll = terminallyIll
terminallyIll = .false.
end function grid_mech_FEM_solution
!--------------------------------------------------------------------------------------------------
!> @brief forwarding routine
!> @details find new boundary conditions and best F estimate for end of current timestep
!> possibly writing restart information, triggering of state increment in DAMASK, and updating of IPcoordinates
!--------------------------------------------------------------------------------------------------
subroutine grid_mech_FEM_forward(guess,timeinc,timeinc_old,loadCaseTime,deformation_BC,stress_BC,rotation_BC)
use math, only: &
math_mul33x33 ,&
math_rotate_backward33
use numerics, only: &
worldrank
use homogenization, only: &
materialpoint_F0
use mesh, only: &
grid, &
grid3
use CPFEM2, only: &
CPFEM_age
use spectral_utilities, only: &
utilities_updateIPcoords, &
tBoundaryCondition, &
cutBack
use IO, only: &
IO_open_jobFile_binary
use FEsolving, only: &
restartWrite
implicit none
logical, intent(in) :: &
guess
real(pReal), intent(in) :: &
timeinc_old, &
timeinc, &
loadCaseTime !< remaining time of current load case
type(tBoundaryCondition), intent(in) :: &
stress_BC, &
deformation_BC
real(pReal), dimension(3,3), intent(in) :: &
rotation_BC
PetscErrorCode :: ierr
integer :: fileUnit
character(len=32) :: rankStr
PetscScalar, pointer, dimension(:,:,:,:) :: &
u_current,u_lastInc
call DMDAVecGetArrayF90(mech_grid,solution_current,u_current,ierr); CHKERRQ(ierr)
call DMDAVecGetArrayF90(mech_grid,solution_lastInc,u_lastInc,ierr); CHKERRQ(ierr)
if (cutBack) then
C_volAvg = C_volAvgLastInc ! QUESTION: where is this required?
else
!--------------------------------------------------------------------------------------------------
! restart information for spectral solver
if (restartWrite) then ! QUESTION: where is this logical properly set?
write(6,'(/,a)') ' writing converged results for restart'
flush(6)
if (worldrank == 0) then
fileUnit = IO_open_jobFile_binary('C_volAvg','w')
write(fileUnit) C_volAvg; close(fileUnit)
fileUnit = IO_open_jobFile_binary('C_volAvgLastInv','w')
write(fileUnit) C_volAvgLastInc; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aim','w')
write(fileUnit) F_aim; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aim_lastInc','w')
write(fileUnit) F_aim_lastInc; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aimDot','w')
write(fileUnit) F_aimDot; close(fileUnit)
endif
write(rankStr,'(a1,i0)')'_',worldrank
fileUnit = IO_open_jobFile_binary('F'//trim(rankStr),'w')
write(fileUnit) F; close (fileUnit)
fileUnit = IO_open_jobFile_binary('F_lastInc'//trim(rankStr),'w')
write(fileUnit) F_lastInc; close (fileUnit)
fileUnit = IO_open_jobFile_binary('u'//trim(rankStr),'w')
write(fileUnit) u_current; close (fileUnit)
fileUnit = IO_open_jobFile_binary('u_lastInc'//trim(rankStr),'w')
write(fileUnit) u_lastInc; close (fileUnit)
endif
call CPFEM_age() ! age state and kinematics
call utilities_updateIPcoords(F)
C_volAvgLastInc = C_volAvg
F_aimDot = merge(stress_BC%maskFloat*(F_aim-F_aim_lastInc)/timeinc_old, 0.0_pReal, guess)
F_aim_lastInc = F_aim
!--------------------------------------------------------------------------------------------------
! calculate rate for aim
if (deformation_BC%myType=='l') then ! calculate F_aimDot from given L and current F
F_aimDot = &
F_aimDot + deformation_BC%maskFloat * math_mul33x33(deformation_BC%values, F_aim_lastInc)
elseif(deformation_BC%myType=='fdot') then ! F_aimDot is prescribed
F_aimDot = &
F_aimDot + deformation_BC%maskFloat * deformation_BC%values
elseif (deformation_BC%myType=='f') then ! aim at end of load case is prescribed
F_aimDot = &
F_aimDot + deformation_BC%maskFloat * (deformation_BC%values - F_aim_lastInc)/loadCaseTime
endif
if (guess) then
call VecWAXPY(solution_rate,-1.0,solution_lastInc,solution_current,ierr)
CHKERRQ(ierr)
call VecScale(solution_rate,1.0/timeinc_old,ierr); CHKERRQ(ierr)
else
call VecSet(solution_rate,0.0,ierr); CHKERRQ(ierr)
endif
call VecCopy(solution_current,solution_lastInc,ierr); CHKERRQ(ierr)
F_lastInc = F ! winding F forward
materialpoint_F0 = reshape(F_lastInc, [3,3,1,product(grid(1:2))*grid3]) ! set starting condition for materialpoint_stressAndItsTangent
endif
!--------------------------------------------------------------------------------------------------
! update average and local deformation gradients
F_aim = F_aim_lastInc + F_aimDot * timeinc
call VecAXPY(solution_current,timeinc,solution_rate,ierr); CHKERRQ(ierr)
call DMDAVecRestoreArrayF90(mech_grid,solution_current,u_current,ierr)
CHKERRQ(ierr)
call DMDAVecRestoreArrayF90(mech_grid,solution_lastInc,u_lastInc,ierr)
CHKERRQ(ierr)
end subroutine grid_mech_FEM_forward
!--------------------------------------------------------------------------------------------------
!> @brief convergence check
!--------------------------------------------------------------------------------------------------
subroutine converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,ierr)
use mesh
use spectral_utilities
use numerics, only: &
itmax, &
itmin, &
err_div_tolRel, &
err_div_tolAbs, &
err_stress_tolRel, &
err_stress_tolAbs
use FEsolving, only: &
terminallyIll
implicit none
SNES :: snes_local
PetscInt :: PETScIter
PetscReal :: &
xnorm, & ! not used
snorm, & ! not used
fnorm
SNESConvergedReason :: reason
PetscObject :: dummy
PetscErrorCode :: ierr
real(pReal) :: &
err_div, &
divTol, &
BCTol
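! the SNES residual norm fnorm is rescaled into the divergence measure err_div reported
! below; both tolerances are relative to the largest component of the average stress P_av,
! floored by the absolute values err_div_tolAbs and err_stress_tolAbs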
err_div = fnorm*sqrt(wgt)*geomSize(1)/scaledGeomSize(1)/detJ
divTol = max(maxval(abs(P_av))*err_div_tolRel ,err_div_tolAbs)
BCTol = max(maxval(abs(P_av))*err_stress_tolRel,err_stress_tolAbs)
if ((totalIter >= itmin .and. &
all([ err_div/divTol, &
err_BC /BCTol ] < 1.0_pReal)) &
.or. terminallyIll) then
reason = 1
elseif (totalIter >= itmax) then
reason = -1
else
reason = 0
endif
!--------------------------------------------------------------------------------------------------
! report
write(6,'(1/,a)') ' ... reporting .............................................................'
write(6,'(1/,a,f12.2,a,es8.2,a,es9.2,a)') ' error divergence = ', &
err_div/divTol, ' (',err_div,' / m, tol = ',divTol,')'
write(6,'(a,f12.2,a,es8.2,a,es9.2,a)') ' error stress BC = ', &
err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')'
write(6,'(/,a)') ' ==========================================================================='
flush(6)
end subroutine converged
!--------------------------------------------------------------------------------------------------
!> @brief forms the residual vector
!--------------------------------------------------------------------------------------------------
subroutine formResidual(da_local,x_local,f_local,dummy,ierr)
use numerics, only: &
itmax, &
itmin
use numerics, only: &
worldrank
use mesh, only: &
grid
use math, only: &
math_rotate_backward33, &
math_mul3333xx33
use debug, only: &
debug_level, &
debug_spectral, &
debug_spectralRotation
use spectral_utilities, only: &
utilities_constitutiveResponse
use IO, only: &
IO_intOut
use FEsolving, only: &
terminallyIll
use homogenization, only: &
materialpoint_dPdF
implicit none
DM :: da_local
Vec :: x_local, f_local
PetscScalar, pointer,dimension(:,:,:,:) :: x_scal, f_scal
PetscScalar, dimension(8,3) :: x_elem, f_elem
PetscInt :: i, ii, j, jj, k, kk, ctr, ele
real(pReal), dimension(3,3) :: &
deltaF_aim
PetscInt :: &
PETScIter, &
nfuncs
PetscObject :: dummy
PetscErrorCode :: ierr
real(pReal), dimension(3,3,3,3) :: devNull
call SNESGetNumberFunctionEvals(mech_snes,nfuncs,ierr); CHKERRQ(ierr)
call SNESGetIterationNumber(mech_snes,PETScIter,ierr); CHKERRQ(ierr)
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1_pInt ! new increment
!--------------------------------------------------------------------------------------------------
! begin of new iteration
newIteration: if (totalIter <= PETScIter) then
totalIter = totalIter + 1_pInt
write(6,'(1x,a,3(a,'//IO_intOut(itmax)//'))') &
trim(incInfo), ' @ Iteration ', itmin, '≤',totalIter+1, '≤', itmax
if (iand(debug_level(debug_spectral),debug_spectralRotation) /= 0) &
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim (lab) =', transpose(math_rotate_backward33(F_aim,params%rotation_BC))
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim =', transpose(F_aim)
flush(6)
endif newIteration
!--------------------------------------------------------------------------------------------------
! get deformation gradient
call DMDAVecGetArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
ctr = 0
do kk = 0, 1; do jj = 0, 1; do ii = 0, 1
ctr = ctr + 1
x_elem(ctr,1:3) = x_scal(0:2,i+ii,j+jj,k+kk)
enddo; enddo; enddo
ii = i-xstart+1; jj = j-ystart+1; kk = k-zstart+1
F(1:3,1:3,ii,jj,kk) = math_rotate_backward33(F_aim,params%rotation_BC) + transpose(matmul(BMat,x_elem))
enddo; enddo; enddo
call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------
! evaluate constitutive response
call Utilities_constitutiveResponse(P_current,&
P_av,C_volAvg,devNull, &
F,params%timeinc,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,PETSC_COMM_WORLD,ierr)
!--------------------------------------------------------------------------------------------------
! stress BC handling
F_aim_lastIter = F_aim
deltaF_aim = math_mul3333xx33(S, P_av - params%stress_BC)
F_aim = F_aim - deltaF_aim
err_BC = maxval(abs(params%stress_mask * (P_av - params%stress_BC))) ! mask = 0.0 when no stress bc
!--------------------------------------------------------------------------------------------------
! constructing residual
call VecSet(f_local,0.0,ierr);CHKERRQ(ierr)
call DMDAVecGetArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
call DMDAVecGetArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
ele = 0
do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
ctr = 0
do kk = 0, 1; do jj = 0, 1; do ii = 0, 1
ctr = ctr + 1
x_elem(ctr,1:3) = x_scal(0:2,i+ii,j+jj,k+kk)
enddo; enddo; enddo
ii = i-xstart+1; jj = j-ystart+1; kk = k-zstart+1
ele = ele + 1
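! nodal internal force of this cell: one-point quadrature of B^T P times the cell volume
! detJ, plus an hourglass restoring force from HGMat scaled by the mean of the three
! diagonal components of the material tangent dP/dF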
f_elem = matmul(transpose(BMat),transpose(P_current(1:3,1:3,ii,jj,kk)))*detJ + &
matmul(HGMat,x_elem)*(materialpoint_dPdF(1,1,1,1,1,ele) + &
materialpoint_dPdF(2,2,2,2,1,ele) + &
materialpoint_dPdF(3,3,3,3,1,ele))/3.0_pReal
ctr = 0
do kk = 0, 1; do jj = 0, 1; do ii = 0, 1
ctr = ctr + 1
f_scal(0:2,i+ii,j+jj,k+kk) = f_scal(0:2,i+ii,j+jj,k+kk) + f_elem(ctr,1:3)
enddo; enddo; enddo
enddo; enddo; enddo
call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------
! applying boundary conditions
call DMDAVecGetArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
if (zstart == 0) then
f_scal(0:2,xstart,ystart,zstart) = 0.0
f_scal(0:2,xend+1,ystart,zstart) = 0.0
f_scal(0:2,xstart,yend+1,zstart) = 0.0
f_scal(0:2,xend+1,yend+1,zstart) = 0.0
endif
if (zend + 1 == grid(3)) then
f_scal(0:2,xstart,ystart,zend+1) = 0.0
f_scal(0:2,xend+1,ystart,zend+1) = 0.0
f_scal(0:2,xstart,yend+1,zend+1) = 0.0
f_scal(0:2,xend+1,yend+1,zend+1) = 0.0
endif
call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
end subroutine formResidual
!--------------------------------------------------------------------------------------------------
!> @brief forms the FEM stiffness matrix
!--------------------------------------------------------------------------------------------------
subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
use mesh, only: &
mesh_ipCoordinates
use homogenization, only: &
materialpoint_dPdF
implicit none
DM :: da_local
Vec :: x_local, coordinates
Mat :: Jac_pre, Jac
MatStencil,dimension(4,24) :: row, col
PetscScalar,pointer,dimension(:,:,:,:) :: x_scal
PetscScalar,dimension(24,24) :: K_ele
PetscScalar,dimension(9,24) :: BMatFull
PetscInt :: i, ii, j, jj, k, kk, ctr, ele
PetscInt,dimension(3) :: rows
PetscScalar :: diag
PetscObject :: dummy
MatNullSpace :: matnull
PetscErrorCode :: ierr
BMatFull = 0.0
BMatFull(1:3,1 :8 ) = BMat
BMatFull(4:6,9 :16) = BMat
BMatFull(7:9,17:24) = BMat
call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,ierr); CHKERRQ(ierr)
call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,ierr); CHKERRQ(ierr)
call MatZeroEntries(Jac,ierr); CHKERRQ(ierr)
ele = 0
do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
ctr = 0
do kk = 0, 1; do jj = 0, 1; do ii = 0, 1
ctr = ctr + 1
col(MatStencil_i,ctr ) = i+ii
col(MatStencil_j,ctr ) = j+jj
col(MatStencil_k,ctr ) = k+kk
col(MatStencil_c,ctr ) = 0
col(MatStencil_i,ctr+8 ) = i+ii
col(MatStencil_j,ctr+8 ) = j+jj
col(MatStencil_k,ctr+8 ) = k+kk
col(MatStencil_c,ctr+8 ) = 1
col(MatStencil_i,ctr+16) = i+ii
col(MatStencil_j,ctr+16) = j+jj
col(MatStencil_k,ctr+16) = k+kk
col(MatStencil_c,ctr+16) = 2
enddo; enddo; enddo
row = col
ele = ele + 1
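! element stiffness: hourglass stabilization on the three displacement-component blocks
! plus the one-point-quadrature tangent B^T (dP/dF) B * detJ, with dP/dF reordered to the
! 9x9 layout matching BMatFull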
K_ele = 0.0
K_ele(1 :8 ,1 :8 ) = HGMat*(materialpoint_dPdF(1,1,1,1,1,ele) + &
materialpoint_dPdF(2,2,2,2,1,ele) + &
materialpoint_dPdF(3,3,3,3,1,ele))/3.0_pReal
K_ele(9 :16,9 :16) = HGMat*(materialpoint_dPdF(1,1,1,1,1,ele) + &
materialpoint_dPdF(2,2,2,2,1,ele) + &
materialpoint_dPdF(3,3,3,3,1,ele))/3.0_pReal
K_ele(17:24,17:24) = HGMat*(materialpoint_dPdF(1,1,1,1,1,ele) + &
materialpoint_dPdF(2,2,2,2,1,ele) + &
materialpoint_dPdF(3,3,3,3,1,ele))/3.0_pReal
K_ele = K_ele + &
matmul(transpose(BMatFull), &
matmul(reshape(reshape(materialpoint_dPdF(1:3,1:3,1:3,1:3,1,ele), &
shape=[3,3,3,3], order=[2,1,4,3]),shape=[9,9]),BMatFull))*detJ
call MatSetValuesStencil(Jac,24,row,24,col,K_ele,ADD_VALUES,ierr)
CHKERRQ(ierr)
enddo; enddo; enddo
call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------
! applying boundary conditions
rows = [0, 1, 2]
diag = (C_volAvg(1,1,1,1)/delta(1)**2.0_pReal + &
C_volAvg(2,2,2,2)/delta(2)**2.0_pReal + &
C_volAvg(3,3,3,3)/delta(3)**2.0_pReal)*detJ
call MatZeroRowsColumns(Jac,size(rows),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr)
CHKERRQ(ierr)
call DMGetGlobalVector(da_local,coordinates,ierr);CHKERRQ(ierr)
call DMDAVecGetArrayF90(da_local,coordinates,x_scal,ierr);CHKERRQ(ierr)
ele = 0
do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
ele = ele + 1
x_scal(0:2,i,j,k) = mesh_ipCoordinates(1:3,1,ele)
enddo; enddo; enddo
call DMDAVecRestoreArrayF90(da_local,coordinates,x_scal,ierr);CHKERRQ(ierr) ! initialize to undeformed coordinates (ToDo: use ip coordinates)
call MatNullSpaceCreateRigidBody(coordinates,matnull,ierr);CHKERRQ(ierr) ! get rigid body deformation modes
call DMRestoreGlobalVector(da_local,coordinates,ierr);CHKERRQ(ierr)
call MatSetNullSpace(Jac,matnull,ierr); CHKERRQ(ierr)
call MatSetNearNullSpace(Jac,matnull,ierr); CHKERRQ(ierr)
call MatNullSpaceDestroy(matnull,ierr); CHKERRQ(ierr)
end subroutine formJacobian
end module grid_mech_FEM


@ -19,13 +19,17 @@ module grid_mech_spectral_basic
implicit none
private
character (len=*), parameter, public :: &
GRID_MECH_SPECTRAL_BASIC_LABEL = 'basic'
!--------------------------------------------------------------------------------------------------
! derived types
type(tSolutionParams), private :: params
type, private :: tNumerics
logical :: &
update_gamma !< update gamma operator with current stiffness
end type tNumerics
type(tNumerics) :: num ! numerics parameters. Better name?
!--------------------------------------------------------------------------------------------------
! PETSc data
@ -35,7 +39,9 @@ module grid_mech_spectral_basic
!--------------------------------------------------------------------------------------------------
! common pointwise data
real(pReal), private, dimension(:,:,:,:,:), allocatable :: F_lastInc, Fdot
real(pReal), private, dimension(:,:,:,:,:), allocatable :: &
F_lastInc, &
Fdot
!--------------------------------------------------------------------------------------------------
! stress, stiffness and compliance average etc.
@ -44,9 +50,8 @@ module grid_mech_spectral_basic
F_aim = math_I3, & !< current prescribed deformation gradient
F_aim_lastInc = math_I3, & !< previous average deformation gradient
P_av = 0.0_pReal !< average 1st Piola--Kirchhoff stress
character(len=1024), private :: incInfo !< time and increment information
character(len=1024), private :: incInfo !< time and increment information
real(pReal), private, dimension(3,3,3,3) :: &
C_volAvg = 0.0_pReal, & !< current volume average stiffness
C_volAvgLastInc = 0.0_pReal, & !< previous volume average stiffness
@ -65,6 +70,9 @@ module grid_mech_spectral_basic
grid_mech_spectral_basic_init, &
grid_mech_spectral_basic_solution, &
grid_mech_spectral_basic_forward
private :: &
converged, &
formResidual
contains
@ -76,12 +84,10 @@ subroutine grid_mech_spectral_basic_init
IO_intOut, &
IO_error, &
IO_open_jobFile_binary
use debug, only: &
debug_level, &
debug_spectral, &
debug_spectralRestart
use FEsolving, only: &
restartInc
use config, only :&
config_numerics
use numerics, only: &
worldrank, &
worldsize, &
@ -107,7 +113,8 @@ subroutine grid_mech_spectral_basic_init
temp33_Real = 0.0_pReal
PetscErrorCode :: ierr
PetscScalar, pointer, dimension(:,:,:,:) :: F
PetscScalar, pointer, dimension(:,:,:,:) :: &
F ! pointer to solution data
PetscInt, dimension(worldsize) :: localK
integer :: fileUnit
character(len=1024) :: rankStr
@ -119,6 +126,8 @@ subroutine grid_mech_spectral_basic_init
write(6,'(/,a)') ' Shanthraj et al., International Journal of Plasticity 66:3145, 2015'
write(6,'(a)') ' https://doi.org/10.1016/j.ijplas.2014.02.006'
num%update_gamma = config_numerics%getInt('update_gamma',defaultVal=0) > 0
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
@ -152,23 +161,23 @@ subroutine grid_mech_spectral_basic_init
call DMsetFromOptions(da,ierr); CHKERRQ(ierr)
call DMsetUp(da,ierr); CHKERRQ(ierr)
call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 9, i.e. every def grad tensor)
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,grid_mech_spectral_basic_formResidual,PETSC_NULL_SNES,ierr)! residual vector of same shape as solution vector
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector
CHKERRQ(ierr)
call SNESsetConvergenceTest(snes,grid_mech_spectral_basic_converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr)! specify custom convergence check function "_converged"
call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr)! specify custom convergence check function "converged"
CHKERRQ(ierr)
call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments
!--------------------------------------------------------------------------------------------------
! init fields
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! get the data out of PETSc to work with
! init fields
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! places pointer on PETSc data
restart: if (restartInc > 0) then
if (iand(debug_level(debug_spectral),debug_spectralRestart) /= 0) then
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') &
'reading values of increment ', restartInc, ' from file'
flush(6)
endif
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') ' reading values of increment ', restartInc, ' from file'
fileUnit = IO_open_jobFile_binary('F_aim')
read(fileUnit) F_aim; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aim_lastInc')
read(fileUnit) F_aim_lastInc; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aimDot')
read(fileUnit) F_aimDot; close(fileUnit)
@ -179,12 +188,6 @@ subroutine grid_mech_spectral_basic_init
fileUnit = IO_open_jobFile_binary('F_lastInc'//trim(rankStr))
read(fileUnit) F_lastInc; close (fileUnit)
F_aim = reshape(sum(sum(sum(F,dim=4),dim=3),dim=2) * wgt, [3,3]) ! average of F
call MPI_Allreduce(MPI_IN_PLACE,F_aim,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
if(ierr /=0) call IO_error(894, ext_msg='F_aim')
F_aim_lastInc = sum(sum(sum(F_lastInc,dim=5),dim=4),dim=3) * wgt ! average of F_lastInc
call MPI_Allreduce(MPI_IN_PLACE,F_aim_lastInc,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
if(ierr /=0) call IO_error(894, ext_msg='F_aim_lastInc')
elseif (restartInc == 0) then restart
F_lastInc = spread(spread(spread(math_I3,3,grid(1)),4,grid(2)),5,grid3) ! initialize to identity
F = reshape(F_lastInc,[9,grid(1),grid(2),grid3])
@ -196,14 +199,10 @@ subroutine grid_mech_spectral_basic_init
reshape(F,shape(F_lastInc)), & ! target F
0.0_pReal, & ! time increment
math_I3) ! no rotation of boundary condition
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! write data back to PETSc
! QUESTION: why not writing back right after reading (l.189)?
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! deassociate pointer
restartRead: if (restartInc > 0) then
if (iand(debug_level(debug_spectral),debug_spectralRestart) /= 0) &
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') &
'reading more values of increment ', restartInc, ' from file'
flush(6)
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') 'reading more values of increment ', restartInc, ' from file'
fileUnit = IO_open_jobFile_binary('C_volAvg')
read(fileUnit) C_volAvg; close(fileUnit)
fileUnit = IO_open_jobFile_binary('C_volAvgLastInv')
@ -218,11 +217,9 @@ end subroutine grid_mech_spectral_basic_init
!--------------------------------------------------------------------------------------------------
!> @brief solution for the Basic scheme with internal iterations
!> @brief solution for the basic scheme with internal iterations
!--------------------------------------------------------------------------------------------------
function grid_mech_spectral_basic_solution(incInfoIn,timeinc,timeinc_old,stress_BC,rotation_BC) result(solution)
use numerics, only: &
update_gamma
use spectral_utilities, only: &
tBoundaryCondition, &
utilities_maskedCompliance, &
@ -245,7 +242,6 @@ function grid_mech_spectral_basic_solution(incInfoIn,timeinc,timeinc_old,stress_
real(pReal), dimension(3,3), intent(in) :: rotation_BC
type(tSolutionState) :: &
solution
!--------------------------------------------------------------------------------------------------
! PETSc Data
PetscErrorCode :: ierr
@ -256,7 +252,7 @@ function grid_mech_spectral_basic_solution(incInfoIn,timeinc,timeinc_old,stress_
!--------------------------------------------------------------------------------------------------
! update stiffness (and gamma operator)
S = Utilities_maskedCompliance(rotation_BC,stress_BC%maskLogical,C_volAvg)
if (update_gamma) call Utilities_updateGamma(C_minMaxAvg,restartWrite)
if (num%update_gamma) call Utilities_updateGamma(C_minMaxAvg,restartWrite)
!--------------------------------------------------------------------------------------------------
! set module wide available data
@ -279,155 +275,9 @@ function grid_mech_spectral_basic_solution(incInfoIn,timeinc,timeinc_old,stress_
solution%termIll = terminallyIll
terminallyIll = .false.
end function grid_mech_spectral_basic_solution
!--------------------------------------------------------------------------------------------------
!> @brief forms the basic residual vector
!--------------------------------------------------------------------------------------------------
subroutine grid_mech_spectral_basic_formResidual(in, F, &
residuum, dummy, ierr)
use numerics, only: &
itmax, &
itmin
use mesh, only: &
grid, &
grid3
use math, only: &
math_rotate_backward33, &
math_mul3333xx33
use debug, only: &
debug_level, &
debug_spectral, &
debug_spectralRotation
use spectral_utilities, only: &
tensorField_real, &
utilities_FFTtensorForward, &
utilities_fourierGammaConvolution, &
utilities_FFTtensorBackward, &
utilities_constitutiveResponse, &
utilities_divergenceRMS
use IO, only: &
IO_intOut
use FEsolving, only: &
terminallyIll
implicit none
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work)
PetscScalar, dimension(3,3,XG_RANGE,YG_RANGE,ZG_RANGE), &
intent(in) :: F !< deformation gradient field
PetscScalar, dimension(3,3,X_RANGE,Y_RANGE,Z_RANGE), &
intent(out) :: residuum !< residuum field
real(pReal), dimension(3,3) :: &
deltaF_aim
PetscInt :: &
PETScIter, &
nfuncs
PetscObject :: dummy
PetscErrorCode :: ierr
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr)
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr)
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
!--------------------------------------------------------------------------------------------------
! begin of new iteration
newIteration: if (totalIter <= PETScIter) then
totalIter = totalIter + 1
write(6,'(1x,a,3(a,'//IO_intOut(itmax)//'))') &
trim(incInfo), ' @ Iteration ', itmin, '≤',totalIter, '≤', itmax
if (iand(debug_level(debug_spectral),debug_spectralRotation) /= 0) &
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim (lab) =', transpose(math_rotate_backward33(F_aim,params%rotation_BC))
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim =', transpose(F_aim)
flush(6)
endif newIteration
!--------------------------------------------------------------------------------------------------
! evaluate constitutive response
call Utilities_constitutiveResponse(residuum, & ! "residuum" gets field of first PK stress (to save memory)
P_av,C_volAvg,C_minMaxAvg, &
F,params%timeinc,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,PETSC_COMM_WORLD,ierr)
!--------------------------------------------------------------------------------------------------
! stress BC handling
deltaF_aim = math_mul3333xx33(S, P_av - params%stress_BC)
F_aim = F_aim - deltaF_aim
err_BC = maxval(abs(params%stress_mask * (P_av - params%stress_BC))) ! mask = 0.0 when no stress bc
!--------------------------------------------------------------------------------------------------
! updated deformation gradient using fix point algorithm of basic scheme
tensorField_real = 0.0_pReal
tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) = residuum ! store fPK field for subsequent FFT forward transform
call utilities_FFTtensorForward ! FFT forward of global "tensorField_real"
err_div = Utilities_divergenceRMS() ! divRMS of tensorField_fourier for later use
call utilities_fourierGammaConvolution(math_rotate_backward33(deltaF_aim,params%rotation_BC)) ! convolution of Gamma and tensorField_fourier, with arg
call utilities_FFTtensorBackward ! FFT backward of global tensorField_fourier
!--------------------------------------------------------------------------------------------------
! constructing residual
residuum = tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) ! Gamma*P gives correction towards div(P) = 0, so needs to be zero, too
end subroutine grid_mech_spectral_basic_formResidual
!--------------------------------------------------------------------------------------------------
!> @brief convergence check
!--------------------------------------------------------------------------------------------------
subroutine grid_mech_spectral_basic_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,ierr)
use numerics, only: &
itmax, &
itmin, &
err_div_tolRel, &
err_div_tolAbs, &
err_stress_tolRel, &
err_stress_tolAbs
use FEsolving, only: &
terminallyIll
implicit none
SNES :: snes_local
PetscInt :: PETScIter
PetscReal :: &
xnorm, & ! not used
snorm, & ! not used
fnorm ! not used
SNESConvergedReason :: reason
PetscObject :: dummy
PetscErrorCode :: ierr
real(pReal) :: &
divTol, &
BCTol
divTol = max(maxval(abs(P_av))*err_div_tolRel ,err_div_tolAbs)
BCTol = max(maxval(abs(P_av))*err_stress_tolRel,err_stress_tolAbs)
converged: if ((totalIter >= itmin .and. &
all([ err_div/divTol, &
err_BC /BCTol ] < 1.0_pReal)) &
.or. terminallyIll) then
reason = 1
elseif (totalIter >= itmax) then converged
reason = -1
else converged
reason = 0
endif converged
!--------------------------------------------------------------------------------------------------
! report
write(6,'(1/,a)') ' ... reporting .............................................................'
write(6,'(1/,a,f12.2,a,es8.2,a,es9.2,a)') ' error divergence = ', &
err_div/divTol, ' (',err_div,' / m, tol = ',divTol,')'
write(6,'(a,f12.2,a,es8.2,a,es9.2,a)') ' error stress BC = ', &
err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')'
write(6,'(/,a)') ' ==========================================================================='
flush(6)
end subroutine grid_mech_spectral_basic_converged
!--------------------------------------------------------------------------------------------------
!> @brief forwarding routine
!> @details find new boundary conditions and best F estimate for end of current timestep
@ -492,6 +342,10 @@ subroutine grid_mech_spectral_basic_forward(guess,timeinc,timeinc_old,loadCaseTi
write(fileUnit) C_volAvg; close(fileUnit)
fileUnit = IO_open_jobFile_binary('C_volAvgLastInv','w')
write(fileUnit) C_volAvgLastInc; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aim','w')
write(fileUnit) F_aim; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aim_lastInc','w')
write(fileUnit) F_aim_lastInc; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aimDot','w')
write(fileUnit) F_aimDot; close(fileUnit)
endif
@ -526,7 +380,7 @@ subroutine grid_mech_spectral_basic_forward(guess,timeinc,timeinc_old,loadCaseTi
endif
Fdot = Utilities_calculateRate(guess, &
Fdot = utilities_calculateRate(guess, &
F_lastInc,reshape(F,[3,3,grid(1),grid(2),grid3]),timeinc_old, &
math_rotate_backward33(F_aimDot,rotation_BC))
F_lastInc = reshape(F, [3,3,grid(1),grid(2),grid3]) ! winding F forward
@ -542,4 +396,151 @@ subroutine grid_mech_spectral_basic_forward(guess,timeinc,timeinc_old,loadCaseTi
end subroutine grid_mech_spectral_basic_forward
!--------------------------------------------------------------------------------------------------
!> @brief convergence check
!--------------------------------------------------------------------------------------------------
subroutine converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,ierr)
use numerics, only: &
itmax, &
itmin, &
err_div_tolRel, &
err_div_tolAbs, &
err_stress_tolRel, &
err_stress_tolAbs
use FEsolving, only: &
terminallyIll
implicit none
SNES :: snes_local
PetscInt :: PETScIter
PetscReal :: &
xnorm, & ! not used
snorm, & ! not used
fnorm ! not used
SNESConvergedReason :: reason
PetscObject :: dummy
PetscErrorCode :: ierr
real(pReal) :: &
divTol, &
BCTol
divTol = max(maxval(abs(P_av))*err_div_tolRel ,err_div_tolAbs)
BCTol = max(maxval(abs(P_av))*err_stress_tolRel,err_stress_tolAbs)
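! note: PETSc's SNESConvergedReason convention applies below, a positive value signals convergence,
! a negative value divergence/abort, and zero keeps iterating; both tolerances above combine a
! relative part scaled by the current average stress with an absolute floor, and convergence is
! only accepted after at least itmin iterations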
if ((totalIter >= itmin .and. &
all([ err_div/divTol, &
err_BC /BCTol ] < 1.0_pReal)) &
.or. terminallyIll) then
reason = 1
elseif (totalIter >= itmax) then
reason = -1
else
reason = 0
endif
!--------------------------------------------------------------------------------------------------
! report
write(6,'(1/,a)') ' ... reporting .............................................................'
write(6,'(1/,a,f12.2,a,es8.2,a,es9.2,a)') ' error divergence = ', &
err_div/divTol, ' (',err_div,' / m, tol = ',divTol,')'
write(6,'(a,f12.2,a,es8.2,a,es9.2,a)') ' error stress BC = ', &
err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')'
write(6,'(/,a)') ' ==========================================================================='
flush(6)
end subroutine converged
!--------------------------------------------------------------------------------------------------
!> @brief forms the basic residual vector
!--------------------------------------------------------------------------------------------------
subroutine formResidual(in, F, &
residuum, dummy, ierr)
use numerics, only: &
itmax, &
itmin
use mesh, only: &
grid, &
grid3
use math, only: &
math_rotate_backward33, &
math_mul3333xx33
use debug, only: &
debug_level, &
debug_spectral, &
debug_spectralRotation
use spectral_utilities, only: &
tensorField_real, &
utilities_FFTtensorForward, &
utilities_fourierGammaConvolution, &
utilities_FFTtensorBackward, &
utilities_constitutiveResponse, &
utilities_divergenceRMS
use IO, only: &
IO_intOut
use FEsolving, only: &
terminallyIll
implicit none
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work)
PetscScalar, dimension(3,3,XG_RANGE,YG_RANGE,ZG_RANGE), &
intent(in) :: F !< deformation gradient field
PetscScalar, dimension(3,3,X_RANGE,Y_RANGE,Z_RANGE), &
intent(out) :: residuum !< residuum field
real(pReal), dimension(3,3) :: &
deltaF_aim
PetscInt :: &
PETScIter, &
nfuncs
PetscObject :: dummy
PetscErrorCode :: ierr
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr)
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr)
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
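! totalIter is a module-wide counter of reported iterations within the current increment; resetting
! it to -1 at the first function evaluation of a new increment makes the newIteration block below
! fire already for the very first call (PETScIter == 0)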
!--------------------------------------------------------------------------------------------------
! begin of new iteration
newIteration: if (totalIter <= PETScIter) then
totalIter = totalIter + 1
write(6,'(1x,a,3(a,'//IO_intOut(itmax)//'))') &
trim(incInfo), ' @ Iteration ', itmin, '≤',totalIter, '≤', itmax
if (iand(debug_level(debug_spectral),debug_spectralRotation) /= 0) &
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim (lab) =', transpose(math_rotate_backward33(F_aim,params%rotation_BC))
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim =', transpose(F_aim)
flush(6)
endif newIteration
!--------------------------------------------------------------------------------------------------
! evaluate constitutive response
call Utilities_constitutiveResponse(residuum, & ! "residuum" gets field of first PK stress (to save memory)
P_av,C_volAvg,C_minMaxAvg, &
F,params%timeinc,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,PETSC_COMM_WORLD,ierr)
!--------------------------------------------------------------------------------------------------
! stress BC handling
deltaF_aim = math_mul3333xx33(S, P_av - params%stress_BC)
F_aim = F_aim - deltaF_aim
err_BC = maxval(abs(params%stress_mask * (P_av - params%stress_BC))) ! mask = 0.0 when no stress bc
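! in tensor notation the three lines above implement
!   deltaF_aim = S : (P_av - P_BC),   F_aim <- F_aim - deltaF_aim,   err_BC = max|mask*(P_av - P_BC)|
! with S the masked average compliance, so only stress-controlled components correct F_aim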
!--------------------------------------------------------------------------------------------------
! updated deformation gradient using fix point algorithm of basic scheme
tensorField_real = 0.0_pReal
tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) = residuum ! store fPK field for subsequent FFT forward transform
call utilities_FFTtensorForward ! FFT forward of global "tensorField_real"
err_div = Utilities_divergenceRMS() ! divRMS of tensorField_fourier for later use
call utilities_fourierGammaConvolution(math_rotate_backward33(deltaF_aim,params%rotation_BC)) ! convolution of Gamma and tensorField_fourier, with arg
call utilities_FFTtensorBackward ! FFT backward of global tensorField_fourier
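! sketch of the fix-point step of the basic scheme: the stress field is multiplied by the Gamma
! operator in Fourier space (Gamma_hat : P_hat); the argument of the convolution presumably sets
! the zero-frequency (average) component to the rotated deltaF_aim, and the back transform yields
! the correction field that is returned as the residuum below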
!--------------------------------------------------------------------------------------------------
! constructing residual
residuum = tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) ! Gamma*P gives correction towards div(P) = 0, so needs to be zero, too
end subroutine formResidual
end module grid_mech_spectral_basic
@@ -2,7 +2,7 @@
!> @author Pratheek Shanthraj, Max-Planck-Institut für Eisenforschung GmbH
!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
!> @author Philip Eisenlohr, Max-Planck-Institut für Eisenforschung GmbH
!> @brief Polarisation scheme solver
!> @brief Grid solver for mechanics: Spectral Polarisation
!--------------------------------------------------------------------------------------------------
module grid_mech_spectral_polarisation
#include <petsc/finclude/petscsnes.h>
@@ -10,7 +10,6 @@ module grid_mech_spectral_polarisation
use PETScdmda
use PETScsnes
use prec, only: &
pInt, &
pReal
use math, only: &
math_I3
@@ -20,14 +19,18 @@ module grid_mech_spectral_polarisation
implicit none
private
character (len=*), parameter, public :: &
GRID_MECH_SPECTRAL_POLARISATION_LABEL = 'polarisation'
!--------------------------------------------------------------------------------------------------
! derived types
type(tSolutionParams), private :: params
type(tSolutionParams), private :: params
type, private :: tNumerics
logical :: &
update_gamma !< update gamma operator with current stiffness
end type tNumerics
type(tNumerics) :: num ! numerics parameters. Better name?
!--------------------------------------------------------------------------------------------------
! PETSc data
DM, private :: da
@@ -49,9 +52,9 @@ module grid_mech_spectral_polarisation
F_aim = math_I3, & !< current prescribed deformation gradient
F_aim_lastInc = math_I3, & !< previous average deformation gradient
F_av = 0.0_pReal, & !< average incompatible def grad field
P_av = 0.0_pReal, & !< average 1st Piola--Kirchhoff stress
P_avLastEval = 0.0_pReal !< average 1st Piola--Kirchhoff stress last call of CPFEM_general
character(len=1024), private :: incInfo !< time and increment information
P_av = 0.0_pReal !< average 1st Piola--Kirchhoff stress
character(len=1024), private :: incInfo !< time and increment information
real(pReal), private, dimension(3,3,3,3) :: &
C_volAvg = 0.0_pReal, & !< current volume average stiffness
C_volAvgLastInc = 0.0_pReal, & !< previous volume average stiffness
@@ -66,13 +69,16 @@ module grid_mech_spectral_polarisation
err_curl, & !< RMS of curl of F
err_div !< RMS of div of P
integer(pInt), private :: &
totalIter = 0_pInt !< total iteration in current increment
integer, private :: &
totalIter = 0 !< total iteration in current increment
public :: &
grid_mech_spectral_polarisation_init, &
grid_mech_spectral_polarisation_solution, &
grid_mech_spectral_polarisation_forward
private :: &
converged, &
formResidual
contains
@@ -84,12 +90,10 @@ subroutine grid_mech_spectral_polarisation_init
IO_intOut, &
IO_error, &
IO_open_jobFile_binary
use debug, only: &
debug_level, &
debug_spectral, &
debug_spectralRestart
use FEsolving, only: &
restartInc
use config, only :&
config_numerics
use numerics, only: &
worldrank, &
worldsize, &
@@ -99,9 +103,9 @@ subroutine grid_mech_spectral_polarisation_init
use DAMASK_interface, only: &
getSolverJobName
use spectral_utilities, only: &
Utilities_constitutiveResponse, &
Utilities_updateGamma, &
Utilities_updateIPcoords, &
utilities_constitutiveResponse, &
utilities_updateGamma, &
utilities_updateIPcoords, &
wgt
use mesh, only: &
grid, &
@@ -123,10 +127,12 @@ subroutine grid_mech_spectral_polarisation_init
integer :: fileUnit
character(len=1024) :: rankStr
write(6,'(/,a)') ' <<<+- DAMASK_spectral_solverPolarisation init -+>>>'
write(6,'(/,a)') ' <<<+- grid_mech_spectral_polarisation init -+>>>'
write(6,'(/,a)') ' Shanthraj et al., International Journal of Plasticity 66:31–45, 2015'
write(6,'(a)') ' https://doi.org/10.1016/j.ijplas.2014.02.006'
num%update_gamma = config_numerics%getInt('update_gamma',defaultVal=0) > 0
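! update_gamma is read as an integer flag from the numerics configuration (default 0); any value
! larger than 0 makes the solver recompute the Gamma operator with the current stiffness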
!--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc
@@ -164,7 +170,7 @@ subroutine grid_mech_spectral_polarisation_init
call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 18, i.e. every def grad tensor)
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector
CHKERRQ(ierr)
call SNESsetConvergenceTest(snes,grid_mech_spectral_polarisation_converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "_converged"
call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "converged"
CHKERRQ(ierr)
call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments
@@ -173,13 +179,14 @@ subroutine grid_mech_spectral_polarisation_init
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! places pointer on PETSc data
F => FandF_tau( 0: 8,:,:,:)
F_tau => FandF_tau( 9:17,:,:,:)
restart: if (restartInc > 0) then
if (iand(debug_level(debug_spectral),debug_spectralRestart) /= 0) then
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') &
'reading values of increment ', restartInc, ' from file'
flush(6)
endif
restart: if (restartInc > 0) then
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') ' reading values of increment ', restartInc, ' from file'
fileUnit = IO_open_jobFile_binary('F_aim')
read(fileUnit) F_aim; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aim_lastInc')
read(fileUnit) F_aim_lastInc; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aimDot')
read(fileUnit) F_aimDot; close(fileUnit)
@@ -189,19 +196,12 @@ subroutine grid_mech_spectral_polarisation_init
read(fileUnit) F; close (fileUnit)
fileUnit = IO_open_jobFile_binary('F_lastInc'//trim(rankStr))
read(fileUnit) F_lastInc; close (fileUnit)
fileUnit = IO_open_jobFile_binary('F_tau'//trim(rankStr))
read(fileUnit) F_tau; close (fileUnit)
fileUnit = IO_open_jobFile_binary('F_tau_lastInc'//trim(rankStr))
read(fileUnit) F_tau_lastInc; close (fileUnit)
F_aim = reshape(sum(sum(sum(F,dim=4),dim=3),dim=2) * wgt, [3,3]) ! average of F
call MPI_Allreduce(MPI_IN_PLACE,F_aim,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='F_aim')
F_aim_lastInc = sum(sum(sum(F_lastInc,dim=5),dim=4),dim=3) * wgt ! average of F_lastInc
call MPI_Allreduce(MPI_IN_PLACE,F_aim_lastInc,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='F_aim_lastInc')
elseif (restartInc == 0_pInt) then restart
elseif (restartInc == 0) then restart
F_lastInc = spread(spread(spread(math_I3,3,grid(1)),4,grid(2)),5,grid3) ! initialize to identity
F = reshape(F_lastInc,[9,grid(1),grid(2),grid3])
F_tau = 2.0_pReal*F
@@ -214,15 +214,10 @@ subroutine grid_mech_spectral_polarisation_init
reshape(F,shape(F_lastInc)), & ! target F
0.0_pReal, & ! time increment
math_I3) ! no rotation of boundary condition
nullify(F)
nullify(F_tau)
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! write data back to PETSc
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! deassociate pointer
restartRead: if (restartInc > 0_pInt) then
if (iand(debug_level(debug_spectral),debug_spectralRestart)/= 0 .and. worldrank == 0_pInt) &
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') &
'reading more values of increment ', restartInc, ' from file'
flush(6)
restartRead: if (restartInc > 0) then
write(6,'(/,a,'//IO_intOut(restartInc)//',a)') ' reading more values of increment ', restartInc, ' from file'
fileUnit = IO_open_jobFile_binary('C_volAvg')
read(fileUnit) C_volAvg; close(fileUnit)
fileUnit = IO_open_jobFile_binary('C_volAvgLastInv')
@@ -242,16 +237,12 @@ end subroutine grid_mech_spectral_polarisation_init
!> @brief solution for the Polarisation scheme with internal iterations
!--------------------------------------------------------------------------------------------------
function grid_mech_spectral_polarisation_solution(incInfoIn,timeinc,timeinc_old,stress_BC,rotation_BC) result(solution)
use IO, only: &
IO_error
use numerics, only: &
update_gamma
use math, only: &
math_invSym3333
use spectral_utilities, only: &
tBoundaryCondition, &
Utilities_maskedCompliance, &
Utilities_updateGamma
utilities_maskedCompliance, &
utilities_updateGamma
use FEsolving, only: &
restartWrite, &
terminallyIll
@@ -280,14 +271,14 @@ function grid_mech_spectral_polarisation_solution(incInfoIn,timeinc,timeinc_old,
!--------------------------------------------------------------------------------------------------
! update stiffness (and gamma operator)
S = Utilities_maskedCompliance(rotation_BC,stress_BC%maskLogical,C_volAvg)
if (update_gamma) then
call Utilities_updateGamma(C_minMaxAvg,restartWrite)
if (num%update_gamma) then
call utilities_updateGamma(C_minMaxAvg,restartWrite)
C_scale = C_minMaxAvg
S_scale = math_invSym3333(C_minMaxAvg)
endif
!--------------------------------------------------------------------------------------------------
! set module wide availabe data
! set module wide available data
params%stress_mask = stress_BC%maskFloat
params%stress_BC = stress_BC%values
params%rotation_BC = rotation_BC
@@ -306,232 +297,10 @@ function grid_mech_spectral_polarisation_solution(incInfoIn,timeinc,timeinc_old,
solution%iterationsNeeded = totalIter
solution%termIll = terminallyIll
terminallyIll = .false.
if (reason == -4) call IO_error(893_pInt) ! MPI error
end function grid_mech_spectral_polarisation_solution
!--------------------------------------------------------------------------------------------------
!> @brief forms the Polarisation residual vector
!--------------------------------------------------------------------------------------------------
subroutine formResidual(in, & ! DMDA info (needs to be named "in" for XRANGE, etc. macros to work)
FandF_tau, & ! defgrad fields on grid
residuum, & ! residuum fields on grid
dummy, &
ierr)
use numerics, only: &
itmax, &
itmin, &
polarAlpha, &
polarBeta
use mesh, only: &
grid, &
grid3
use IO, only: &
IO_intOut
use math, only: &
math_rotate_backward33, &
math_mul3333xx33, &
math_invSym3333, &
math_mul33x33
use debug, only: &
debug_level, &
debug_spectral, &
debug_spectralRotation
use spectral_utilities, only: &
wgt, &
tensorField_real, &
utilities_FFTtensorForward, &
utilities_fourierGammaConvolution, &
utilities_FFTtensorBackward, &
Utilities_constitutiveResponse, &
Utilities_divergenceRMS, &
Utilities_curlRMS
use homogenization, only: &
materialpoint_dPdF
use FEsolving, only: &
terminallyIll
implicit none
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in
PetscScalar, &
target, dimension(3,3,2, XG_RANGE,YG_RANGE,ZG_RANGE), intent(in) :: FandF_tau
PetscScalar, &
target, dimension(3,3,2, X_RANGE, Y_RANGE, Z_RANGE), intent(out) :: residuum
PetscScalar, pointer, dimension(:,:,:,:,:) :: &
F, &
F_tau, &
residual_F, &
residual_F_tau
PetscInt :: &
PETScIter, &
nfuncs
PetscObject :: dummy
PetscErrorCode :: ierr
integer(pInt) :: &
i, j, k, e
F => FandF_tau(1:3,1:3,1,&
XG_RANGE,YG_RANGE,ZG_RANGE)
F_tau => FandF_tau(1:3,1:3,2,&
XG_RANGE,YG_RANGE,ZG_RANGE)
residual_F => residuum(1:3,1:3,1,&
X_RANGE, Y_RANGE, Z_RANGE)
residual_F_tau => residuum(1:3,1:3,2,&
X_RANGE, Y_RANGE, Z_RANGE)
F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,F_av,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr)
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr)
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1_pInt ! new increment
!--------------------------------------------------------------------------------------------------
! begin of new iteration
newIteration: if (totalIter <= PETScIter) then
totalIter = totalIter + 1_pInt
write(6,'(1x,a,3(a,'//IO_intOut(itmax)//'))') &
trim(incInfo), ' @ Iteration ', itmin, '≤',totalIter, '≤', itmax
if (iand(debug_level(debug_spectral),debug_spectralRotation) /= 0) &
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim (lab) =', transpose(math_rotate_backward33(F_aim,params%rotation_BC))
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim =', transpose(F_aim)
flush(6)
endif newIteration
!--------------------------------------------------------------------------------------------------
!
tensorField_real = 0.0_pReal
do k = 1_pInt, grid3; do j = 1_pInt, grid(2); do i = 1_pInt, grid(1)
tensorField_real(1:3,1:3,i,j,k) = &
polarBeta*math_mul3333xx33(C_scale,F(1:3,1:3,i,j,k) - math_I3) -&
polarAlpha*math_mul33x33(F(1:3,1:3,i,j,k), &
math_mul3333xx33(C_scale,F_tau(1:3,1:3,i,j,k) - F(1:3,1:3,i,j,k) - math_I3))
enddo; enddo; enddo
!--------------------------------------------------------------------------------------------------
! doing convolution in Fourier space
call utilities_FFTtensorForward()
call utilities_fourierGammaConvolution(math_rotate_backward33(polarBeta*F_aim,params%rotation_BC))
call utilities_FFTtensorBackward()
!--------------------------------------------------------------------------------------------------
! constructing residual
residual_F_tau = polarBeta*F - tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3)
!--------------------------------------------------------------------------------------------------
! evaluate constitutive response
P_avLastEval = P_av
call Utilities_constitutiveResponse(residual_F,P_av,C_volAvg,C_minMaxAvg, &
F - residual_F_tau/polarBeta,params%timeinc,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,PETSC_COMM_WORLD,ierr)
!--------------------------------------------------------------------------------------------------
! calculate divergence
tensorField_real = 0.0_pReal
tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) = residual_F !< stress field in disguise
call utilities_FFTtensorForward()
err_div = Utilities_divergenceRMS() !< root mean squared error in divergence of stress
!--------------------------------------------------------------------------------------------------
! constructing residual
e = 0_pInt
do k = 1_pInt, grid3; do j = 1_pInt, grid(2); do i = 1_pInt, grid(1)
e = e + 1_pInt
residual_F(1:3,1:3,i,j,k) = &
math_mul3333xx33(math_invSym3333(materialpoint_dPdF(1:3,1:3,1:3,1:3,1,e) + C_scale), &
residual_F(1:3,1:3,i,j,k) - math_mul33x33(F(1:3,1:3,i,j,k), &
math_mul3333xx33(C_scale,F_tau(1:3,1:3,i,j,k) - F(1:3,1:3,i,j,k) - math_I3))) &
+ residual_F_tau(1:3,1:3,i,j,k)
enddo; enddo; enddo
!--------------------------------------------------------------------------------------------------
! calculating curl
tensorField_real = 0.0_pReal
tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) = F
call utilities_FFTtensorForward()
err_curl = Utilities_curlRMS()
nullify(F)
nullify(F_tau)
nullify(residual_F)
nullify(residual_F_tau)
end subroutine formResidual
!--------------------------------------------------------------------------------------------------
!> @brief convergence check
!--------------------------------------------------------------------------------------------------
subroutine grid_mech_spectral_polarisation_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,ierr)
use numerics, only: &
itmax, &
itmin, &
err_div_tolRel, &
err_div_tolAbs, &
err_curl_tolRel, &
err_curl_tolAbs, &
err_stress_tolRel, &
err_stress_tolAbs
use math, only: &
math_mul3333xx33
use FEsolving, only: &
terminallyIll
implicit none
SNES :: snes_local
PetscInt :: PETScIter
PetscReal :: &
xnorm, &
snorm, &
fnorm
SNESConvergedReason :: reason
PetscObject :: dummy
PetscErrorCode :: ierr
real(pReal) :: &
curlTol, &
divTol, &
BCTol
!--------------------------------------------------------------------------------------------------
! stress BC handling
F_aim = F_aim - math_mul3333xx33(S, ((P_av - params%stress_BC))) ! S = 0.0 for no bc
err_BC = maxval(abs((1.0_pReal-params%stress_mask) * math_mul3333xx33(C_scale,F_aim-F_av) + &
params%stress_mask * (P_av-params%stress_BC))) ! mask = 0.0 for no bc
!--------------------------------------------------------------------------------------------------
! error calculation
curlTol = max(maxval(abs(F_aim-math_I3))*err_curl_tolRel ,err_curl_tolAbs)
divTol = max(maxval(abs(P_av)) *err_div_tolRel ,err_div_tolAbs)
BCTol = max(maxval(abs(P_av)) *err_stress_tolRel,err_stress_tolAbs)
converged: if ((totalIter >= itmin .and. &
all([ err_div /divTol, &
err_curl/curlTol, &
err_BC /BCTol ] < 1.0_pReal)) &
.or. terminallyIll) then
reason = 1
elseif (totalIter >= itmax) then converged
reason = -1
else converged
reason = 0
endif converged
!--------------------------------------------------------------------------------------------------
! report
write(6,'(1/,a)') ' ... reporting .............................................................'
write(6,'(/,a,f12.2,a,es8.2,a,es9.2,a)') ' error divergence = ', &
err_div/divTol, ' (',err_div, ' / m, tol = ',divTol,')'
write(6, '(a,f12.2,a,es8.2,a,es9.2,a)') ' error curl = ', &
err_curl/curlTol,' (',err_curl,' -, tol = ',curlTol,')'
write(6, '(a,f12.2,a,es8.2,a,es9.2,a)') ' error BC = ', &
err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')'
write(6,'(/,a)') ' ==========================================================================='
flush(6)
end subroutine grid_mech_spectral_polarisation_converged
!--------------------------------------------------------------------------------------------------
!> @brief forwarding routine
!> @details find new boundary conditions and best F estimate for end of current timestep
@@ -552,9 +321,9 @@ subroutine grid_mech_spectral_polarisation_forward(guess,timeinc,timeinc_old,loa
use CPFEM2, only: &
CPFEM_age
use spectral_utilities, only: &
Utilities_calculateRate, &
Utilities_forwardField, &
Utilities_updateIPcoords, &
utilities_calculateRate, &
utilities_forwardField, &
utilities_updateIPcoords, &
tBoundaryCondition, &
cutBack
use IO, only: &
@@ -576,14 +345,12 @@ subroutine grid_mech_spectral_polarisation_forward(guess,timeinc,timeinc_old,loa
rotation_BC
PetscErrorCode :: ierr
PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau
integer(pInt) :: i, j, k
integer :: i, j, k
real(pReal), dimension(3,3) :: F_lambda33
integer :: fileUnit
character(len=32) :: rankStr
!--------------------------------------------------------------------------------------------------
! update coordinates and rate and forward last inc
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
F => FandF_tau( 0: 8,:,:,:)
F_tau => FandF_tau( 9:17,:,:,:)
@@ -603,6 +370,10 @@ subroutine grid_mech_spectral_polarisation_forward(guess,timeinc,timeinc_old,loa
write(fileUnit) C_volAvg; close(fileUnit)
fileUnit = IO_open_jobFile_binary('C_volAvgLastInv','w')
write(fileUnit) C_volAvgLastInc; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aim','w')
write(fileUnit) F_aim; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aim_lastInc','w')
write(fileUnit) F_aim_lastInc; close(fileUnit)
fileUnit = IO_open_jobFile_binary('F_aimDot','w')
write(fileUnit) F_aimDot; close(fileUnit)
endif
@@ -612,14 +383,13 @@ subroutine grid_mech_spectral_polarisation_forward(guess,timeinc,timeinc_old,loa
write(fileUnit) F; close (fileUnit)
fileUnit = IO_open_jobFile_binary('F_lastInc'//trim(rankStr),'w')
write(fileUnit) F_lastInc; close (fileUnit)
fileUnit = IO_open_jobFile_binary('F_tau'//trim(rankStr),'w')
write(fileUnit) F_tau; close (fileUnit)
fileUnit = IO_open_jobFile_binary('F_tau_lastInc'//trim(rankStr),'w')
write(fileUnit) F_tau_lastInc; close (fileUnit)
endif
call CPFEM_age() ! age state and kinematics
call CPFEM_age ! age state and kinematics
call utilities_updateIPcoords(F)
C_volAvgLastInc = C_volAvg
@@ -642,10 +412,10 @@ subroutine grid_mech_spectral_polarisation_forward(guess,timeinc,timeinc_old,loa
endif
Fdot = Utilities_calculateRate(guess, &
Fdot = utilities_calculateRate(guess, &
F_lastInc,reshape(F,[3,3,grid(1),grid(2),grid3]),timeinc_old, &
math_rotate_backward33(F_aimDot,rotation_BC))
F_tauDot = Utilities_calculateRate(guess, &
F_tauDot = utilities_calculateRate(guess, &
F_tau_lastInc,reshape(F_tau,[3,3,grid(1),grid(2),grid3]), timeinc_old, &
math_rotate_backward33(F_aimDot,rotation_BC))
F_lastInc = reshape(F, [3,3,grid(1),grid(2),grid3]) ! winding F forward
@@ -656,15 +426,14 @@ subroutine grid_mech_spectral_polarisation_forward(guess,timeinc,timeinc_old,loa
!--------------------------------------------------------------------------------------------------
! update average and local deformation gradients
F_aim = F_aim_lastInc + F_aimDot * timeinc
F = reshape(Utilities_forwardField(timeinc,F_lastInc,Fdot, & ! estimate of F at end of time+timeinc that matches rotated F_aim on average
F = reshape(utilities_forwardField(timeinc,F_lastInc,Fdot, & ! estimate of F at end of time+timeinc that matches rotated F_aim on average
math_rotate_backward33(F_aim,rotation_BC)),&
[9,grid(1),grid(2),grid3])
if (guess) then
if (guess) then
F_tau = reshape(Utilities_forwardField(timeinc,F_tau_lastInc,F_taudot), &
[9,grid(1),grid(2),grid3]) ! does not have any average value as boundary condition
else
do k = 1_pInt, grid3; do j = 1_pInt, grid(2); do i = 1_pInt, grid(1)
do k = 1, grid3; do j = 1, grid(2); do i = 1, grid(1)
F_lambda33 = reshape(F_tau(1:9,i,j,k)-F(1:9,i,j,k),[3,3])
F_lambda33 = math_mul3333xx33(S_scale,math_mul33x33(F_lambda33, &
math_mul3333xx33(C_scale,&
@@ -675,10 +444,218 @@ subroutine grid_mech_spectral_polarisation_forward(guess,timeinc,timeinc_old,loa
enddo; enddo; enddo
endif
nullify(F)
nullify(F_tau)
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr)
end subroutine grid_mech_spectral_polarisation_forward
!--------------------------------------------------------------------------------------------------
!> @brief convergence check
!--------------------------------------------------------------------------------------------------
subroutine converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,ierr)
use numerics, only: &
itmax, &
itmin, &
err_div_tolRel, &
err_div_tolAbs, &
err_curl_tolRel, &
err_curl_tolAbs, &
err_stress_tolRel, &
err_stress_tolAbs
use FEsolving, only: &
terminallyIll
implicit none
SNES :: snes_local
PetscInt :: PETScIter
PetscReal :: &
xnorm, & ! not used
snorm, & ! not used
fnorm ! not used
SNESConvergedReason :: reason
PetscObject :: dummy
PetscErrorCode :: ierr
real(pReal) :: &
curlTol, &
divTol, &
BCTol
curlTol = max(maxval(abs(F_aim-math_I3))*err_curl_tolRel ,err_curl_tolAbs)
divTol = max(maxval(abs(P_av)) *err_div_tolRel ,err_div_tolAbs)
BCTol = max(maxval(abs(P_av)) *err_stress_tolRel,err_stress_tolAbs)
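! the relative curl tolerance scales with the deviation of the prescribed average deformation
! gradient from identity, while the divergence and stress-BC tolerances scale with the average
! stress; all three checks (plus at least itmin iterations) must pass for reason = 1 below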
if ((totalIter >= itmin .and. &
all([ err_div /divTol, &
err_curl/curlTol, &
err_BC /BCTol ] < 1.0_pReal)) &
.or. terminallyIll) then
reason = 1
elseif (totalIter >= itmax) then
reason = -1
else
reason = 0
endif
!--------------------------------------------------------------------------------------------------
! report
write(6,'(1/,a)') ' ... reporting .............................................................'
write(6,'(1/,a,f12.2,a,es8.2,a,es9.2,a)') ' error divergence = ', &
err_div/divTol, ' (',err_div, ' / m, tol = ',divTol,')'
write(6, '(a,f12.2,a,es8.2,a,es9.2,a)') ' error curl = ', &
err_curl/curlTol,' (',err_curl,' -, tol = ',curlTol,')'
write(6, '(a,f12.2,a,es8.2,a,es9.2,a)') ' error BC = ', &
err_BC/BCTol, ' (',err_BC, ' Pa, tol = ',BCTol,')'
write(6,'(/,a)') ' ==========================================================================='
flush(6)
end subroutine converged
!--------------------------------------------------------------------------------------------------
!> @brief forms the polarisation residual vector
!--------------------------------------------------------------------------------------------------
subroutine formResidual(in, FandF_tau, &
residuum, dummy,ierr)
use numerics, only: &
itmax, &
itmin, &
polarAlpha, &
polarBeta
use mesh, only: &
grid, &
grid3
use math, only: &
math_rotate_forward33, &
math_rotate_backward33, &
math_mul3333xx33, &
math_invSym3333, &
math_mul33x33
use debug, only: &
debug_level, &
debug_spectral, &
debug_spectralRotation
use spectral_utilities, only: &
wgt, &
tensorField_real, &
utilities_FFTtensorForward, &
utilities_fourierGammaConvolution, &
utilities_FFTtensorBackward, &
utilities_constitutiveResponse, &
utilities_divergenceRMS, &
utilities_curlRMS
use IO, only: &
IO_intOut
use homogenization, only: &
materialpoint_dPdF
use FEsolving, only: &
terminallyIll
implicit none
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work)
PetscScalar, dimension(3,3,2,XG_RANGE,YG_RANGE,ZG_RANGE), &
target, intent(in) :: FandF_tau
PetscScalar, dimension(3,3,2,X_RANGE,Y_RANGE,Z_RANGE),&
target, intent(out) :: residuum !< residuum field
PetscScalar, pointer, dimension(:,:,:,:,:) :: &
F, &
F_tau, &
residual_F, &
residual_F_tau
PetscInt :: &
PETScIter, &
nfuncs
PetscObject :: dummy
PetscErrorCode :: ierr
integer :: &
i, j, k, e
F => FandF_tau(1:3,1:3,1,&
XG_RANGE,YG_RANGE,ZG_RANGE)
F_tau => FandF_tau(1:3,1:3,2,&
XG_RANGE,YG_RANGE,ZG_RANGE)
residual_F => residuum(1:3,1:3,1,&
X_RANGE, Y_RANGE, Z_RANGE)
residual_F_tau => residuum(1:3,1:3,2,&
X_RANGE, Y_RANGE, Z_RANGE)
F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,F_av,9,MPI_DOUBLE,MPI_SUM,PETSC_COMM_WORLD,ierr)
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr)
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr)
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
!--------------------------------------------------------------------------------------------------
! begin of new iteration
newIteration: if (totalIter <= PETScIter) then
totalIter = totalIter + 1
write(6,'(1x,a,3(a,'//IO_intOut(itmax)//'))') &
trim(incInfo), ' @ Iteration ', itmin, '≤',totalIter, '≤', itmax
if (iand(debug_level(debug_spectral),debug_spectralRotation) /= 0) &
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim (lab) =', transpose(math_rotate_backward33(F_aim,params%rotation_BC))
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') &
' deformation gradient aim =', transpose(F_aim)
flush(6)
endif newIteration
!--------------------------------------------------------------------------------------------------
!
tensorField_real = 0.0_pReal
do k = 1, grid3; do j = 1, grid(2); do i = 1, grid(1)
tensorField_real(1:3,1:3,i,j,k) = &
polarBeta*math_mul3333xx33(C_scale,F(1:3,1:3,i,j,k) - math_I3) -&
polarAlpha*math_mul33x33(F(1:3,1:3,i,j,k), &
math_mul3333xx33(C_scale,F_tau(1:3,1:3,i,j,k) - F(1:3,1:3,i,j,k) - math_I3))
enddo; enddo; enddo
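! per grid point the loop above assembles
!   polarBeta * C_scale : (F - I)  -  polarAlpha * F . [ C_scale : (F_tau - F - I) ]
! which is the field to be Gamma-convolved below; alpha = beta = 1 corresponds to the AL variant
! mentioned in the documentation of polarAlpha/polarBeta in the numerics module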
!--------------------------------------------------------------------------------------------------
! doing convolution in Fourier space
call utilities_FFTtensorForward
call utilities_fourierGammaConvolution(math_rotate_backward33(polarBeta*F_aim,params%rotation_BC))
call utilities_FFTtensorBackward
!--------------------------------------------------------------------------------------------------
! constructing residual
residual_F_tau = polarBeta*F - tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3)
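! algebraically residual_F_tau = polarBeta*F - FFT^-1[Gamma_hat : field_hat], so the constitutive
! call below, which uses F - residual_F_tau/polarBeta, evaluates the material response at the
! Gamma-convolved field scaled by 1/polarBeta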
!--------------------------------------------------------------------------------------------------
! evaluate constitutive response
call utilities_constitutiveResponse(residual_F, & ! "residuum" gets field of first PK stress (to save memory)
P_av,C_volAvg,C_minMaxAvg, &
F - residual_F_tau/polarBeta,params%timeinc,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,PETSC_COMM_WORLD,ierr)
!--------------------------------------------------------------------------------------------------
! stress BC handling
F_aim = F_aim - math_mul3333xx33(S, ((P_av - params%stress_BC))) ! S = 0.0 for no bc
err_BC = maxval(abs((1.0_pReal-params%stress_mask) * math_mul3333xx33(C_scale,F_aim &
-math_rotate_forward33(F_av,params%rotation_BC)) + &
params%stress_mask * (P_av-params%stress_BC))) ! mask = 0.0 for no bc
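! err_BC mixes two contributions: stress-controlled components (mask = 1) use the deviation of the
! average stress from its prescribed value, deformation-controlled components (mask = 0) use
! C_scale : (F_aim - F_av), i.e. the mismatch between prescribed and volume-averaged deformation
! gradient expressed as a stress (with F_av rotated into the frame of F_aim)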
! calculate divergence
tensorField_real = 0.0_pReal
tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) = residual_F !< stress field in disguise
call utilities_FFTtensorForward
err_div = Utilities_divergenceRMS() !< root mean squared error in divergence of stress
!--------------------------------------------------------------------------------------------------
! constructing residual
e = 0
do k = 1, grid3; do j = 1, grid(2); do i = 1, grid(1)
e = e + 1
residual_F(1:3,1:3,i,j,k) = &
math_mul3333xx33(math_invSym3333(materialpoint_dPdF(1:3,1:3,1:3,1:3,1,e) + C_scale), &
residual_F(1:3,1:3,i,j,k) - math_mul33x33(F(1:3,1:3,i,j,k), &
math_mul3333xx33(C_scale,F_tau(1:3,1:3,i,j,k) - F(1:3,1:3,i,j,k) - math_I3))) &
+ residual_F_tau(1:3,1:3,i,j,k)
enddo; enddo; enddo
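! per grid point the final residual reads
!   residual_F = (dP/dF + C_scale)^-1 : [ P - F . (C_scale : (F_tau - F - I)) ] + residual_F_tau
! where P is the first Piola-Kirchhoff stress that was stored in residual_F by the constitutive call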
!--------------------------------------------------------------------------------------------------
! calculating curl
tensorField_real = 0.0_pReal
tensorField_real(1:3,1:3,1:grid(1),1:grid(2),1:grid3) = F
call utilities_FFTtensorForward
err_curl = Utilities_curlRMS()
end subroutine formResidual
end module grid_mech_spectral_polarisation
@@ -77,7 +77,7 @@ module numerics
!--------------------------------------------------------------------------------------------------
! spectral parameters:
#ifdef Spectral
#ifdef Grid
real(pReal), protected, public :: &
err_div_tolAbs = 1.0e-4_pReal, & !< absolute tolerance for equilibrium
err_div_tolRel = 5.0e-4_pReal, & !< relative tolerance for equilibrium
@@ -85,27 +85,17 @@ module numerics
err_curl_tolRel = 5.0e-4_pReal, & !< relative tolerance for compatibility
err_stress_tolAbs = 1.0e3_pReal, & !< absolute tolerance for fulfillment of stress BC
err_stress_tolRel = 0.01_pReal, & !< relative tolerance for fulfillment of stress BC
fftw_timelimit = -1.0_pReal, & !< sets the timelimit of plan creation for FFTW, see manual on www.fftw.org, Default -1.0: disable timelimit
rotation_tol = 1.0e-12_pReal, & !< tolerance of rotation specified in loadcase, Default 1.0e-12: first guess
polarAlpha = 1.0_pReal, & !< polarization scheme parameter 0.0 < alpha < 2.0. alpha = 1.0 ==> AL scheme, alpha = 2.0 ==> accelerated scheme
polarBeta = 1.0_pReal !< polarization scheme parameter 0.0 < beta < 2.0. beta = 1.0 ==> AL scheme, beta = 2.0 ==> accelerated scheme
character(len=64), private :: &
fftw_plan_mode = 'FFTW_PATIENT' !< reads the planning-rigor flag, see manual on www.fftw.org, Default FFTW_PATIENT: use patient planner flag
character(len=64), protected, public :: &
spectral_solver = 'basic', & !< spectral solution method
spectral_derivative = 'continuous' !< spectral spatial derivative method
character(len=1024), protected, public :: &
petsc_defaultOptions = '-mech_snes_type ngmres &
&-damage_snes_type ngmres &
&-thermal_snes_type ngmres ', &
petsc_options = ''
integer(pInt), protected, public :: &
fftw_planner_flag = 32_pInt, & !< conversion of fftw_plan_mode to integer, basically what is usually done in the include file of fftw
divergence_correction = 2_pInt !< correct divergence calculation in fourier space 0: no correction, 1: size scaled to 1, 2: size scaled to Npoints
logical, protected, public :: &
continueCalculation = .false., & !< false:exit if BVP solver does not converge, true: continue calculation despite BVP solver not converging
memory_efficient = .true., & !< for fast execution (pre calculation of gamma_hat), Default .true.: do not precalculate
update_gamma = .false. !< update gamma operator with current stiffness, Default .false.: use initial stiffness
continueCalculation = .false. !< false:exit if BVP solver does not converge, true: continue calculation despite BVP solver not converging
#endif
!--------------------------------------------------------------------------------------------------
@@ -319,7 +309,7 @@ subroutine numerics_init
!--------------------------------------------------------------------------------------------------
! spectral parameters
#ifdef Spectral
#ifdef Grid
case ('err_div_tolabs')
err_div_tolAbs = IO_floatValue(line,chunkPos,2_pInt)
case ('err_div_tolrel')
@@ -330,22 +320,8 @@ subroutine numerics_init
err_stress_tolabs = IO_floatValue(line,chunkPos,2_pInt)
case ('continuecalculation')
continueCalculation = IO_intValue(line,chunkPos,2_pInt) > 0_pInt
case ('memory_efficient')
memory_efficient = IO_intValue(line,chunkPos,2_pInt) > 0_pInt
case ('fftw_timelimit')
fftw_timelimit = IO_floatValue(line,chunkPos,2_pInt)
case ('fftw_plan_mode')
fftw_plan_mode = IO_lc(IO_stringValue(line,chunkPos,2_pInt))
case ('spectralderivative')
spectral_derivative = IO_lc(IO_stringValue(line,chunkPos,2_pInt))
case ('divergence_correction')
divergence_correction = IO_intValue(line,chunkPos,2_pInt)
case ('update_gamma')
update_gamma = IO_intValue(line,chunkPos,2_pInt) > 0_pInt
case ('petsc_options')
petsc_options = trim(line(chunkPos(4):))
case ('spectralsolver','myspectralsolver')
spectral_solver = IO_lc(IO_stringValue(line,chunkPos,2_pInt))
case ('err_curl_tolabs')
err_curl_tolAbs = IO_floatValue(line,chunkPos,2_pInt)
case ('err_curl_tolrel')
@@ -354,13 +330,6 @@ subroutine numerics_init
polarAlpha = IO_floatValue(line,chunkPos,2_pInt)
case ('polarbeta')
polarBeta = IO_floatValue(line,chunkPos,2_pInt)
#else
case ('err_div_tolabs','err_div_tolrel','err_stress_tolrel','err_stress_tolabs',& ! found spectral parameter for FEM build
'memory_efficient','fftw_timelimit','fftw_plan_mode', &
'divergence_correction','update_gamma','spectralfilter','myfilter', &
'err_curl_tolabs','err_curl_tolrel', &
'polaralpha','polarbeta')
call IO_warning(40_pInt,ext_msg=tag)
#endif
!--------------------------------------------------------------------------------------------------
@@ -374,36 +343,16 @@ subroutine numerics_init
petsc_options = trim(line(chunkPos(4):))
case ('bbarstabilisation')
BBarStabilisation = IO_intValue(line,chunkPos,2_pInt) > 0_pInt
#else
case ('integrationorder','structorder','thermalorder', 'damageorder', &
'bbarstabilisation')
call IO_warning(40_pInt,ext_msg=tag)
#endif
case default ! found unknown keyword
call IO_error(300_pInt,ext_msg=tag)
end select
enddo
else fileExists
write(6,'(a,/)') ' using standard values'
flush(6)
endif fileExists
#ifdef Spectral
select case(IO_lc(fftw_plan_mode)) ! setting parameters for the plan creation of FFTW. Basically a translation from fftw3.f
case('estimate','fftw_estimate') ! ordered from slow execution (but fast plan creation) to fast execution
fftw_planner_flag = 64_pInt
case('measure','fftw_measure')
fftw_planner_flag = 0_pInt
case('patient','fftw_patient')
fftw_planner_flag= 32_pInt
case('exhaustive','fftw_exhaustive')
fftw_planner_flag = 8_pInt
case default
call IO_warning(warning_ID=47_pInt,ext_msg=trim(IO_lc(fftw_plan_mode)))
fftw_planner_flag = 32_pInt
end select
#endif
!--------------------------------------------------------------------------------------------------
! writing parameters to output
@@ -478,19 +427,8 @@ subroutine numerics_init
!--------------------------------------------------------------------------------------------------
! spectral parameters
#ifdef Spectral
#ifdef Grid
write(6,'(a24,1x,L8)') ' continueCalculation: ',continueCalculation
write(6,'(a24,1x,L8)') ' memory_efficient: ',memory_efficient
write(6,'(a24,1x,i8)') ' divergence_correction: ',divergence_correction
write(6,'(a24,1x,a)') ' spectral_derivative: ',trim(spectral_derivative)
if(fftw_timelimit<0.0_pReal) then
write(6,'(a24,1x,L8)') ' fftw_timelimit: ',.false.
else
write(6,'(a24,1x,es8.1)') ' fftw_timelimit: ',fftw_timelimit
endif
write(6,'(a24,1x,a)') ' fftw_plan_mode: ',trim(fftw_plan_mode)
write(6,'(a24,1x,i8)') ' fftw_planner_flag: ',fftw_planner_flag
write(6,'(a24,1x,L8,/)') ' update_gamma: ',update_gamma
write(6,'(a24,1x,es8.1)') ' err_stress_tolAbs: ',err_stress_tolAbs
write(6,'(a24,1x,es8.1)') ' err_stress_tolRel: ',err_stress_tolRel
write(6,'(a24,1x,es8.1)') ' err_div_tolAbs: ',err_div_tolAbs
@@ -499,7 +437,6 @@ subroutine numerics_init
write(6,'(a24,1x,es8.1)') ' err_curl_tolRel: ',err_curl_tolRel
write(6,'(a24,1x,es8.1)') ' polarAlpha: ',polarAlpha
write(6,'(a24,1x,es8.1)') ' polarBeta: ',polarBeta
write(6,'(a24,1x,a)') ' spectral solver: ',trim(spectral_solver)
write(6,'(a24,1x,a)') ' PETSc_options: ',trim(petsc_defaultOptions)//' '//trim(petsc_options)
#endif
@@ -564,11 +501,7 @@ subroutine numerics_init
if (err_thermal_tolrel <= 0.0_pReal) call IO_error(301_pInt,ext_msg='err_thermal_tolrel')
if (err_damage_tolabs <= 0.0_pReal) call IO_error(301_pInt,ext_msg='err_damage_tolabs')
if (err_damage_tolrel <= 0.0_pReal) call IO_error(301_pInt,ext_msg='err_damage_tolrel')
#ifdef Spectral
if (divergence_correction < 0_pInt .or. &
divergence_correction > 2_pInt) call IO_error(301_pInt,ext_msg='divergence_correction')
if (update_gamma .and. &
.not. memory_efficient) call IO_error(error_ID = 847_pInt)
#ifdef Grid
if (err_stress_tolrel <= 0.0_pReal) call IO_error(301_pInt,ext_msg='err_stress_tolRel')
if (err_stress_tolabs <= 0.0_pReal) call IO_error(301_pInt,ext_msg='err_stress_tolAbs')
if (err_div_tolRel < 0.0_pReal) call IO_error(301_pInt,ext_msg='err_div_tolRel')
@@ -282,13 +282,13 @@ subroutine plastic_nonlocal_init
character(len=65536), dimension(:), allocatable :: outputs
integer :: NofMyPhase
write(6,'(/,a)') ' <<<+- constitutive_'//PLASTICITY_NONLOCAL_label//' init -+>>>'
write(6,'(/,a)') ' <<<+- constitutive_'//PLASTICITY_NONLOCAL_label//' init -+>>>'
write(6,'(/,a)') ' Reuber et al., Acta Materialia 71:333–348, 2014'
write(6,'(a)') ' https://doi.org/10.1016/j.actamat.2014.03.012'
write(6,'(/,a)') ' Reuber et al., Acta Materialia 71:333–348, 2014'
write(6,'(a)') ' https://doi.org/10.1016/j.actamat.2014.03.012'
write(6,'(/,a)') ' Kords, Dissertation RWTH Aachen, 2014'
write(6,'(a)') ' http://publications.rwth-aachen.de/record/229993'
write(6,'(/,a)') ' Kords, Dissertation RWTH Aachen, 2014'
write(6,'(a)') ' http://publications.rwth-aachen.de/record/229993'
maxNinstances = count(phase_plasticity == PLASTICITY_NONLOCAL_ID)
if (iand(debug_level(debug_constitutive),debug_levelBasic) /= 0) &
@@ -13,8 +13,11 @@ module prec
implicit none
private
! https://software.intel.com/en-us/blogs/2017/03/27/doctor-fortran-in-it-takes-all-kinds
#ifdef Abaqus
integer, parameter, public :: pReal = selected_real_kind(15,307) !< number with 15 significant digits, up to 1e+-307 (typically 64 bit)
#else
integer, parameter, public :: pReal = IEEE_selected_real_kind(15,307) !< number with 15 significant digits, up to 1e+-307 (typically 64 bit)
#endif
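! both branches request a real kind with at least 15 significant digits and a decimal exponent
! range of 307, i.e. IEEE double precision (64 bit) on common platforms; the Abaqus build
! presumably avoids IEEE_selected_real_kind because of limitations of its bundled compiler setup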
#if(INT==8)
integer, parameter, public :: pInt = selected_int_kind(18) !< number with at least up to +-1e18 (typically 64 bit)
#else
File diff suppressed because it is too large