employing use statements (almost no external function declarations anymore)

adapted function calls to newer PETSc versions
This commit is contained in:
Martin Diehl 2018-05-17 12:04:21 +02:00
parent 31311d63db
commit 537bb8df48
9 changed files with 132 additions and 223 deletions
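The change pattern, applied uniformly across all nine files: PETSc >= 3.8 ships proper Fortran modules with explicit interfaces, so each solver module now places the type-defining finclude header at the top of the module and pulls interfaces in via use statements, instead of the old "#include <petsc/finclude/petsc.h90>" after "implicit none" plus long "external ::" declaration lists. A minimal sketch of the layout the commit migrates to (illustrative module and routine names, not code from this commit; assumes PETSc >= 3.8):

! sketch of the new module layout; names are illustrative
module solver_sketch
#include <petsc/finclude/petscsnes.h>
 use PETScsnes                                             ! explicit interfaces replace "external :: SNESCreate, SNESSolve, ..."
 implicit none
 private
 SNES :: snes                                              ! type macros come from the finclude header
 public :: solver_sketch_init

contains

subroutine solver_sketch_init()
 PetscErrorCode :: ierr

 call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr)
 call SNESSetFromOptions(snes,ierr); CHKERRQ(ierr)
end subroutine solver_sketch_init

end module solver_sketch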


@@ -12,6 +12,8 @@ program DAMASK_spectral
compiler_version, &
compiler_options
#endif
#include <petsc/finclude/petscsys.h>
use PETScsys
use prec, only: &
pInt, &
pLongInt, &
@@ -84,11 +86,8 @@ program DAMASK_spectral
use spectral_damage
use spectral_thermal
implicit none
#include <petsc/finclude/petscsys.h>
!--------------------------------------------------------------------------------------------------
! variables related to information from load case and geom file
real(pReal), dimension(9) :: temp_valueVector = 0.0_pReal !< temporarily from loadcase file when reading in tensors (initialize to 0.0)
@@ -143,18 +142,11 @@ program DAMASK_spectral
integer(pInt), parameter :: maxByteOut = 2147483647-4096 !< limit of one file output write https://trac.mpich.org/projects/mpich/ticket/1742
integer(pInt), parameter :: maxRealOut = maxByteOut/pReal
integer(pLongInt), dimension(2) :: outputIndex
PetscErrorCode :: ierr
integer :: ierr
external :: &
quit, &
MPI_file_open, &
MPI_file_close, &
MPI_file_seek, &
MPI_file_get_position, &
MPI_file_write, &
MPI_abort, &
MPI_finalize, &
MPI_allreduce, &
PETScFinalize
quit
!--------------------------------------------------------------------------------------------------
! init DAMASK (all modules)
@@ -442,10 +434,9 @@ program DAMASK_spectral
do i = 1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
outputIndex = int([(i-1_pInt)*((maxRealOut)/materialpoint_sizeResults)+1_pInt, & ! QUESTION: why not starting i at 0 instead of murky 1?
min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
call MPI_file_write(resUnit, &
reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), &
call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)), &
[(outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)]), &
(outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt), &
int((outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)), &
MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
if (ierr /= 0_pInt) call IO_error(error_ID=894_pInt, ext_msg='MPI_file_write')
enddo
@@ -635,7 +626,7 @@ program DAMASK_spectral
min(i*((maxRealOut)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
call MPI_file_write(resUnit,reshape(materialpoint_results(:,:,outputIndex(1):outputIndex(2)),&
[(outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)]), &
(outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt),&
int((outputIndex(2)-outputIndex(1)+1)*int(materialpoint_sizeResults,pLongInt)),&
MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
enddo
@@ -682,6 +673,7 @@ end program DAMASK_spectral
!> stderr. Exit code 3 signals no severe problems, but some increments did not converge
!--------------------------------------------------------------------------------------------------
subroutine quit(stop_id)
use MPI
use prec, only: &
pInt
use spectral_mech_Basic, only: &
@@ -705,8 +697,7 @@ subroutine quit(stop_id)
logical :: ErrorInQuit
external :: &
PETScFinalize, &
MPI_finalize
PETScFinalize
call BasicPETSC_destroy()
call Polarisation_destroy()

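Context for the MPI_file_write loops above: the count argument of MPI-2 I/O routines is a 32-bit integer, so a single write is capped at maxByteOut (just under 2 GiB) and the results array is sliced into compliant chunks. A condensed sketch of that pattern with hypothetical names (the commit's own loop additionally widens the index arithmetic via pLongInt):

! chunked MPI output: stay below the 32-bit count limit of MPI_file_write
subroutine write_chunked(fh,results)
 use MPI
 implicit none
 integer, intent(in) :: fh                                 ! MPI-IO file handle
 real(8), intent(in) :: results(:)
 integer, parameter :: maxByteOut = 2147483647-4096        ! per-write byte limit
 integer, parameter :: chunkLen = maxByteOut/8             ! 8-byte reals per write
 integer :: i, first, last, ierr

 do i = 1, size(results)/chunkLen + 1
   first = (i-1)*chunkLen + 1
   last = min(i*chunkLen,size(results))
   if (first > last) exit                                  ! final slice may be empty
   call MPI_file_write(fh,results(first:last),last-first+1, &
                       MPI_DOUBLE_PRECISION,MPI_STATUS_IGNORE,ierr)
   if (ierr /= 0) exit                                     ! caller should map this to an error code
 enddo
end subroutine write_chunked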

@@ -118,11 +118,6 @@ module mesh
logical, private :: noPart !< for cases where the ABAQUS input file does not use part/assembly information
#endif
#ifdef Spectral
#include <petsc/finclude/petscsys.h>
include 'fftw3-mpi.f03'
#endif
! These definitions should actually reside in the FE-solver specific part (different for MARC/ABAQUS)
! Hence, I suggest to prefix with "FE_"
@@ -481,6 +476,10 @@ subroutine mesh_init(ip,el)
use, intrinsic :: iso_fortran_env, only: &
compiler_version, &
compiler_options
#endif
#ifdef Spectral
#include <petsc/finclude/petscsys.h>
use PETScsys
#endif
use DAMASK_interface
use IO, only: &
@@ -516,6 +515,7 @@ subroutine mesh_init(ip,el)
implicit none
#ifdef Spectral
include 'fftw3-mpi.f03'
integer(C_INTPTR_T) :: devNull, local_K, local_K_offset
integer :: ierr, worldsize
#endif
@@ -524,8 +524,6 @@ subroutine mesh_init(ip,el)
integer(pInt) :: j
logical :: myDebug
external :: MPI_comm_size
write(6,'(/,a)') ' <<<+- mesh init -+>>>'
write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"


@@ -10,9 +10,6 @@ module numerics
implicit none
private
#ifdef PETSc
#include <petsc/finclude/petsc.h90>
#endif
character(len=64), parameter, private :: &
numerics_CONFIGFILE = 'numerics.config' !< name of configuration file
@@ -216,6 +213,10 @@ subroutine numerics_init
IO_warning, &
IO_timeStamp, &
IO_EOF
#ifdef PETSc
#include <petsc/finclude/petscsys.h>
use petscsys
#endif
#if defined(Spectral) || defined(FEM)
!$ use OMP_LIB, only: omp_set_num_threads ! Use the standard conforming module file for omp if using the spectral solver
implicit none
@@ -232,9 +233,7 @@ subroutine numerics_init
line
!$ character(len=6) DAMASK_NumThreadsString ! environment variable DAMASK_NUM_THREADS
external :: &
MPI_Comm_rank, &
MPI_Comm_size, &
MPI_Abort
PETScErrorF ! is called in the CHKERRQ macro
#ifdef PETSc
call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,ierr);CHKERRQ(ierr)

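Why PETScErrorF remains in the (now much shorter) external lists here and in the solver modules below: Fortran's CHKERRQ is a preprocessor macro whose expansion calls PetscErrorF, and that routine has no explicit interface in the PETSc Fortran modules of this era, so under implicit none it still needs an external declaration. Roughly (an illustrative expansion, not the verbatim PETSc header):

! CHKERRQ(ierr) expands along the lines of:
! if (ierr /= 0) then
!   call PetscErrorF(ierr)   ! hand the error code to PETSc's reporter
!   return
! endif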

@@ -4,6 +4,10 @@
!> @brief Spectral solver for nonlocal damage
!--------------------------------------------------------------------------------------------------
module spectral_damage
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
use PETScdmda
use PETScsnes
use prec, only: &
pInt, &
pReal
@@ -18,7 +22,6 @@ module spectral_damage
implicit none
private
#include <petsc/finclude/petsc.h90>
character (len=*), parameter, public :: &
spectral_damage_label = 'spectraldamage'
@@ -49,10 +52,7 @@ module spectral_damage
spectral_damage_forward, &
spectral_damage_destroy
external :: &
PETScFinalize, &
MPI_Abort, &
MPI_Bcast, &
MPI_Allreduce
PETScErrorF ! is called in the CHKERRQ macro
contains
@@ -79,32 +79,22 @@ subroutine spectral_damage_init()
damage_nonlocal_getMobility
implicit none
integer(pInt), dimension(:), allocatable :: localK
PetscInt, dimension(:), allocatable :: localK
integer(pInt) :: proc
integer(pInt) :: i, j, k, cell
DM :: damage_grid
Vec :: uBound, lBound
PetscErrorCode :: ierr
character(len=100) :: snes_type
external :: &
SNESCreate, &
SNESSetOptionsPrefix, &
DMDACreate3D, &
SNESSetDM, &
DMDAGetCorners, &
DMCreateGlobalVector, &
DMDASNESSetFunctionLocal, &
SNESSetFromOptions, &
SNESGetType, &
VecSet, &
DMGetGlobalVector, &
DMRestoreGlobalVector, &
SNESVISetVariableBounds
DMDAGetCorners, &
DMDASNESSetFunctionLocal
write(6,'(/,a)') ' <<<+- spectral_damage init -+>>>'
write(6,'(/,a)') ' Shanthraj et al., Handbook of Mechanics of Materials, volume in press, '
write(6,'(/,a)') ' chapter Spectral Solvers for Crystal Plasticity and Multi-Physics Simulations. Springer, 2018 '
write(6,'(a,/)') ' chapter Spectral Solvers for Crystal Plasticity and Multi-Physics Simulations. Springer, 2018 '
write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"
@@ -116,21 +106,23 @@ subroutine spectral_damage_init()
do proc = 1, worldsize
call MPI_Bcast(localK(proc),1,MPI_INTEGER,proc-1,PETSC_COMM_WORLD,ierr)
enddo
call DMDACreate3d(PETSC_COMM_WORLD, &
call DMDACreate3D(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & !< cut off stencil at boundary
DMDA_STENCIL_BOX, & !< Moore (26) neighborhood around central point
grid(1),grid(2),grid(3), & !< global grid
1, 1, worldsize, &
1, 0, & !< #dof (damage phase field), ghost boundary width (domain overlap)
grid(1),grid(2),localK, & !< local grid
[grid(1)],[grid(2)],localK, & !< local grid
damage_grid,ierr) !< handle, error
CHKERRQ(ierr)
call SNESSetDM(damage_snes,damage_grid,ierr); CHKERRQ(ierr) !< connect snes to da
call DMsetFromOptions(damage_grid,ierr); CHKERRQ(ierr)
call DMsetUp(damage_grid,ierr); CHKERRQ(ierr)
call DMCreateGlobalVector(damage_grid,solution,ierr); CHKERRQ(ierr) !< global solution vector (grid x 1, i.e. every def grad tensor)
call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,spectral_damage_formResidual,&
PETSC_NULL_OBJECT,ierr) !< residual vector of same shape as solution vector
PETSC_NULL_SNES,ierr) !< residual vector of same shape as solution vector
CHKERRQ(ierr)
call SNESSetFromOptions(damage_snes,ierr); CHKERRQ(ierr) !< pull it all together with additional cli arguments
call SNESSetFromOptions(damage_snes,ierr); CHKERRQ(ierr) !< pull it all together with additional CLI arguments
call SNESGetType(damage_snes,snes_type,ierr); CHKERRQ(ierr)
if (trim(snes_type) == 'vinewtonrsls' .or. &
trim(snes_type) == 'vinewtonssls') then
@@ -206,8 +198,7 @@ type(tSolutionState) function spectral_damage_solution(timeinc,timeinc_old,loadC
external :: &
VecMin, &
VecMax, &
SNESSolve, &
SNESGetConvergedReason
SNESSolve
spectral_damage_solution%converged =.false.
@@ -216,7 +207,7 @@ type(tSolutionState) function spectral_damage_solution(timeinc,timeinc_old,loadC
params%timeinc = timeinc
params%timeincOld = timeinc_old
call SNESSolve(damage_snes,PETSC_NULL_OBJECT,solution,ierr); CHKERRQ(ierr)
call SNESSolve(damage_snes,PETSC_NULL_VEC,solution,ierr); CHKERRQ(ierr)
call SNESGetConvergedReason(damage_snes,reason,ierr); CHKERRQ(ierr)
if (reason < 1) then
@@ -244,14 +235,12 @@ type(tSolutionState) function spectral_damage_solution(timeinc,timeinc_old,loadC
call VecMin(solution,position,minDamage,ierr); CHKERRQ(ierr)
call VecMax(solution,position,maxDamage,ierr); CHKERRQ(ierr)
if (worldrank == 0) then
if (spectral_damage_solution%converged) &
write(6,'(/,a)') ' ... nonlocal damage converged .....................................'
write(6,'(/,a,f8.6,2x,f8.6,2x,f8.6,/)',advance='no') ' Minimum|Maximum|Delta Damage = ',&
minDamage, maxDamage, stagNorm
write(6,'(/,a)') ' ==========================================================================='
flush(6)
endif
end function spectral_damage_solution
@@ -362,9 +351,6 @@ subroutine spectral_damage_forward()
PetscScalar, dimension(:,:,:), pointer :: x_scal
PetscErrorCode :: ierr
external :: &
SNESGetDM
if (cutBack) then
damage_current = damage_lastInc
damage_stagInc = damage_lastInc
@@ -407,10 +393,6 @@ subroutine spectral_damage_destroy()
implicit none
PetscErrorCode :: ierr
external :: &
VecDestroy, &
SNESDestroy
call VecDestroy(solution,ierr); CHKERRQ(ierr)
call SNESDestroy(damage_snes,ierr); CHKERRQ(ierr)

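Two PETSc 3.8+ API changes recur in the solver hunks above and below: the untyped PETSC_NULL_OBJECT is gone (typed nulls such as PETSC_NULL_VEC, PETSC_NULL_SNES and PETSC_NULL_OPTIONS take its place), and a DMDA must now be set up explicitly via DMSetFromOptions and DMSetUp before use. A minimal sketch of the resulting creation sequence, assuming PETSc >= 3.8 (grid sizes and the subroutine name are illustrative, not from this commit):

subroutine dmda_snes_sketch()
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
 use PETScsnes
 use PETScdmda
 implicit none
 SNES :: snes
 DM :: da
 Vec :: solution
 PetscErrorCode :: ierr

 call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr)
 call DMDACreate3d(PETSC_COMM_WORLD, &
        DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE, &          ! cut off stencil at boundary
        DMDA_STENCIL_BOX, &
        8,8,8, &                                                       ! global grid (illustrative size)
        1,1,1, &                                                       ! process layout
        1,0, &                                                         ! dof per grid point, ghost width
        PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &    ! default local sizes
        da,ierr); CHKERRQ(ierr)
 call SNESSetDM(snes,da,ierr); CHKERRQ(ierr)
 call DMSetFromOptions(da,ierr); CHKERRQ(ierr)                         ! explicit since PETSc 3.8:
 call DMSetUp(da,ierr); CHKERRQ(ierr)                                  ! DMDACreate3d no longer sets up the DM
 call DMCreateGlobalVector(da,solution,ierr); CHKERRQ(ierr)
 ! a real solver registers a residual (e.g. DMDASNESSetFunctionLocal) first; the call below
 ! only illustrates the typed null that replaces pre-3.8 PETSC_NULL_OBJECT and would
 ! error at runtime without a residual function:
 call SNESSolve(snes,PETSC_NULL_VEC,solution,ierr); CHKERRQ(ierr)
 call VecDestroy(solution,ierr); CHKERRQ(ierr)
 call DMDestroy(da,ierr); CHKERRQ(ierr)
 call SNESDestroy(snes,ierr); CHKERRQ(ierr)
end subroutine dmda_snes_sketch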

@@ -11,9 +11,9 @@
module DAMASK_interface
use prec, only: &
pInt
implicit none
private
#include <petsc/finclude/petscsys.h>
logical, public, protected :: appendToOutFile = .false. !< Append to existing spectralOut file (in case of restart, not in case of regridding)
integer(pInt), public, protected :: spectralRestartInc = 0_pInt !< Increment at which calculation starts
character(len=1024), public, protected :: &
@@ -44,7 +44,13 @@ contains
subroutine DAMASK_interface_init()
use, intrinsic :: &
iso_fortran_env
#include <petsc/finclude/petscsys.h>
#if PETSC_VERSION_MAJOR!=3 || PETSC_VERSION_MINOR!=9
===================================================================================================
========================= THIS VERSION OF DAMASK REQUIRES PETSc 3.9.x =========================
===================================================================================================
#endif
use PETScSys
use system_routines, only: &
getHostName
@@ -72,11 +78,8 @@ subroutine DAMASK_interface_init()
logical :: error
external :: &
quit,&
MPI_Comm_rank,&
MPI_Comm_size,&
PETScInitialize, &
MPI_Init_Thread, &
MPI_abort
PETScErrorF, & ! is called in the CHKERRQ macro
PETScInitialize
open(6, encoding='UTF-8') ! for special characters in output
@@ -91,7 +94,7 @@ subroutine DAMASK_interface_init()
call quit(1_pInt)
endif
#endif
call PetscInitialize(PETSC_NULL_CHARACTER,ierr) ! according to PETSc manual, that should be the first line in the code
call PETScInitialize(PETSC_NULL_CHARACTER,ierr) ! according to PETSc manual, that should be the first line in the code
CHKERRQ(ierr) ! this is a macro definition, it is case sensitive
call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,ierr);CHKERRQ(ierr)
call MPI_Comm_size(PETSC_COMM_WORLD,worldsize,ierr);CHKERRQ(ierr)
@@ -104,10 +107,6 @@ subroutine DAMASK_interface_init()
write(output_unit,'(a)') ' STDERR != 0'
call quit(1_pInt)
endif
if (PETSC_VERSION_MAJOR /= 3 .or. PETSC_VERSION_MINOR /= 7) then
write(6,'(a,2(i1.1,a))') 'PETSc ',PETSC_VERSION_MAJOR,'.',PETSC_VERSION_MINOR,'.x not supported'
call quit(1_pInt)
endif
else mainProcess
close(6) ! disable output for non-master processes (open 6 to rank specific file for debug)
open(6,file='/dev/null',status='replace') ! close(6) alone will leave some temp files in cwd


@@ -5,6 +5,10 @@
!> @brief Basic scheme PETSc solver
!--------------------------------------------------------------------------------------------------
module spectral_mech_basic
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
use PETScdmda
use PETScsnes
use prec, only: &
pInt, &
pReal
@@ -16,7 +20,6 @@ module spectral_mech_basic
implicit none
private
#include <petsc/finclude/petsc.h90>
character (len=*), parameter, public :: &
DAMASK_spectral_SolverBasicPETSC_label = 'basicpetsc'
@@ -67,10 +70,7 @@ module spectral_mech_basic
BasicPETSc_forward, &
basicPETSc_destroy
external :: &
PETScFinalize, &
MPI_Abort, &
MPI_Bcast, &
MPI_Allreduce
PETScErrorF ! is called in the CHKERRQ macro
contains
@@ -118,25 +118,18 @@ subroutine basicPETSc_init
PetscErrorCode :: ierr
PetscScalar, pointer, dimension(:,:,:,:) :: F
integer(pInt), dimension(:), allocatable :: localK
PetscInt, dimension(:), allocatable :: localK
integer(pInt) :: proc
character(len=1024) :: rankStr
external :: &
SNESCreate, &
SNESSetOptionsPrefix, &
DMDACreate3D, &
SNESSetDM, &
DMCreateGlobalVector, &
DMDASNESSetFunctionLocal, &
SNESGetConvergedReason, &
SNESSetConvergenceTest, &
SNESSetFromOptions
DMDASNESsetFunctionLocal
write(6,'(/,a)') ' <<<+- DAMASK_spectral_solverBasicPETSc init -+>>>'
write(6,'(/,a)') ' Shanthraj et al., International Journal of Plasticity, 66:3145, 2015'
write(6,'(/,a)') ' https://doi.org/10.1016/j.ijplas.2014.02.006'
write(6,'(a,/)') ' https://doi.org/10.1016/j.ijplas.2014.02.006'
write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"
@@ -159,16 +152,18 @@ subroutine basicPETSc_init
grid(1),grid(2),grid(3), & ! global grid
1 , 1, worldsize, &
9, 0, & ! #dof (F tensor), ghost boundary width (domain overlap)
grid(1),grid(2),localK, & ! local grid
[grid(1)],[grid(2)],localK, & ! local grid
da,ierr) ! handle, error
CHKERRQ(ierr)
call SNESSetDM(snes,da,ierr); CHKERRQ(ierr) ! connect snes to da
call DMCreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 9, i.e. every def grad tensor)
call DMDASNESSetFunctionLocal(da,INSERT_VALUES,BasicPETSC_formResidual,PETSC_NULL_OBJECT,ierr) ! residual vector of same shape as solution vector
call DMsetFromOptions(da,ierr); CHKERRQ(ierr)
call DMsetUp(da,ierr); CHKERRQ(ierr)
call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 9, i.e. every def grad tensor)
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,BasicPETSC_formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector
CHKERRQ(ierr)
call SNESSetConvergenceTest(snes,BasicPETSC_converged,PETSC_NULL_OBJECT,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "_converged"
call SNESsetConvergenceTest(snes,BasicPETSC_converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "_converged"
CHKERRQ(ierr)
call SNESSetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional cli arguments
call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments
!--------------------------------------------------------------------------------------------------
! init fields
@@ -255,8 +250,7 @@ type(tSolutionState) function basicPETSc_solution(incInfoIn,timeinc,timeinc_old,
SNESConvergedReason :: reason
external :: &
SNESSolve, &
SNESGetConvergedReason
SNESsolve
incInfo = incInfoIn
@@ -276,7 +270,7 @@ type(tSolutionState) function basicPETSc_solution(incInfoIn,timeinc,timeinc_old,
!--------------------------------------------------------------------------------------------------
! solve BVP
call SNESSolve(snes,PETSC_NULL_OBJECT,solution_vec,ierr); CHKERRQ(ierr)
call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------
! check convergence
@@ -334,10 +328,6 @@ subroutine BasicPETSC_formResidual(in,x_scal,f_scal,dummy,ierr)
real(pReal), dimension(3,3) :: &
deltaF_aim
external :: &
SNESGetNumberFunctionEvals, &
SNESGetIterationNumber
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr)
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr)
@@ -561,11 +551,6 @@ subroutine BasicPETSc_destroy()
implicit none
PetscErrorCode :: ierr
external :: &
VecDestroy, &
SNESDestroy, &
DMDestroy
call VecDestroy(solution_vec,ierr); CHKERRQ(ierr)
call SNESDestroy(snes,ierr); CHKERRQ(ierr)
call DMDestroy(da,ierr); CHKERRQ(ierr)


@@ -5,6 +5,10 @@
!> @brief Polarisation scheme solver
!--------------------------------------------------------------------------------------------------
module spectral_mech_Polarisation
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
use PETScdmda
use PETScsnes
use prec, only: &
pInt, &
pReal
@@ -16,7 +20,6 @@ module spectral_mech_Polarisation
implicit none
private
#include <petsc/finclude/petsc.h90>
character (len=*), parameter, public :: &
DAMASK_spectral_solverPolarisation_label = 'polarisation'
@@ -73,10 +76,7 @@ module spectral_mech_Polarisation
Polarisation_forward, &
Polarisation_destroy
external :: &
PETScFinalize, &
MPI_Abort, &
MPI_Bcast, &
MPI_Allreduce
PETScErrorF ! is called in the CHKERRQ macro
contains
@@ -128,25 +128,18 @@ subroutine Polarisation_init
FandF_tau, & ! overall pointer to solution data
F, & ! specific (sub)pointer
F_tau ! specific (sub)pointer
integer(pInt), dimension(:), allocatable :: localK
PetscInt, dimension(:), allocatable :: localK
integer(pInt) :: proc
character(len=1024) :: rankStr
external :: &
SNESCreate, &
SNESSetOptionsPrefix, &
DMDACreate3D, &
SNESSetDM, &
DMCreateGlobalVector, &
DMDASNESSetFunctionLocal, &
SNESGetConvergedReason, &
SNESSetConvergenceTest, &
SNESSetFromOptions
DMDASNESsetFunctionLocal
write(6,'(/,a)') ' <<<+- DAMASK_spectral_solverPolarisation init -+>>>'
write(6,'(/,a)') ' Shanthraj et al., International Journal of Plasticity, 66:3145, 2015'
write(6,'(/,a)') ' https://doi.org/10.1016/j.ijplas.2014.02.006'
write(6,'(a,/)') ' https://doi.org/10.1016/j.ijplas.2014.02.006'
write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"
@@ -171,16 +164,18 @@ subroutine Polarisation_init
grid(1),grid(2),grid(3), & ! global grid
1 , 1, worldsize, &
18, 0, & ! #dof (F tensor), ghost boundary width (domain overlap)
grid(1),grid(2),localK, & ! local grid
[grid(1)],[grid(2)],localK, & ! local grid
da,ierr) ! handle, error
CHKERRQ(ierr)
call SNESSetDM(snes,da,ierr); CHKERRQ(ierr) ! connect snes to da
call DMCreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 9, i.e. every def grad tensor)
call DMDASNESSetFunctionLocal(da,INSERT_VALUES,Polarisation_formResidual,PETSC_NULL_OBJECT,ierr) ! residual vector of same shape as solution vector
call DMsetFromOptions(da,ierr); CHKERRQ(ierr)
call DMsetUp(da,ierr); CHKERRQ(ierr)
call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 18, i.e. every def grad tensor)
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,Polarisation_formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector
CHKERRQ(ierr)
call SNESSetConvergenceTest(snes,Polarisation_converged,PETSC_NULL_OBJECT,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "_converged"
call SNESsetConvergenceTest(snes,Polarisation_converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "_converged"
CHKERRQ(ierr)
call SNESSetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional cli arguments
call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments
!--------------------------------------------------------------------------------------------------
! init fields
@@ -280,8 +275,7 @@ type(tSolutionState) function Polarisation_solution(incInfoIn,timeinc,timeinc_ol
SNESConvergedReason :: reason
external :: &
SNESSolve, &
SNESGetConvergedReason
SNESSolve
incInfo = incInfoIn
@@ -304,7 +298,7 @@ type(tSolutionState) function Polarisation_solution(incInfoIn,timeinc,timeinc_ol
!--------------------------------------------------------------------------------------------------
! solve BVP
call SNESSolve(snes,PETSC_NULL_OBJECT,solution_vec,ierr); CHKERRQ(ierr)
call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)
!--------------------------------------------------------------------------------------------------
! check convergence
@@ -375,10 +369,6 @@ subroutine Polarisation_formResidual(in,x_scal,f_scal,dummy,ierr)
integer(pInt) :: &
i, j, k, e
external :: &
SNESGetNumberFunctionEvals, &
SNESGetIterationNumber
F => x_scal(1:3,1:3,1,&
XG_RANGE,YG_RANGE,ZG_RANGE)
F_tau => x_scal(1:3,1:3,2,&
@@ -695,11 +685,6 @@ subroutine Polarisation_destroy()
implicit none
PetscErrorCode :: ierr
external :: &
VecDestroy, &
SNESDestroy, &
DMDestroy
call VecDestroy(solution_vec,ierr); CHKERRQ(ierr)
call SNESDestroy(snes,ierr); CHKERRQ(ierr)
call DMDestroy(da,ierr); CHKERRQ(ierr)


@@ -4,6 +4,10 @@
!> @brief Spectral solver for thermal conduction
!--------------------------------------------------------------------------------------------------
module spectral_thermal
#include <petsc/finclude/petscsnes.h>
#include <petsc/finclude/petscdmda.h>
use PETScdmda
use PETScsnes
use prec, only: &
pInt, &
pReal
@@ -18,7 +22,6 @@ module spectral_thermal
implicit none
private
#include <petsc/finclude/petsc.h90>
character (len=*), parameter, public :: &
spectral_thermal_label = 'spectralthermal'
@@ -49,10 +52,7 @@ module spectral_thermal
spectral_thermal_forward, &
spectral_thermal_destroy
external :: &
PETScFinalize, &
MPI_Abort, &
MPI_Bcast, &
MPI_Allreduce
PETScErrorF ! is called in the CHKERRQ macro
contains
@@ -92,22 +92,15 @@ subroutine spectral_thermal_init
PetscErrorCode :: ierr
external :: &
SNESCreate, &
SNESSetOptionsPrefix, &
DMDACreate3D, &
SNESSetDM, &
DMDAGetCorners, &
DMCreateGlobalVector, &
DMDASNESSetFunctionLocal, &
SNESSetFromOptions
SNESsetOptionsPrefix, &
DMDAgetCorners, &
DMDASNESsetFunctionLocal
mainProcess: if (worldrank == 0_pInt) then
write(6,'(/,a)') ' <<<+- spectral_thermal init -+>>>'
write(6,'(/,a)') ' Shanthraj et al., Handbook of Mechanics of Materials, volume in press,'
write(6,'(/,a)') ' chapter Spectral Solvers for Crystal Plasticity and Multi-Physics Simulations. Springer, 2018'
write(6,'(a15,a)') ' Current time: ',IO_timeStamp()
#include "compilation_info.f90"
endif mainProcess
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
@@ -117,21 +110,23 @@ subroutine spectral_thermal_init
do proc = 1, worldsize
call MPI_Bcast(localK(proc),1,MPI_INTEGER,proc-1,PETSC_COMM_WORLD,ierr)
enddo
call DMDACreate3d(PETSC_COMM_WORLD, &
call DMDACreate3D(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
grid(1),grid(2),grid(3), & ! global grid
1, 1, worldsize, &
1, 0, & ! #dof (temperature field), ghost boundary width (domain overlap)
grid (1),grid(2),localK, & ! local grid
thermal_grid,ierr) ! handle, error
1, 0, & !< #dof (thermal phase field), ghost boundary width (domain overlap)
[grid(1)],[grid(2)],localK, & !< local grid
thermal_grid,ierr) !< handle, error
CHKERRQ(ierr)
call SNESSetDM(thermal_snes,thermal_grid,ierr); CHKERRQ(ierr) ! connect snes to da
call DMsetFromOptions(thermal_grid,ierr); CHKERRQ(ierr)
call DMsetUp(thermal_grid,ierr); CHKERRQ(ierr)
call DMCreateGlobalVector(thermal_grid,solution ,ierr); CHKERRQ(ierr) ! global solution vector (grid x 1, i.e. every def grad tensor)
call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,spectral_thermal_formResidual,&
PETSC_NULL_OBJECT,ierr) ! residual vector of same shape as solution vector
PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector
CHKERRQ(ierr)
call SNESSetFromOptions(thermal_snes,ierr); CHKERRQ(ierr) ! pull it all together with additional cli arguments
call SNESSetFromOptions(thermal_snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments
!--------------------------------------------------------------------------------------------------
! init fields
@@ -207,8 +202,7 @@ type(tSolutionState) function spectral_thermal_solution(timeinc,timeinc_old,load
external :: &
VecMin, &
VecMax, &
SNESSolve, &
SNESGetConvergedReason
SNESSolve
spectral_thermal_solution%converged =.false.
@@ -217,7 +211,7 @@ type(tSolutionState) function spectral_thermal_solution(timeinc,timeinc_old,load
params%timeinc = timeinc
params%timeincOld = timeinc_old
call SNESSolve(thermal_snes,PETSC_NULL_OBJECT,solution,ierr); CHKERRQ(ierr)
call SNESSolve(thermal_snes,PETSC_NULL_VEC,solution,ierr); CHKERRQ(ierr)
call SNESGetConvergedReason(thermal_snes,reason,ierr); CHKERRQ(ierr)
if (reason < 1) then
@@ -247,14 +241,12 @@ type(tSolutionState) function spectral_thermal_solution(timeinc,timeinc_old,load
call VecMin(solution,position,minTemperature,ierr); CHKERRQ(ierr)
call VecMax(solution,position,maxTemperature,ierr); CHKERRQ(ierr)
if (worldrank == 0) then
if (spectral_thermal_solution%converged) &
write(6,'(/,a)') ' ... thermal conduction converged ..................................'
write(6,'(/,a,f8.4,2x,f8.4,2x,f8.4,/)',advance='no') ' Minimum|Maximum|Delta Temperature / K = ',&
minTemperature, maxTemperature, stagNorm
write(6,'(/,a)') ' ==========================================================================='
flush(6)
endif
end function spectral_thermal_solution
@@ -361,9 +353,6 @@ subroutine spectral_thermal_forward()
PetscScalar, dimension(:,:,:), pointer :: x_scal
PetscErrorCode :: ierr
external :: &
SNESGetDM
if (cutBack) then
temperature_current = temperature_lastInc
temperature_stagInc = temperature_lastInc
@@ -411,10 +400,6 @@ subroutine spectral_thermal_destroy()
implicit none
PetscErrorCode :: ierr
external :: &
VecDestroy, &
SNESDestroy
call VecDestroy(solution,ierr); CHKERRQ(ierr)
call SNESDestroy(thermal_snes,ierr); CHKERRQ(ierr)


@@ -5,6 +5,8 @@
!--------------------------------------------------------------------------------------------------
module spectral_utilities
use, intrinsic :: iso_c_binding
#include <petsc/finclude/petscsys.h>
use PETScSys
use prec, only: &
pReal, &
pInt
@@ -13,7 +15,6 @@ module spectral_utilities
implicit none
private
#include <petsc/finclude/petscsys.h>
include 'fftw3-mpi.f03'
logical, public :: cutBack = .false. !< cut back of BVP solver in case convergence is not achieved or a material point is terminally ill
@@ -147,6 +148,8 @@ module spectral_utilities
FIELD_DAMAGE_ID
private :: &
utilities_getFreqDerivative
external :: &
PETScErrorF ! is called in the CHKERRQ macro
contains
@@ -195,12 +198,6 @@ subroutine utilities_init()
geomSize
implicit none
external :: &
PETScOptionsClear, &
PETScOptionsInsertString, &
MPI_Abort
PetscErrorCode :: ierr
integer(pInt) :: i, j, k
integer(pInt), dimension(3) :: k_s
@@ -214,6 +211,8 @@ subroutine utilities_init()
scalarSize = 1_C_INTPTR_T, &
vecSize = 3_C_INTPTR_T, &
tensorSize = 9_C_INTPTR_T
external :: &
PetscOptionsInsertString
write(6,'(/,a)') ' <<<+- spectral_utilities init -+>>>'
write(6,'(/,a)') ' Eisenlohr et al., International Journal of Plasticity, 46:3753, 2013'
@@ -232,13 +231,13 @@
trim(PETScDebug), &
' add more using the PETSc_Options keyword in numerics.config '; flush(6)
call PetscOptionsClear(PETSC_NULL_OBJECT,ierr)
call PETScOptionsClear(PETSC_NULL_OPTIONS,ierr)
CHKERRQ(ierr)
if(debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OBJECT,trim(PETSCDEBUG),ierr)
if(debugPETSc) call PETScOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr)
CHKERRQ(ierr)
call PetscOptionsInsertString(PETSC_NULL_OBJECT,trim(petsc_defaultOptions),ierr)
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_defaultOptions),ierr)
CHKERRQ(ierr)
call PetscOptionsInsertString(PETSC_NULL_OBJECT,trim(petsc_options),ierr)
call PETScOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_options),ierr)
CHKERRQ(ierr)
grid1Red = grid(1)/2_pInt + 1_pInt
@@ -632,9 +631,6 @@ real(pReal) function utilities_divergenceRMS()
integer(pInt) :: i, j, k, ierr
complex(pReal), dimension(3) :: rescaledGeom
external :: &
MPI_Allreduce
write(6,'(/,a)') ' ... calculating divergence ................................................'
flush(6)
@@ -686,9 +682,6 @@ real(pReal) function utilities_curlRMS()
complex(pReal), dimension(3,3) :: curl_fourier
complex(pReal), dimension(3) :: rescaledGeom
external :: &
MPI_Allreduce
write(6,'(/,a)') ' ... calculating curl ......................................................'
flush(6)
@@ -962,9 +955,6 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
real(pReal), dimension(3,3,3,3) :: max_dPdF, min_dPdF
real(pReal) :: max_dPdF_norm, min_dPdF_norm, defgradDetMin, defgradDetMax, defgradDet
external :: &
MPI_Allreduce
write(6,'(/,a)') ' ... evaluating constitutive response ......................................'
flush(6)
@@ -1081,9 +1071,6 @@ function utilities_forwardField(timeinc,field_lastInc,rate,aim)
real(pReal), dimension(3,3) :: fieldDiff !< <a + adot*t> - aim
PetscErrorCode :: ierr
external :: &
MPI_Allreduce
utilities_forwardField = field_lastInc + rate*timeinc
if (present(aim)) then !< correct to match average
fieldDiff = sum(sum(sum(utilities_forwardField,dim=5),dim=4),dim=3)*wgt
@@ -1175,8 +1162,6 @@ subroutine utilities_updateIPcoords(F)
integer(pInt) :: i, j, k, m, ierr
real(pReal), dimension(3) :: step, offset_coords
real(pReal), dimension(3,3) :: Favg
external &
MPI_Bcast
!--------------------------------------------------------------------------------------------------
! integration in Fourier space