Merge branch 'petsc-64bit-integer' into 'development'

support for PETSc with 64bit integers

See merge request damask/DAMASK!496
Sharan Roongta 2022-01-17 09:16:39 +00:00
commit 40ee4d3dd1
24 changed files with 1005 additions and 840 deletions
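The diff applies one pattern throughout: with PETSc built for 64-bit indices (typically configured with --with-64-bit-indices), the default Fortran integer no longer matches the integer kinds expected by PETSc and MPI, so kinds are made explicit at every library boundary. MPI calls receive a status variable of kind MPI_INTEGER_KIND plus kind-qualified count and root arguments, PETSc calls report through a separate PetscErrorCode variable (err_PETSc), values handed to PETSc are cast to pPetscInt, the old pInt kind disappears from purely internal integers, and the Marc tooling patches (whose own @@/-/+ lines show up as content in the hunks below) adjust the corresponding compiler flags. A minimal sketch of the MPI side of this convention, assuming an MPI library whose Fortran module provides MPI_INTEGER_KIND as the modified sources rely on; it mirrors the diff but is not DAMASK code:

    ! Sketch only; names follow the diff, the program itself is illustrative.
    program mpi_kind_sketch
      use mpi_f08
      implicit none

      integer(MPI_INTEGER_KIND) :: err_MPI                ! MPI status, separate from PETSc/HDF5 status variables
      real(kind(1.0d0)), dimension(9) :: K_ref = 0.0d0    ! stand-in for a reduced quantity

      call MPI_Init(err_MPI)
      if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

      ! counts and roots carry an explicit kind, e.g. 9_MPI_INTEGER_KIND instead of a default-kind 9
      call MPI_Allreduce(MPI_IN_PLACE,K_ref,9_MPI_INTEGER_KIND,MPI_DOUBLE_PRECISION, &
                         MPI_SUM,MPI_COMM_WORLD,err_MPI)
      if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

      call MPI_Finalize(err_MPI)
    end program mpi_kind_sketch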


@@ -93,6 +93,27 @@ test_mesh_GNU:
     - cd PRIVATE/testing/pytest
     - pytest -k 'compile and mesh' --basetemp ${TESTROOT}/compile_mesh_GNU
 
+test_grid_GNU-64bit:
+  stage: compile
+  script:
+    - module load Compiler/GNU/10 Libraries/PETSc/3.16.2/64bit
+    - cd PRIVATE/testing/pytest
+    - pytest -k 'compile and grid' --basetemp ${TESTROOT}/compile_grid_GNU-64bit
+
+test_mesh_GNU-64bit:
+  stage: compile
+  script:
+    - module load Compiler/GNU/10 Libraries/PETSc/3.16.2/64bit
+    - cd PRIVATE/testing/pytest
+    - pytest -k 'compile and mesh' --basetemp ${TESTROOT}/compile_mesh_GNU-64bit
+
+test_grid_IntelLLVM:
+  stage: compile
+  script:
+    - module load ${COMPILER_INTELLLVM} ${MPI_INTELLLVM} ${PETSC_INTELLLVM}
+    - cd PRIVATE/testing/pytest
+    - pytest -k 'compile and grid' --basetemp ${TESTROOT}/compile_grid_IntelLLVM
+
 test_mesh_IntelLLVM:
   stage: compile
   script:
@@ -121,7 +142,6 @@ test_Marc:
     - cd PRIVATE/testing/pytest
     - pytest -k 'compile and Marc' --basetemp ${TESTROOT}/compile_Marc
 
 setup_grid:
   stage: compile
   script:


@@ -12,17 +12,15 @@
  # AEM
  if test "$MARCDLLOUTDIR" = ""; then
-@@ -390,8 +395,8 @@
+@@ -390,7 +395,7 @@
    I8DEFINES=
    I8CDEFINES=
   else
 -   I8FFLAGS="-i8"
--   I8DEFINES="-DI64"
 +   I8FFLAGS="-i8 -integer-size 64"
-+   I8DEFINES="-DI64 -DINT=8"
+    I8DEFINES="-DI64"
    I8CDEFINES="-U_DOUBLE -D_SINGLE"
   fi
 @@ -498,7 +503,7 @@
    PROFILE=" $PROFILE -pg"
  fi


@@ -12,17 +12,15 @@
  # AEM
  if test "$MARCDLLOUTDIR" = ""; then
-@@ -439,8 +444,8 @@
+@@ -439,7 +444,7 @@
    I8DEFINES=
    I8CDEFINES=
   else
 -   I8FFLAGS="-i8"
--   I8DEFINES="-DI64"
 +   I8FFLAGS="-i8 -integer-size 64"
-+   I8DEFINES="-DI64 -DINT=8"
+    I8DEFINES="-DI64"
    I8CDEFINES="-U_DOUBLE -D_SINGLE"
   fi
 @@ -556,7 +561,7 @@
    PROFILE=" $PROFILE -pg"
  fi


@@ -12,17 +12,15 @@
  # AEM
  if test "$MARCDLLOUTDIR" = ""; then
   DLLOUTDIR="$MARC_LIB"
-@@ -439,8 +444,8 @@ if test "$MARC_INTEGER_SIZE" = "i4" ; then
+@@ -439,7 +444,7 @@ if test "$MARC_INTEGER_SIZE" = "i4" ; then
    I8DEFINES=
    I8CDEFINES=
   else
 -   I8FFLAGS="-i8"
--   I8DEFINES="-DI64"
 +   I8FFLAGS="-i8 -integer-size 64"
-+   I8DEFINES="-DI64 -DINT=8"
+    I8DEFINES="-DI64"
    I8CDEFINES="-U_DOUBLE -D_SINGLE"
   fi
 @@ -556,7 +561,7 @@ then
    PROFILE=" $PROFILE -pg"
  fi
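The three hunks above appear to edit the patches that DAMASK applies to the Marc include scripts, so the embedded @@, - and + lines belong to the patched content itself: the i8 branch keeps I8FFLAGS="-i8 -integer-size 64", while the previously patched-in -DINT=8 is dropped from I8DEFINES. What -i8/-integer-size 64 means in practice is that default integers in the Fortran user subroutines become 8 bytes wide; the following stand-alone check (illustrative only, not part of the commit) makes that visible:

    program default_int_size
      implicit none
      integer :: i
      ! prints 8 when compiled with ifort -i8 (or -integer-size 64), 4 otherwise
      print '(a,i0,a)', 'default integer occupies ', storage_size(i)/8, ' bytes'
    end program default_int_size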


@@ -32,18 +32,18 @@ module CPFEM
   real(pReal), dimension (:,:,:,:), allocatable, private :: &
     CPFEM_dcsdE_knownGood                              !< known good tangent
-  integer(pInt), public :: &
-    cycleCounter = 0_pInt                              !< needs description
-  integer(pInt), parameter, public :: &
-    CPFEM_CALCRESULTS     = 2_pInt**0_pInt, &
-    CPFEM_AGERESULTS      = 2_pInt**1_pInt, &
-    CPFEM_BACKUPJACOBIAN  = 2_pInt**2_pInt, &
-    CPFEM_RESTOREJACOBIAN = 2_pInt**3_pInt
+  integer, public :: &
+    cycleCounter = 0                                   !< needs description
+  integer, parameter, public :: &
+    CPFEM_CALCRESULTS     = 2**0, &
+    CPFEM_AGERESULTS      = 2**1, &
+    CPFEM_BACKUPJACOBIAN  = 2**2, &
+    CPFEM_RESTOREJACOBIAN = 2**3
   type, private :: tNumerics
     integer :: &
       iJacoStiffness                                   !< frequency of stiffness update
   end type tNumerics
   type(tNumerics), private :: num
@@ -134,12 +134,12 @@ end subroutine CPFEM_init
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyStress, jacobian)
-  integer(pInt), intent(in) :: elFE, &                 !< FE element number
+  integer, intent(in) :: elFE, &                       !< FE element number
                                ip                      !< integration point number
   real(pReal), intent(in) :: dt                        !< time increment
   real(pReal), dimension (3,3), intent(in) :: ffn, &   !< deformation gradient for t=t0
                                               ffn1     !< deformation gradient for t=t1
-  integer(pInt), intent(in) :: mode                    !< computation mode 1: regular computation plus aging of results
+  integer, intent(in) :: mode                          !< computation mode 1: regular computation plus aging of results
   real(pReal), intent(in) :: temperature_inp           !< temperature
   real(pReal), dimension(6), intent(out) :: cauchyStress   !< stress as 6 vector
   real(pReal), dimension(6,6), intent(out) :: jacobian     !< jacobian as 66 tensor (Consistent tangent dcs/dE)
@@ -150,7 +150,7 @@ subroutine CPFEM_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyS
   real(pReal), dimension (3,3,3,3) :: H_sym, &
                                       H
-  integer(pInt) elCP, &                                ! crystal plasticity element number
+  integer elCP, &                                      ! crystal plasticity element number
           i, j, k, l, m, n, ph, homog, mySource,ce
   real(pReal), parameter :: ODD_STRESS = 1e15_pReal, & !< return value for stress if terminallyIll
@@ -171,17 +171,17 @@ subroutine CPFEM_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip, cauchyS
     print'(a,/)', '#############################################'; flush (6)
   endif
-  if (iand(mode, CPFEM_BACKUPJACOBIAN) /= 0_pInt) &
+  if (iand(mode, CPFEM_BACKUPJACOBIAN) /= 0) &
     CPFEM_dcsde_knownGood = CPFEM_dcsde
-  if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0_pInt) &
+  if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0) &
     CPFEM_dcsde = CPFEM_dcsde_knownGood
-  if (iand(mode, CPFEM_AGERESULTS) /= 0_pInt) call CPFEM_forward
+  if (iand(mode, CPFEM_AGERESULTS) /= 0) call CPFEM_forward
   homogenization_F0(1:3,1:3,ce) = ffn
   homogenization_F(1:3,1:3,ce) = ffn1
-  if (iand(mode, CPFEM_CALCRESULTS) /= 0_pInt) then
+  if (iand(mode, CPFEM_CALCRESULTS) /= 0) then
     validCalculation: if (terminallyIll) then
       call random_number(rnd)
@@ -264,8 +264,8 @@ end subroutine CPFEM_forward
 !--------------------------------------------------------------------------------------------------
 subroutine CPFEM_results(inc,time)
-  integer(pInt), intent(in) :: inc
+  integer, intent(in) :: inc
   real(pReal), intent(in) :: time
   call results_openJobFile
   call results_addIncrement(inc,time)
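The CPFEM hunks show the pInt clean-up that accompanies the kind changes: module counters and the computation-mode bit flags drop the pInt kind and its _pInt literal suffixes and become plain default-integer 2**n constants tested with iand. A self-contained sketch of that flag idiom (illustrative names, not the DAMASK module):

    program flag_sketch
      implicit none
      integer, parameter :: CALCRESULTS     = 2**0, &   ! default-kind bit flags, no _pInt suffix
                            AGERESULTS      = 2**1, &
                            BACKUPJACOBIAN  = 2**2, &
                            RESTOREJACOBIAN = 2**3
      integer :: mode

      mode = ior(CALCRESULTS,BACKUPJACOBIAN)            ! combine two flags into one mode word
      if (iand(mode,BACKUPJACOBIAN) /= 0) print *, 'backing up the tangent'
      if (iand(mode,AGERESULTS)     /= 0) print *, 'aging results (not set here)'
    end program flag_sketch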


@@ -223,9 +223,9 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
   integer :: computationMode, i, node, CPnodeID
   integer(pI32) :: defaultNumThreadsInt                !< default value set by Marc
-  integer(pInt), save :: &
-    theInc       = -1_pInt, &                          !< needs description
-    lastLovl     =  0_pInt                             !< lovl in previous call to marc hypela2
+  integer, save :: &
+    theInc       = -1, &                               !< needs description
+    lastLovl     =  0                                  !< lovl in previous call to marc hypela2
   real(pReal), save :: &
     theTime      = 0.0_pReal, &                        !< needs description
     theDelta     = 0.0_pReal


@@ -1857,13 +1857,13 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
     localShape
   integer(HSIZE_T), intent(out), dimension(size(localShape,1)):: &
     myStart, &
     globalShape                                        !< shape of the dataset (all processes)
   integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
   integer, dimension(worldsize) :: &
     readSize                                           !< contribution of all processes
-  integer :: ierr
   integer :: hdferr
+  integer(MPI_INTEGER_KIND) :: err_MPI
 
 !-------------------------------------------------------------------------------------------------
 ! creating a property list for transfer properties (is collective for MPI)
@@ -1877,8 +1877,8 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
   if (parallel) then
     call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     if(hdferr < 0) error stop 'HDF5 error'
-    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)        ! get total output size over each process
-    if (ierr /= 0) error stop 'MPI error'
+    call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,err_MPI) ! get total output size over each process
+    if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   end if
 #endif
   myStart = int(0,HSIZE_T)
@@ -1956,7 +1956,8 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
   integer, dimension(worldsize) :: writeSize           !< contribution of all processes
   integer(HID_T) :: dcpl
-  integer :: ierr, hdferr
+  integer :: hdferr
+  integer(MPI_INTEGER_KIND) :: err_MPI
   integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T
@@ -1977,8 +1978,8 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
   writeSize(worldrank+1) = int(myShape(ubound(myShape,1)))
 #ifdef PETSC
   if (parallel) then
-    call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)        ! get total output size over each process
-    if (ierr /= 0) error stop 'MPI error'
+    call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,err_MPI) ! get total output size over each process
+    if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   end if
 #endif
   myStart = int(0,HSIZE_T)
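In HDF5_utilities the shared ierr is split into two status variables: HDF5 keeps reporting through a default integer (hdferr) while MPI calls get integer(MPI_INTEGER_KIND) :: err_MPI, and the datatype passed to MPI_allreduce changes from the C-style MPI_INT to the Fortran MPI_INTEGER. A hedged sketch of that split, assuming a parallel HDF5 build and an MPI module that provides MPI_INTEGER_KIND:

    program split_error_kinds
      use mpi_f08
      use hdf5
      implicit none

      integer                   :: hdferr               ! HDF5 status stays a default integer
      integer(MPI_INTEGER_KIND) :: err_MPI              ! MPI status gets its own kind
      integer, dimension(1)     :: readSize = 1

      call MPI_Init(err_MPI)
      if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

      call h5open_f(hdferr)                             ! initialize the HDF5 Fortran interface
      if (hdferr < 0) error stop 'HDF5 error'

      call MPI_Allreduce(MPI_IN_PLACE,readSize,1_MPI_INTEGER_KIND,MPI_INTEGER, &
                         MPI_SUM,MPI_COMM_WORLD,err_MPI)
      if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

      call h5close_f(hdferr)
      call MPI_Finalize(err_MPI)
    end program split_error_kinds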


@@ -151,7 +151,7 @@ pure logical function validBase64(base64_str)
   l = len(base64_str,pI64)
   validBase64 = .true.
-  if(mod(l,4_pI64)/=0_pI64 .or. l < 4_pInt)                                    validBase64 = .false.
+  if(mod(l,4_pI64)/=0_pI64 .or. l < 4_pI64)                                    validBase64 = .false.
   if(verify(base64_str(:l-2_pI64),base64_encoding, kind=pI64) /= 0_pI64)       validBase64 = .false.
   if(verify(base64_str(l-1_pI64:),base64_encoding//'=',kind=pI64) /= 0_pI64)   validBase64 = .false.


@@ -75,7 +75,6 @@ program DAMASK_grid
   integer :: &
     i, j, m, field, &
     errorID = 0, &
-    ierr,&
     cutBackLevel = 0, &                                !< cut back level \f$ t = \frac{t_{inc}}{2^l} \f$
     stepFraction = 0, &                                !< fraction of current time interval
     l = 0, &                                           !< current load case
@@ -86,6 +85,7 @@ program DAMASK_grid
     nActiveFields = 0, &
     maxCutBack, &                                      !< max number of cut backs
     stagItMax                                          !< max number of field level staggered iterations
+  integer(MPI_INTEGER_KIND) :: err_MPI
   character(len=pStringLen) :: &
     incInfo
@@ -455,23 +455,23 @@ program DAMASK_grid
       print'(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' NOT converged'
     endif; flush(IO_STDOUT)
 
-    call MPI_Allreduce(interface_SIGUSR1,signal,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
-    if (ierr /= 0) error stop 'MPI error'
+    call MPI_Allreduce(interface_SIGUSR1,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+    if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
     if (mod(inc,loadCases(l)%f_out) == 0 .or. signal) then
       print'(/,1x,a)', '... writing results to file ...............................................'
       flush(IO_STDOUT)
       call CPFEM_results(totalIncsCounter,t)
     endif
     if (signal) call interface_setSIGUSR1(.false.)
-    call MPI_Allreduce(interface_SIGUSR2,signal,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
-    if (ierr /= 0) error stop 'MPI error'
+    call MPI_Allreduce(interface_SIGUSR2,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+    if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
     if (mod(inc,loadCases(l)%f_restart) == 0 .or. signal) then
       call mechanical_restartWrite
      call CPFEM_restartWrite
     endif
     if (signal) call interface_setSIGUSR2(.false.)
-    call MPI_Allreduce(interface_SIGTERM,signal,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
-    if (ierr /= 0) error stop 'MPI error'
+    call MPI_Allreduce(interface_SIGTERM,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+    if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
     if (signal) exit loadCaseLooping
   endif skipping
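DAMASK_grid applies the same convention to its signal handling: the SIGUSR1/SIGUSR2/SIGTERM flags are OR-reduced across all ranks with a count of 1_MPI_INTEGER_KIND so that every process takes the same decision about writing results, writing restart data, or terminating. A minimal sketch of that collective flag exchange (generic names, not the DAMASK interface module):

    program signal_sketch
      use mpi_f08
      implicit none

      logical :: signal_local, signal
      integer(MPI_INTEGER_KIND) :: err_MPI

      call MPI_Init(err_MPI)
      if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

      signal_local = .false.                            ! would be set by a signal handler on one rank
      call MPI_Allreduce(signal_local,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL, &
                         MPI_LOR,MPI_COMM_WORLD,err_MPI)
      if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

      if (signal) print *, 'all ranks write results/restart now'
      call MPI_Finalize(err_MPI)
    end program signal_sketch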


@@ -27,14 +27,14 @@ module discretization_grid
   private
   integer, dimension(3), public, protected :: &
     grid                                               !< (global) grid
   integer, public, protected :: &
     grid3, &                                           !< (local) grid in 3rd direction
     grid3Offset                                        !< (local) grid offset in 3rd direction
   real(pReal), dimension(3), public, protected :: &
     geomSize                                           !< (global) physical size
   real(pReal), public, protected :: &
     size3, &                                           !< (local) size in 3rd direction
     size3offset                                        !< (local) size offset in 3rd direction
   public :: &
@@ -62,8 +62,8 @@ subroutine discretization_grid_init(restart)
   integer :: &
     j, &
-    debug_element, debug_ip, &
-    ierr
+    debug_element, debug_ip
+  integer(MPI_INTEGER_KIND) :: err_MPI
   integer(C_INTPTR_T) :: &
     devNull, z, z_offset
   integer, dimension(worldsize) :: &
@@ -88,13 +88,13 @@ subroutine discretization_grid_init(restart)
   end if
 
-  call MPI_Bcast(grid,3,MPI_INTEGER,0,MPI_COMM_WORLD, ierr)
-  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Bcast(grid,3_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   if (grid(1) < 2) call IO_error(844, ext_msg='cells(1) must be larger than 1')
-  call MPI_Bcast(geomSize,3,MPI_DOUBLE,0,MPI_COMM_WORLD, ierr)
-  if (ierr /= 0) error stop 'MPI error'
-  call MPI_Bcast(origin,3,MPI_DOUBLE,0,MPI_COMM_WORLD, ierr)
-  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Bcast(geomSize,3_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
+  call MPI_Bcast(origin,3_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 
   print'(/,1x,a,3(i12,1x))', 'cells a b c: ', grid
   print '(1x,a,3(es12.5,1x))', 'size x y z: ', geomSize
@@ -118,14 +118,17 @@ subroutine discretization_grid_init(restart)
   myGrid = [grid(1:2),grid3]
   mySize = [geomSize(1:2),size3]
 
-  call MPI_Gather(product(grid(1:2))*grid3Offset,1,MPI_INTEGER,displs, 1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
-  if (ierr /= 0) error stop 'MPI error'
-  call MPI_Gather(product(myGrid), 1,MPI_INTEGER,sendcounts,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
-  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Gather(product(grid(1:2))*grid3Offset, 1_MPI_INTEGER_KIND,MPI_INTEGER,displs,&
+                  1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
+  call MPI_Gather(product(myGrid), 1_MPI_INTEGER_KIND,MPI_INTEGER,sendcounts,&
+                  1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 
   allocate(materialAt(product(myGrid)))
-  call MPI_Scatterv(materialAt_global,sendcounts,displs,MPI_INTEGER,materialAt,size(materialAt),MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
-  if (ierr /= 0) error stop 'MPI error'
+  call MPI_Scatterv(materialAt_global,sendcounts,displs,MPI_INTEGER,materialAt,size(materialAt),&
+                    MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 
   call discretization_init(materialAt, &
                            IPcoordinates0(myGrid,mySize,grid3Offset), &
@@ -307,7 +310,7 @@ subroutine readVTI(grid,geomSize,origin,material, &
       case('Float64')
         as_Int = int(prec_bytesToC_DOUBLE (asBytes(base64_str,headerType,compressed)))
       case default
-        call IO_error(844_pInt,ext_msg='unknown data type: '//trim(dataType))
+        call IO_error(844,ext_msg='unknown data type: '//trim(dataType))
     end select
   end function as_Int
@@ -335,7 +338,7 @@ subroutine readVTI(grid,geomSize,origin,material, &
       case('Float64')
         as_pReal = real(prec_bytesToC_DOUBLE (asBytes(base64_str,headerType,compressed)),pReal)
       case default
-        call IO_error(844_pInt,ext_msg='unknown data type: '//trim(dataType))
+        call IO_error(844,ext_msg='unknown data type: '//trim(dataType))
     end select
   end function as_pReal


@@ -69,7 +69,8 @@ subroutine grid_damage_spectral_init()
   PetscInt, dimension(0:worldsize-1) :: localK
   DM :: damage_grid
   Vec :: uBound, lBound
-  PetscErrorCode :: ierr
+  integer(MPI_INTEGER_KIND) :: err_MPI
+  PetscErrorCode :: err_PETSc
   class(tNode), pointer :: &
     num_grid, &
     num_generic
@@ -99,50 +100,51 @@ subroutine grid_damage_spectral_init()
 !--------------------------------------------------------------------------------------------------
 ! set default and user defined options for PETSc
   call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-damage_snes_type newtonls -damage_snes_mf &
-                                &-damage_snes_ksp_ew -damage_ksp_type fgmres',ierr)
-  CHKERRQ(ierr)
-  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
-  CHKERRQ(ierr)
+                                &-damage_snes_ksp_ew -damage_ksp_type fgmres',err_PETSc)
+  CHKERRQ(err_PETSc)
+  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
+  CHKERRQ(err_PETSc)
 
 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
-  call SNESCreate(PETSC_COMM_WORLD,damage_snes,ierr); CHKERRQ(ierr)
-  call SNESSetOptionsPrefix(damage_snes,'damage_',ierr);CHKERRQ(ierr)
-  localK            = 0
-  localK(worldrank) = grid3
-  call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
+  call SNESCreate(PETSC_COMM_WORLD,damage_snes,err_PETSc); CHKERRQ(err_PETSc)
+  call SNESSetOptionsPrefix(damage_snes,'damage_',err_PETSc);CHKERRQ(err_PETSc)
+  localK            = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
+  call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call DMDACreate3D(PETSC_COMM_WORLD, &
          DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &              ! cut off stencil at boundary
         DMDA_STENCIL_BOX, &                                                  ! Moore (26) neighborhood around central point
-        grid(1),grid(2),grid(3), &                                           ! global grid
-        1, 1, worldsize, &
-        1, 0, &                                                              ! #dof (damage phase field), ghost boundary width (domain overlap)
-        [grid(1)],[grid(2)],localK, &                                        ! local grid
-        damage_grid,ierr)                                                    ! handle, error
-  CHKERRQ(ierr)
-  call SNESSetDM(damage_snes,damage_grid,ierr); CHKERRQ(ierr)                ! connect snes to da
-  call DMsetFromOptions(damage_grid,ierr); CHKERRQ(ierr)
-  call DMsetUp(damage_grid,ierr); CHKERRQ(ierr)
-  call DMCreateGlobalVector(damage_grid,solution_vec,ierr); CHKERRQ(ierr)    ! global solution vector (grid x 1, i.e. every def grad tensor)
-  call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr)        ! residual vector of same shape as solution vector
-  CHKERRQ(ierr)
-  call SNESSetFromOptions(damage_snes,ierr); CHKERRQ(ierr)                   ! pull it all together with additional CLI arguments
-  call SNESGetType(damage_snes,snes_type,ierr); CHKERRQ(ierr)
+        int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                     ! global grid
+        1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+        1_pPetscInt, 0_pPetscInt, &                                          ! #dof (phi, scalar), ghost boundary width (domain overlap)
+        [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &          ! local grid
+        damage_grid,err_PETSc)                                               ! handle, error
+  CHKERRQ(err_PETSc)
+  call SNESSetDM(damage_snes,damage_grid,err_PETSc); CHKERRQ(err_PETSc)      ! connect snes to da
+  call DMsetFromOptions(damage_grid,err_PETSc); CHKERRQ(err_PETSc)
+  call DMsetUp(damage_grid,err_PETSc); CHKERRQ(err_PETSc)
+  call DMCreateGlobalVector(damage_grid,solution_vec,err_PETSc); CHKERRQ(err_PETSc)                 ! global solution vector (grid x 1, i.e. every def grad tensor)
+  call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc)   ! residual vector of same shape as solution vector
+  CHKERRQ(err_PETSc)
+  call SNESSetFromOptions(damage_snes,err_PETSc); CHKERRQ(err_PETSc)         ! pull it all together with additional CLI arguments
+  call SNESGetType(damage_snes,snes_type,err_PETSc); CHKERRQ(err_PETSc)
   if (trim(snes_type) == 'vinewtonrsls' .or. &
       trim(snes_type) == 'vinewtonssls') then
-    call DMGetGlobalVector(damage_grid,lBound,ierr); CHKERRQ(ierr)
-    call DMGetGlobalVector(damage_grid,uBound,ierr); CHKERRQ(ierr)
-    call VecSet(lBound,0.0_pReal,ierr); CHKERRQ(ierr)
-    call VecSet(uBound,1.0_pReal,ierr); CHKERRQ(ierr)
-    call SNESVISetVariableBounds(damage_snes,lBound,uBound,ierr)             ! variable bounds for variational inequalities like contact mechanics, damage etc.
-    call DMRestoreGlobalVector(damage_grid,lBound,ierr); CHKERRQ(ierr)
-    call DMRestoreGlobalVector(damage_grid,uBound,ierr); CHKERRQ(ierr)
+    call DMGetGlobalVector(damage_grid,lBound,err_PETSc); CHKERRQ(err_PETSc)
+    call DMGetGlobalVector(damage_grid,uBound,err_PETSc); CHKERRQ(err_PETSc)
+    call VecSet(lBound,0.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
+    call VecSet(uBound,1.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
+    call SNESVISetVariableBounds(damage_snes,lBound,uBound,err_PETSc)        ! variable bounds for variational inequalities like contact mechanics, damage etc.
+    call DMRestoreGlobalVector(damage_grid,lBound,err_PETSc); CHKERRQ(err_PETSc)
+    call DMRestoreGlobalVector(damage_grid,uBound,err_PETSc); CHKERRQ(err_PETSc)
   end if
 
 !--------------------------------------------------------------------------------------------------
 ! init fields
-  call DMDAGetCorners(damage_grid,xstart,ystart,zstart,xend,yend,zend,ierr)
-  CHKERRQ(ierr)
+  call DMDAGetCorners(damage_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc)
+  CHKERRQ(err_PETSc)
   xend = xstart + xend - 1
   yend = ystart + yend - 1
   zend = zstart + zend - 1
@@ -150,7 +152,7 @@ subroutine grid_damage_spectral_init()
   allocate(phi_lastInc(grid(1),grid(2),grid3), source=1.0_pReal)
   allocate(phi_stagInc(grid(1),grid(2),grid3), source=1.0_pReal)
-  call VecSet(solution_vec,1.0_pReal,ierr); CHKERRQ(ierr)
+  call VecSet(solution_vec,1.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
   call updateReference
@@ -169,7 +171,8 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
   PetscInt :: devNull
   PetscReal :: phi_min, phi_max, stagNorm
-  PetscErrorCode :: ierr
+  integer(MPI_INTEGER_KIND) :: err_MPI
+  PetscErrorCode :: err_PETSc
   SNESConvergedReason :: reason
   solution%converged =.false.
@@ -178,8 +181,10 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
 ! set module wide availabe data
   params%Delta_t = Delta_t
-  call SNESSolve(damage_snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr)
-  call SNESGetConvergedReason(damage_snes,reason,ierr); CHKERRQ(ierr)
+  call SNESSolve(damage_snes,PETSC_NULL_VEC,solution_vec,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call SNESGetConvergedReason(damage_snes,reason,err_PETSc)
+  CHKERRQ(err_PETSc)
   if (reason < 1) then
     solution%converged = .false.
@@ -189,9 +194,11 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
     solution%iterationsNeeded = totalIter
   end if
   stagNorm = maxval(abs(phi_current - phi_stagInc))
-  call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,ierr)
+  call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi_current))
-  call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,ierr)
+  call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   phi_stagInc = phi_current
 
 !--------------------------------------------------------------------------------------------------
@@ -202,8 +209,8 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
     call homogenization_set_phi(phi_current(i,j,k),ce)
   end do; end do; end do
-  call VecMin(solution_vec,devNull,phi_min,ierr); CHKERRQ(ierr)
-  call VecMax(solution_vec,devNull,phi_max,ierr); CHKERRQ(ierr)
+  call VecMin(solution_vec,devNull,phi_min,err_PETSc); CHKERRQ(err_PETSc)
+  call VecMax(solution_vec,devNull,phi_max,err_PETSc); CHKERRQ(err_PETSc)
   if (solution%converged) &
     print'(/,1x,a)', '... nonlocal damage converged .....................................'
   print'(/,1x,a,f8.6,2x,f8.6,2x,e11.4)', 'Minimum|Maximum|Delta Damage = ', phi_min, phi_max, stagNorm
@@ -222,17 +229,19 @@ subroutine grid_damage_spectral_forward(cutBack)
   integer :: i, j, k, ce
   DM :: dm_local
   PetscScalar, dimension(:,:,:), pointer :: x_scal
-  PetscErrorCode :: ierr
+  PetscErrorCode :: err_PETSc
   if (cutBack) then
     phi_current = phi_lastInc
     phi_stagInc = phi_lastInc
 !--------------------------------------------------------------------------------------------------
 ! reverting damage field state
-    call SNESGetDM(damage_snes,dm_local,ierr); CHKERRQ(ierr)
-    call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr)                        !< get the data out of PETSc to work with
+    call SNESGetDM(damage_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc)
+    call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,err_PETSc)                                  !< get the data out of PETSc to work with
+    CHKERRQ(err_PETSc)
     x_scal(xstart:xend,ystart:yend,zstart:zend) = phi_current
-    call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr)
+    call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,err_PETSc)
+    CHKERRQ(err_PETSc)
     ce = 0
     do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
       ce = ce + 1
@@ -249,7 +258,7 @@ end subroutine grid_damage_spectral_forward
 !--------------------------------------------------------------------------------------------------
 !> @brief forms the spectral damage residual vector
 !--------------------------------------------------------------------------------------------------
-subroutine formResidual(in,x_scal,f_scal,dummy,ierr)
+subroutine formResidual(in,x_scal,f_scal,dummy,dummy_err)
   DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: &
     in
@@ -260,7 +269,7 @@ subroutine formResidual(in,x_scal,f_scal,dummy,dummy_err)
                 X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
     f_scal
   PetscObject :: dummy
-  PetscErrorCode :: ierr
+  PetscErrorCode :: dummy_err
   integer :: i, j, k, ce
@@ -311,7 +320,8 @@ end subroutine formResidual
 !--------------------------------------------------------------------------------------------------
 subroutine updateReference()
-  integer :: ce,ierr
+  integer :: ce
+  integer(MPI_INTEGER_KIND) :: err_MPI
   K_ref = 0.0_pReal
@@ -322,9 +332,11 @@ subroutine updateReference()
   end do
   K_ref = K_ref*wgt
-  call MPI_Allreduce(MPI_IN_PLACE,K_ref,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
+  call MPI_Allreduce(MPI_IN_PLACE,K_ref,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   mu_ref = mu_ref*wgt
-  call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr)
+  call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 end subroutine updateReference
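The PETSc-facing solver modules add explicit casts on top: every integer handed to PETSc is converted to PetscInt, which is 64 bit in this configuration while the grid bookkeeping stays in default integers. In the diff this appears as 0_pPetscInt literals and int(grid3,pPetscInt) casts ahead of DMDACreate3D. A stand-alone sketch of the cast pattern; pPetscInt is stood in here by selected_int_kind(18), whereas in DAMASK it follows the PETSc build configuration:

    program petscint_cast_sketch
      implicit none
      integer, parameter :: pPetscInt = selected_int_kind(18)   ! stand-in for a 64-bit PetscInt
      integer, parameter :: worldsize = 4, worldrank = 0, grid3 = 16
      integer(pPetscInt), dimension(0:worldsize-1) :: localK
      integer, dimension(3) :: grid = [32,32,64]

      localK = 0_pPetscInt                              ! literals carry the PETSc kind
      localK(worldrank) = int(grid3,pPetscInt)          ! explicit cast from default integer

      print '(a,3(i0,1x))', 'global cells handed to DMDACreate3D: ', int(grid,pPetscInt)
      print '(a,i0)',       'local slab size on this rank:        ', localK(worldrank)
    end program petscint_cast_sketch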


@@ -107,7 +107,8 @@ subroutine grid_mechanical_FEM_init
                      1.0_pReal,-1.0_pReal,-1.0_pReal,-1.0_pReal, &
                      1.0_pReal, 1.0_pReal, 1.0_pReal, 1.0_pReal], [4,8])
   real(pReal), dimension(3,3,3,3) :: devNull
-  PetscErrorCode :: ierr
+  PetscErrorCode :: err_PETSc
+  integer(MPI_INTEGER_KIND) :: err_MPI
   PetscScalar, pointer, dimension(:,:,:,:) :: &
     u_current,u_lastInc
   PetscInt, dimension(0:worldsize-1) :: localK
@@ -145,12 +146,11 @@ subroutine grid_mechanical_FEM_init
 ! set default and user defined options for PETSc
   call PetscOptionsInsertString(PETSC_NULL_OPTIONS, &
                                 '-mechanical_snes_type newtonls -mechanical_ksp_type fgmres &
-                                &-mechanical_ksp_max_it 25 -mechanical_pc_type ml &
-                                &-mechanical_mg_levels_ksp_type chebyshev', &
-                                ierr)
-  CHKERRQ(ierr)
-  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
-  CHKERRQ(ierr)
+                                &-mechanical_ksp_max_it 25 -mechanical_mg_levels_ksp_type chebyshev', &
+                                err_PETSc)
+  CHKERRQ(err_PETSc)
+  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
+  CHKERRQ(err_PETSc)
 
 !--------------------------------------------------------------------------------------------------
 ! allocate global fields
@@ -160,59 +160,60 @@ subroutine grid_mechanical_FEM_init
 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
-  call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,ierr)
-  CHKERRQ(ierr)
-  call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',ierr)
-  CHKERRQ(ierr)
-  localK            = 0
-  localK(worldrank) = grid3
-  call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
+  call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',err_PETSc)
+  CHKERRQ(err_PETSc)
+  localK            = 0_pPetscInt
+  localK(worldrank) = int(grid3,pPetscInt)
+  call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call DMDACreate3d(PETSC_COMM_WORLD, &
          DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, &
          DMDA_STENCIL_BOX, &
-         grid(1),grid(2),grid(3), &
-         1, 1, worldsize, &
-         3, 1, &
-         [grid(1)],[grid(2)],localK, &
-         mechanical_grid,ierr)
-  CHKERRQ(ierr)
-  call SNESSetDM(mechanical_snes,mechanical_grid,ierr)
-  CHKERRQ(ierr)
-  call DMsetFromOptions(mechanical_grid,ierr)
-  CHKERRQ(ierr)
-  call DMsetUp(mechanical_grid,ierr)
-  CHKERRQ(ierr)
-  call DMDASetUniformCoordinates(mechanical_grid,0.0_pReal,geomSize(1),0.0_pReal,geomSize(2),0.0_pReal,geomSize(3),ierr)
-  CHKERRQ(ierr)
-  call DMCreateGlobalVector(mechanical_grid,solution_current,ierr)
-  CHKERRQ(ierr)
-  call DMCreateGlobalVector(mechanical_grid,solution_lastInc,ierr)
-  CHKERRQ(ierr)
-  call DMCreateGlobalVector(mechanical_grid,solution_rate ,ierr)
-  CHKERRQ(ierr)
-  call DMSNESSetFunctionLocal(mechanical_grid,formResidual,PETSC_NULL_SNES,ierr)
-  CHKERRQ(ierr)
-  call DMSNESSetJacobianLocal(mechanical_grid,formJacobian,PETSC_NULL_SNES,ierr)
-  CHKERRQ(ierr)
-  call SNESSetConvergenceTest(mechanical_snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr)    ! specify custom convergence check function "_converged"
-  CHKERRQ(ierr)
-  call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1), ierr)                                 ! ignore linear solve failures
-  CHKERRQ(ierr)
-  call SNESSetFromOptions(mechanical_snes,ierr)                                                      ! pull it all together with additional cli arguments
-  CHKERRQ(ierr)
+         int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), &                    ! global grid
+         1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
+         3_pPetscInt, 1_pPetscInt, &                                                                ! #dof (u, vector), ghost boundary width (domain overlap)
+         [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, &                                ! local grid
+         mechanical_grid,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call SNESSetDM(mechanical_snes,mechanical_grid,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMsetFromOptions(mechanical_grid,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMsetUp(mechanical_grid,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDASetUniformCoordinates(mechanical_grid,0.0_pReal,geomSize(1),0.0_pReal,geomSize(2),0.0_pReal,geomSize(3),err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMCreateGlobalVector(mechanical_grid,solution_current,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMCreateGlobalVector(mechanical_grid,solution_lastInc,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMCreateGlobalVector(mechanical_grid,solution_rate ,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMSNESSetFunctionLocal(mechanical_grid,formResidual,PETSC_NULL_SNES,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMSNESSetJacobianLocal(mechanical_grid,formJacobian,PETSC_NULL_SNES,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call SNESSetConvergenceTest(mechanical_snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "_converged"
+  CHKERRQ(err_PETSc)
+  call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1_pPetscInt), err_PETSc)                  ! ignore linear solve failures
+  CHKERRQ(err_PETSc)
+  call SNESSetFromOptions(mechanical_snes,err_PETSc)                                                 ! pull it all together with additional cli arguments
+  CHKERRQ(err_PETSc)
 
 !--------------------------------------------------------------------------------------------------
 ! init fields
-  call VecSet(solution_current,0.0_pReal,ierr);CHKERRQ(ierr)
-  call VecSet(solution_lastInc,0.0_pReal,ierr);CHKERRQ(ierr)
-  call VecSet(solution_rate ,0.0_pReal,ierr);CHKERRQ(ierr)
-  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
-  CHKERRQ(ierr)
-  call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-  CHKERRQ(ierr)
-  call DMDAGetCorners(mechanical_grid,xstart,ystart,zstart,xend,yend,zend,ierr)                      ! local grid extent
-  CHKERRQ(ierr)
+  call VecSet(solution_current,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
+  call VecSet(solution_lastInc,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
+  call VecSet(solution_rate ,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAGetCorners(mechanical_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc)                 ! local grid extent
+  CHKERRQ(err_PETSc)
   xend = xstart+xend-1
   yend = ystart+yend-1
   zend = zstart+zend-1
@@ -240,17 +241,17 @@ subroutine grid_mechanical_FEM_init
   groupHandle = HDF5_openGroup(fileHandle,'solver')
   call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
-  call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-  if(ierr /=0) error stop 'MPI error'
+  call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
-  call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-  if(ierr /=0) error stop 'MPI error'
+  call MPI_Bcast(F_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
-  call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-  if(ierr /=0) error stop 'MPI error'
+  call MPI_Bcast(F_aim_lastInc,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
-  call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-  if(ierr /=0) error stop 'MPI error'
+  call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call HDF5_read(F,groupHandle,'F')
   call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
   call HDF5_read(u_current,groupHandle,'u')
@@ -266,19 +267,19 @@ subroutine grid_mechanical_FEM_init
   call utilities_constitutiveResponse(P_current,P_av,C_volAvg,devNull, &     ! stress field, stress avg, global average of stiffness and (min+max)/2
                                       F, &                                   ! target F
                                       0.0_pReal)                             ! time increment
-  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
-  CHKERRQ(ierr)
-  call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-  CHKERRQ(ierr)
+  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+  CHKERRQ(err_PETSc)
 
   restartRead2: if (interface_restartInc > 0) then
     print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
     call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
-    call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-    if(ierr /=0) error stop 'MPI error'
+    call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+    if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
     call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
-    call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-    if(ierr /=0) error stop 'MPI error'
+    call MPI_Bcast(C_volAvgLastInc,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+    if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 
     call HDF5_closeGroup(groupHandle)
     call HDF5_closeFile(fileHandle)
@@ -301,7 +302,7 @@ function grid_mechanical_FEM_solution(incInfoIn) result(solution)
     solution
 !--------------------------------------------------------------------------------------------------
 ! PETSc Data
-  PetscErrorCode :: ierr
+  PetscErrorCode :: err_PETSc
   SNESConvergedReason :: reason
   incInfo = incInfoIn
@@ -312,13 +313,13 @@ function grid_mechanical_FEM_solution(incInfoIn) result(solution)
 !--------------------------------------------------------------------------------------------------
 ! solve BVP
-  call SNESsolve(mechanical_snes,PETSC_NULL_VEC,solution_current,ierr)
-  CHKERRQ(ierr)
+  call SNESsolve(mechanical_snes,PETSC_NULL_VEC,solution_current,err_PETSc)
+  CHKERRQ(err_PETSc)
 
 !--------------------------------------------------------------------------------------------------
 ! check convergence
-  call SNESGetConvergedReason(mechanical_snes,reason,ierr)
-  CHKERRQ(ierr)
+  call SNESGetConvergedReason(mechanical_snes,reason,err_PETSc)
+  CHKERRQ(err_PETSc)
 
   solution%converged = reason > 0
   solution%iterationsNeeded = totalIter
@@ -348,22 +349,22 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
     deformation_BC
   type(rotation), intent(in) :: &
     rotation_BC
-  PetscErrorCode :: ierr
+  PetscErrorCode :: err_PETSc
   PetscScalar, pointer, dimension(:,:,:,:) :: &
     u_current,u_lastInc
 
-  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
-  CHKERRQ(ierr)
-  call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-  CHKERRQ(ierr)
+  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+  CHKERRQ(err_PETSc)
 
   if (cutBack) then
     C_volAvg = C_volAvgLastInc
   else
     C_volAvgLastInc = C_volAvg
     F_aimDot = merge(merge(.0_pReal,(F_aim-F_aim_lastInc)/Delta_t_old,stress_BC%mask),.0_pReal,guess) ! estimate deformation rate for prescribed stress components
     F_aim_lastInc = F_aim
 !-----------------------------------------------------------------------------------------------
@@ -380,13 +381,14 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
   endif
 
   if (guess) then
-    call VecWAXPY(solution_rate,-1.0_pReal,solution_lastInc,solution_current,ierr)
-    CHKERRQ(ierr)
-    call VecScale(solution_rate,1.0_pReal/Delta_t_old,ierr); CHKERRQ(ierr)
+    call VecWAXPY(solution_rate,-1.0_pReal,solution_lastInc,solution_current,err_PETSc)
+    CHKERRQ(err_PETSc)
+    call VecScale(solution_rate,1.0_pReal/Delta_t_old,err_PETSc)
+    CHKERRQ(err_PETSc)
   else
-    call VecSet(solution_rate,0.0_pReal,ierr); CHKERRQ(ierr)
+    call VecSet(solution_rate,0.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
   endif
-  call VecCopy(solution_current,solution_lastInc,ierr); CHKERRQ(ierr)
+  call VecCopy(solution_current,solution_lastInc,err_PETSc); CHKERRQ(err_PETSc)
 
   F_lastInc = F
@@ -401,12 +403,12 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
   if (stress_BC%myType=='dot_P') P_aim = P_aim &
                                        + merge(.0_pReal,stress_BC%values,stress_BC%mask)*Delta_t
 
-  call VecAXPY(solution_current,Delta_t,solution_rate,ierr); CHKERRQ(ierr)
+  call VecAXPY(solution_current,Delta_t,solution_rate,err_PETSc)
+  CHKERRQ(err_PETSc)
-  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
-  CHKERRQ(ierr)
-  call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-  CHKERRQ(ierr)
+  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+  CHKERRQ(err_PETSc)
 
 !--------------------------------------------------------------------------------------------------
 ! set module wide available data
@@ -432,15 +434,15 @@ end subroutine grid_mechanical_FEM_updateCoords
 !--------------------------------------------------------------------------------------------------
 subroutine grid_mechanical_FEM_restartWrite
 
-  PetscErrorCode :: ierr
+  PetscErrorCode :: err_PETSc
   integer(HID_T) :: fileHandle, groupHandle
   PetscScalar, dimension(:,:,:,:), pointer :: u_current,u_lastInc
 
-  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
-  CHKERRQ(ierr)
-  call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-  CHKERRQ(ierr)
+  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+  CHKERRQ(err_PETSc)
 
   print'(1x,a)', 'writing solver data required for restart to file'; flush(IO_STDOUT)
@@ -466,10 +468,10 @@ subroutine grid_mechanical_FEM_restartWrite
     call HDF5_closeFile(fileHandle)
   endif
 
-  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
-  CHKERRQ(ierr)
-  call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-  CHKERRQ(ierr)
+  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+  CHKERRQ(err_PETSc)
 
 end subroutine grid_mechanical_FEM_restartWrite
@@ -477,7 +479,7 @@ end subroutine grid_mechanical_FEM_restartWrite
 !--------------------------------------------------------------------------------------------------
 !> @brief convergence check
 !--------------------------------------------------------------------------------------------------
-subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,ierr)
+subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,err_PETSc)
   SNES :: snes_local
   PetscInt, intent(in) :: PETScIter
@@ -487,7 +489,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,e
     fnorm
   SNESConvergedReason :: reason
   PetscObject :: dummy
-  PetscErrorCode :: ierr
+  PetscErrorCode :: err_PETSc
   real(pReal) :: &
     err_div, &
     divTol, &
@@ -521,7 +523,7 @@ end subroutine converged
 !> @brief forms the residual vector
 !--------------------------------------------------------------------------------------------------
 subroutine formResidual(da_local,x_local, &
-                        f_local,dummy,ierr)
+                        f_local,dummy,err_PETSc)
   DM :: da_local
   Vec :: x_local, f_local
@@ -532,13 +534,14 @@ subroutine formResidual(da_local,x_local, &
     PETScIter, &
     nfuncs
   PetscObject :: dummy
-  PetscErrorCode :: ierr
+  PetscErrorCode :: err_PETSc
+  integer(MPI_INTEGER_KIND) :: err_MPI
   real(pReal), dimension(3,3,3,3) :: devNull
 
-  call SNESGetNumberFunctionEvals(mechanical_snes,nfuncs,ierr)
-  CHKERRQ(ierr)
-  call SNESGetIterationNumber(mechanical_snes,PETScIter,ierr)
-  CHKERRQ(ierr)
+  call SNESGetNumberFunctionEvals(mechanical_snes,nfuncs,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call SNESGetIterationNumber(mechanical_snes,PETScIter,err_PETSc)
+  CHKERRQ(err_PETSc)
 
   if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1                        ! new increment
@@ -556,7 +559,7 @@ subroutine formResidual(da_local,x_local, &
 !--------------------------------------------------------------------------------------------------
 ! get deformation gradient
-  call DMDAVecGetArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
+  call DMDAVecGetArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
   do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
     ctr = 0
     do kk = 0, 1; do jj = 0, 1; do ii = 0, 1
@@ -566,14 +569,15 @@ subroutine formResidual(da_local,x_local, &
     ii = i-xstart+1; jj = j-ystart+1; kk = k-zstart+1
     F(1:3,1:3,ii,jj,kk) = params%rotation_BC%rotate(F_aim,active=.true.) + transpose(matmul(BMat,x_elem))
   enddo; enddo; enddo
-  call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
+  call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
 
 !--------------------------------------------------------------------------------------------------
 ! evaluate constitutive response
   call utilities_constitutiveResponse(P_current,&
                                       P_av,C_volAvg,devNull, &
                                       F,params%Delta_t,params%rotation_BC)
-  call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
+  call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+  if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 
 !--------------------------------------------------------------------------------------------------
 ! stress BC handling
@@ -582,9 +586,9 @@ subroutine formResidual(da_local,x_local, &
 !--------------------------------------------------------------------------------------------------
 ! constructing residual
-  call VecSet(f_local,0.0_pReal,ierr);CHKERRQ(ierr)
-  call DMDAVecGetArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
-  call DMDAVecGetArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
+  call VecSet(f_local,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
   ele = 0
   do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
     ctr = 0
@@ -604,12 +608,12 @@ subroutine formResidual(da_local,x_local, &
       f_scal(0:2,i+ii,j+jj,k+kk) = f_scal(0:2,i+ii,j+jj,k+kk) + f_elem(ctr,1:3)
     enddo; enddo; enddo
   enddo; enddo; enddo
-  call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
-  call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
+  call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
+  call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
 
 !--------------------------------------------------------------------------------------------------
 ! applying boundary conditions
-  call DMDAVecGetArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
+  call DMDAVecGetArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
   if (zstart == 0) then
     f_scal(0:2,xstart,ystart,zstart) = 0.0
     f_scal(0:2,xend+1,ystart,zstart) = 0.0
@@ -622,7 +626,7 @@ subroutine formResidual(da_local,x_local, &
     f_scal(0:2,xstart,yend+1,zend+1) = 0.0
     f_scal(0:2,xend+1,yend+1,zend+1) = 0.0
   endif
-  call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
+  call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
 
 end subroutine formResidual
@@ -630,7 +634,7 @@ end subroutine formResidual
 !--------------------------------------------------------------------------------------------------
 !> @brief forms the FEM stiffness matrix
 !--------------------------------------------------------------------------------------------------
-subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
+subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,err_PETSc)
   DM :: da_local
   Vec :: x_local, coordinates
@@ -644,15 +648,17 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,err_PETSc)
   PetscScalar :: diag
   PetscObject :: dummy
   MatNullSpace :: matnull
-  PetscErrorCode :: ierr
+  PetscErrorCode :: err_PETSc
 
   BMatFull = 0.0
   BMatFull(1:3,1 :8 ) = BMat
BMatFull(4:6,9 :16) = BMat BMatFull(4:6,9 :16) = BMat
BMatFull(7:9,17:24) = BMat BMatFull(7:9,17:24) = BMat
call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,ierr); CHKERRQ(ierr) call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,err_PETSc)
call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call MatZeroEntries(Jac,ierr); CHKERRQ(ierr) call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,err_PETSc)
CHKERRQ(err_PETSc)
call MatZeroEntries(Jac,err_PETSc); CHKERRQ(err_PETSc)
ele = 0 ele = 0
do k = zstart, zend; do j = ystart, yend; do i = xstart, xend do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
ctr = 0 ctr = 0
@ -687,34 +693,42 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
matmul(transpose(BMatFull), & matmul(transpose(BMatFull), &
matmul(reshape(reshape(homogenization_dPdF(1:3,1:3,1:3,1:3,ele), & matmul(reshape(reshape(homogenization_dPdF(1:3,1:3,1:3,1:3,ele), &
shape=[3,3,3,3], order=[2,1,4,3]),shape=[9,9]),BMatFull))*detJ shape=[3,3,3,3], order=[2,1,4,3]),shape=[9,9]),BMatFull))*detJ
call MatSetValuesStencil(Jac,24,row,24,col,K_ele,ADD_VALUES,ierr) call MatSetValuesStencil(Jac,24_pPetscInt,row,24_pPetscInt,col,K_ele,ADD_VALUES,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo; enddo; enddo enddo; enddo; enddo
call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! applying boundary conditions ! applying boundary conditions
diag = (C_volAvg(1,1,1,1)/delta(1)**2 + & diag = (C_volAvg(1,1,1,1)/delta(1)**2 + &
C_volAvg(2,2,2,2)/delta(2)**2 + & C_volAvg(2,2,2,2)/delta(2)**2 + &
C_volAvg(3,3,3,3)/delta(3)**2)*detJ C_volAvg(3,3,3,3)/delta(3)**2)*detJ
call MatZeroRowsColumns(Jac,size(rows),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr) call MatZeroRowsColumns(Jac,size(rows,kind=pPetscInt),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetGlobalVector(da_local,coordinates,ierr); CHKERRQ(ierr) call DMGetGlobalVector(da_local,coordinates,err_PETSc)
call DMDAVecGetArrayF90(da_local,coordinates,x_scal,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(da_local,coordinates,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
ele = 0 ele = 0
do k = zstart, zend; do j = ystart, yend; do i = xstart, xend do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
ele = ele + 1 ele = ele + 1
x_scal(0:2,i,j,k) = discretization_IPcoords(1:3,ele) x_scal(0:2,i,j,k) = discretization_IPcoords(1:3,ele)
enddo; enddo; enddo enddo; enddo; enddo
call DMDAVecRestoreArrayF90(da_local,coordinates,x_scal,ierr); CHKERRQ(ierr) ! initialize to undeformed coordinates (ToDo: use ip coordinates) call DMDAVecRestoreArrayF90(da_local,coordinates,x_scal,err_PETSc)
call MatNullSpaceCreateRigidBody(coordinates,matnull,ierr); CHKERRQ(ierr) ! get rigid body deformation modes CHKERRQ(err_PETSc) ! initialize to undeformed coordinates (ToDo: use ip coordinates)
call DMRestoreGlobalVector(da_local,coordinates,ierr); CHKERRQ(ierr) call MatNullSpaceCreateRigidBody(coordinates,matnull,err_PETSc)
call MatSetNullSpace(Jac,matnull,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc) ! get rigid body deformation modes
call MatSetNearNullSpace(Jac,matnull,ierr); CHKERRQ(ierr) call DMRestoreGlobalVector(da_local,coordinates,err_PETSc)
call MatNullSpaceDestroy(matnull,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call MatSetNullSpace(Jac,matnull,err_PETSc)
CHKERRQ(err_PETSc)
call MatSetNearNullSpace(Jac,matnull,err_PETSc)
CHKERRQ(err_PETSc)
call MatNullSpaceDestroy(matnull,err_PETSc)
CHKERRQ(err_PETSc)
end subroutine formJacobian end subroutine formJacobian
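The hunk above replaces bare default-kind integers such as 24 and size(rows) with kind-suffixed values (24_pPetscInt, size(rows,kind=pPetscInt)) so that the actual arguments of MatSetValuesStencil and MatZeroRowsColumns match PetscInt when PETSc is configured with 64bit indices. A minimal, self-contained sketch of the same idea; here pPetscInt is hard-coded to int64 only for illustration, whereas the real kind parameter is taken from PETSc and is not fixed in DAMASK itself:

program petscint_literals
  use, intrinsic :: iso_fortran_env, only: int64
  implicit none
  ! assumption: PETSc built with 64bit indices, so PetscInt is an 8-byte integer
  integer, parameter :: pPetscInt = int64
  integer(pPetscInt), dimension(4) :: rows = [0, 1, 2, 3]
  integer(pPetscInt) :: nDof, nRows

  nDof  = 24_pPetscInt                    ! kind-suffixed literal instead of plain 24
  nRows = size(rows, kind=pPetscInt)      ! size() result requested directly in the target kind
  print '(a,i0,a,i0)', 'nDof = ', nDof, ', nRows = ', nRows
end program petscint_literals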

View File

@ -97,7 +97,8 @@ contains
subroutine grid_mechanical_spectral_basic_init subroutine grid_mechanical_spectral_basic_init
real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
PetscScalar, pointer, dimension(:,:,:,:) :: & PetscScalar, pointer, dimension(:,:,:,:) :: &
F ! pointer to solution data F ! pointer to solution data
PetscInt, dimension(0:worldsize-1) :: localK PetscInt, dimension(0:worldsize-1) :: localK
@ -145,10 +146,10 @@ subroutine grid_mechanical_spectral_basic_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc ! set default and user defined options for PETSc
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! allocate global fields ! allocate global fields
@ -157,33 +158,34 @@ subroutine grid_mechanical_spectral_basic_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc ! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr) call SNESCreate(PETSC_COMM_WORLD,snes,err_PETSc); CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr) call SNESSetOptionsPrefix(snes,'mechanical_',err_PETSc);CHKERRQ(err_PETSc)
localK = 0 localK = 0_pPetscInt
localK(worldrank) = grid3 localK(worldrank) = int(grid3,pPetscInt)
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3d(PETSC_COMM_WORLD, & call DMDACreate3d(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
grid(1),grid(2),grid(3), & ! global grid int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
1 , 1, worldsize, & 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
9, 0, & ! #dof (F tensor), ghost boundary width (domain overlap) 9_pPetscInt, 0_pPetscInt, & ! #dof (F, tensor), ghost boundary width (domain overlap)
[grid(1)],[grid(2)],localK, & ! local grid [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
da,ierr) ! handle, error da,err_PETSc) ! handle, error
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESSetDM(snes,da,ierr); CHKERRQ(ierr) ! connect snes to da call SNESSetDM(snes,da,err_PETSc); CHKERRQ(err_PETSc) ! connect snes to da
call DMsetFromOptions(da,ierr); CHKERRQ(ierr) call DMsetFromOptions(da,err_PETSc); CHKERRQ(err_PETSc)
call DMsetUp(da,ierr); CHKERRQ(ierr) call DMsetUp(da,err_PETSc); CHKERRQ(err_PETSc)
call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 9, i.e. every def grad tensor) call DMcreateGlobalVector(da,solution_vec,err_PETSc); CHKERRQ(err_PETSc) ! global solution vector (grid x 9, i.e. every def grad tensor)
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "converged" call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged"
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments call SNESsetFromOptions(snes,err_PETSc); CHKERRQ(err_PETSc) ! pull it all together with additional CLI arguments
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! init fields ! init fields
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! places pointer on PETSc data call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc) ! places pointer on PETSc data
restartRead: if (interface_restartInc > 0) then restartRead: if (interface_restartInc > 0) then
print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file' print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'
@ -192,17 +194,17 @@ subroutine grid_mechanical_spectral_basic_init
groupHandle = HDF5_openGroup(fileHandle,'solver') groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(P_aim,groupHandle,'P_aim',.false.) call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim,groupHandle,'F_aim',.false.) call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aim_lastInc,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.) call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F') call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc') call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
@ -216,24 +218,27 @@ subroutine grid_mechanical_spectral_basic_init
call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2 call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2
reshape(F,shape(F_lastInc)), & ! target F reshape(F,shape(F_lastInc)), & ! target F
0.0_pReal) ! time increment 0.0_pReal) ! time increment
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! disassociate pointer call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc) ! disassociate pointer
restartRead2: if (interface_restartInc > 0) then restartRead2: if (interface_restartInc > 0) then
print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file' print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.) call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(C_volAvgLastInc,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_closeGroup(groupHandle) call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle) call HDF5_closeFile(fileHandle)
call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', & call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,ierr) MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,ierr) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,ierr) call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end if restartRead2 end if restartRead2
call utilities_updateGamma(C_minMaxAvg) call utilities_updateGamma(C_minMaxAvg)
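In grid_mechanical_spectral_basic_init above, the DMDACreate3d arguments are now converted explicitly with int(...,pPetscInt), and localK is filled with values of that kind, because a default-kind integer actual argument would no longer match a 64bit PetscInt dummy. A small sketch of the conversion pattern, again with an assumed kind parameter standing in for PETSc's PetscInt:

program petscint_cast
  use, intrinsic :: iso_fortran_env, only: int64
  implicit none
  integer, parameter :: pPetscInt = int64           ! assumption: stands in for PetscInt (64bit build)
  integer            :: grid(3) = [16, 16, 16]      ! default-kind values, as stored by the discretization
  integer(pPetscInt) :: gridPetsc(3), localK(0:3)

  gridPetsc = int(grid, pPetscInt)                  ! explicit widening, no reliance on implicit conversion
  localK    = 0_pPetscInt
  localK(0) = int(grid(3), pPetscInt)               ! per-rank share of the z direction, as for localK(worldrank)
  print '(a,3(i0,1x))', 'global grid as PetscInt: ', gridPetsc
end program petscint_cast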
@ -255,7 +260,7 @@ function grid_mechanical_spectral_basic_solution(incInfoIn) result(solution)
solution solution
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! PETSc Data ! PETSc Data
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason SNESConvergedReason :: reason
incInfo = incInfoIn incInfo = incInfoIn
@ -267,11 +272,11 @@ function grid_mechanical_spectral_basic_solution(incInfoIn) result(solution)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! solve BVP ! solve BVP
call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr) call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! check convergence ! check convergence
call SNESGetConvergedReason(snes,reason,ierr); CHKERRQ(ierr) call SNESGetConvergedReason(snes,reason,err_PETSc); CHKERRQ(err_PETSc)
solution%converged = reason > 0 solution%converged = reason > 0
solution%iterationsNeeded = totalIter solution%iterationsNeeded = totalIter
@ -301,11 +306,11 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
deformation_BC deformation_BC
type(rotation), intent(in) :: & type(rotation), intent(in) :: &
rotation_BC rotation_BC
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscScalar, pointer, dimension(:,:,:,:) :: F PetscScalar, pointer, dimension(:,:,:,:) :: F
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
if (cutBack) then if (cutBack) then
C_volAvg = C_volAvgLastInc C_volAvg = C_volAvgLastInc
@ -314,7 +319,7 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
C_volAvgLastInc = C_volAvg C_volAvgLastInc = C_volAvg
C_minMaxAvgLastInc = C_minMaxAvg C_minMaxAvgLastInc = C_minMaxAvg
F_aimDot = merge(merge(.0_pReal,(F_aim-F_aim_lastInc)/Delta_t_old,stress_BC%mask),.0_pReal,guess) ! estimate deformation rate for prescribed stress components F_aimDot = merge(merge(.0_pReal,(F_aim-F_aim_lastInc)/Delta_t_old,stress_BC%mask),.0_pReal,guess) ! estimate deformation rate for prescribed stress components
F_aim_lastInc = F_aim F_aim_lastInc = F_aim
!----------------------------------------------------------------------------------------------- !-----------------------------------------------------------------------------------------------
@ -348,7 +353,7 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
F = reshape(utilities_forwardField(Delta_t,F_lastInc,Fdot, & ! estimate of F at end of time+Delta_t that matches rotated F_aim on average F = reshape(utilities_forwardField(Delta_t,F_lastInc,Fdot, & ! estimate of F at end of time+Delta_t that matches rotated F_aim on average
rotation_BC%rotate(F_aim,active=.true.)),[9,grid(1),grid(2),grid3]) rotation_BC%rotate(F_aim,active=.true.)),[9,grid(1),grid(2),grid3])
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! set module wide available data ! set module wide available data
@ -364,12 +369,12 @@ end subroutine grid_mechanical_spectral_basic_forward
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_basic_updateCoords subroutine grid_mechanical_spectral_basic_updateCoords
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscScalar, dimension(:,:,:,:), pointer :: F PetscScalar, dimension(:,:,:,:), pointer :: F
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
call utilities_updateCoords(F) call utilities_updateCoords(F)
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
end subroutine grid_mechanical_spectral_basic_updateCoords end subroutine grid_mechanical_spectral_basic_updateCoords
@ -379,11 +384,11 @@ end subroutine grid_mechanical_spectral_basic_updateCoords
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_basic_restartWrite subroutine grid_mechanical_spectral_basic_restartWrite
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle integer(HID_T) :: fileHandle, groupHandle
PetscScalar, dimension(:,:,:,:), pointer :: F PetscScalar, dimension(:,:,:,:), pointer :: F
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
print'(1x,a)', 'writing solver data required for restart to file'; flush(IO_STDOUT) print'(1x,a)', 'writing solver data required for restart to file'; flush(IO_STDOUT)
@ -410,7 +415,7 @@ subroutine grid_mechanical_spectral_basic_restartWrite
if (num%update_gamma) call utilities_saveReferenceStiffness if (num%update_gamma) call utilities_saveReferenceStiffness
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
end subroutine grid_mechanical_spectral_basic_restartWrite end subroutine grid_mechanical_spectral_basic_restartWrite
@ -418,7 +423,7 @@ end subroutine grid_mechanical_spectral_basic_restartWrite
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief convergence check !> @brief convergence check
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,ierr) subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,err_PETSc)
SNES :: snes_local SNES :: snes_local
PetscInt, intent(in) :: PETScIter PetscInt, intent(in) :: PETScIter
@ -428,7 +433,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dumm
devNull3 devNull3
SNESConvergedReason :: reason SNESConvergedReason :: reason
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
real(pReal) :: & real(pReal) :: &
divTol, & divTol, &
BCTol BCTol
@ -460,7 +465,7 @@ end subroutine converged
!> @brief forms the residual vector !> @brief forms the residual vector
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine formResidual(in, F, & subroutine formResidual(in, F, &
residuum, dummy, ierr) residuum, dummy, err_PETSc)
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work) DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work)
PetscScalar, dimension(3,3,XG_RANGE,YG_RANGE,ZG_RANGE), & PetscScalar, dimension(3,3,XG_RANGE,YG_RANGE,ZG_RANGE), &
@ -473,10 +478,11 @@ subroutine formResidual(in, F, &
PETScIter, & PETScIter, &
nfuncs nfuncs
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr) call SNESGetNumberFunctionEvals(snes,nfuncs,err_PETSc); CHKERRQ(err_PETSc)
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr) call SNESGetIterationNumber(snes,PETScIter,err_PETSc); CHKERRQ(err_PETSc)
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
@ -497,7 +503,8 @@ subroutine formResidual(in, F, &
call utilities_constitutiveResponse(residuum, & ! "residuum" gets field of first PK stress (to save memory) call utilities_constitutiveResponse(residuum, & ! "residuum" gets field of first PK stress (to save memory)
P_av,C_volAvg,C_minMaxAvg, & P_av,C_volAvg,C_minMaxAvg, &
F,params%Delta_t,params%rotation_BC) F,params%Delta_t,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! stress BC handling ! stress BC handling

View File

@ -108,7 +108,8 @@ contains
subroutine grid_mechanical_spectral_polarisation_init subroutine grid_mechanical_spectral_polarisation_init
real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
PetscScalar, pointer, dimension(:,:,:,:) :: & PetscScalar, pointer, dimension(:,:,:,:) :: &
FandF_tau, & ! overall pointer to solution data FandF_tau, & ! overall pointer to solution data
F, & ! specific (sub)pointer F, & ! specific (sub)pointer
@ -163,10 +164,10 @@ subroutine grid_mechanical_spectral_polarisation_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc ! set default and user defined options for PETSc
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! allocate global fields ! allocate global fields
@ -177,33 +178,34 @@ subroutine grid_mechanical_spectral_polarisation_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc ! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr) call SNESCreate(PETSC_COMM_WORLD,snes,err_PETSc); CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr) call SNESSetOptionsPrefix(snes,'mechanical_',err_PETSc);CHKERRQ(err_PETSc)
localK = 0 localK = 0_pPetscInt
localK(worldrank) = grid3 localK(worldrank) = int(grid3,pPetscInt)
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3d(PETSC_COMM_WORLD, & call DMDACreate3d(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
grid(1),grid(2),grid(3), & ! global grid int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
1 , 1, worldsize, & 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
18, 0, & ! #dof (F tensor), ghost boundary width (domain overlap) 18_pPetscInt, 0_pPetscInt, & ! #dof (2xtensor), ghost boundary width (domain overlap)
[grid(1)],[grid(2)],localK, & ! local grid [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
da,ierr) ! handle, error da,err_PETSc) ! handle, error
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESSetDM(snes,da,ierr); CHKERRQ(ierr) ! connect snes to da call SNESSetDM(snes,da,err_PETSc); CHKERRQ(err_PETSc) ! connect snes to da
call DMsetFromOptions(da,ierr); CHKERRQ(ierr) call DMsetFromOptions(da,err_PETSc); CHKERRQ(err_PETSc)
call DMsetUp(da,ierr); CHKERRQ(ierr) call DMsetUp(da,err_PETSc); CHKERRQ(err_PETSc)
call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 18, i.e. every def grad tensor) call DMcreateGlobalVector(da,solution_vec,err_PETSc); CHKERRQ(err_PETSc) ! global solution vector (grid x 18, i.e. every def grad tensor)
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "converged" call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged"
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments call SNESsetFromOptions(snes,err_PETSc); CHKERRQ(err_PETSc) ! pull it all together with additional CLI arguments
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! init fields ! init fields
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! places pointer on PETSc data call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc); CHKERRQ(err_PETSc) ! places pointer on PETSc data
F => FandF_tau(0: 8,:,:,:) F => FandF_tau(0: 8,:,:,:)
F_tau => FandF_tau(9:17,:,:,:) F_tau => FandF_tau(9:17,:,:,:)
@ -214,17 +216,17 @@ subroutine grid_mechanical_spectral_polarisation_init
groupHandle = HDF5_openGroup(fileHandle,'solver') groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(P_aim,groupHandle,'P_aim',.false.) call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(P_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim,groupHandle,'F_aim',.false.) call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aim,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aim_lastInc,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.) call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F') call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc') call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
call HDF5_read(F_tau,groupHandle,'F_tau') call HDF5_read(F_tau,groupHandle,'F_tau')
@ -242,24 +244,28 @@ subroutine grid_mechanical_spectral_polarisation_init
call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2 call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2
reshape(F,shape(F_lastInc)), & ! target F reshape(F,shape(F_lastInc)), & ! target F
0.0_pReal) ! time increment 0.0_pReal) ! time increment
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! disassociate pointer call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc) ! disassociate pointer
CHKERRQ(err_PETSc)
restartRead2: if (interface_restartInc > 0) then restartRead2: if (interface_restartInc > 0) then
print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file' print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.) call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(C_volAvgLastInc,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_closeGroup(groupHandle) call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle) call HDF5_closeFile(fileHandle)
call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', & call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,ierr) MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,ierr) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,ierr) call MPI_File_read(fileUnit,C_minMaxAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end if restartRead2 end if restartRead2
call utilities_updateGamma(C_minMaxAvg) call utilities_updateGamma(C_minMaxAvg)
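The restart block above shows the second half of the change: every MPI call now passes counts and root ranks as MPI_INTEGER_KIND literals and returns its status into err_MPI (also of kind MPI_INTEGER_KIND), which is checked immediately with error stop, while err_PETSc keeps the PetscErrorCode kind for PETSc calls. A compilable sketch of that error-handling pattern; the variable names follow the diff, the broadcast payload is made up:

program mpi_err_pattern
  use mpi
  implicit none
  integer(MPI_INTEGER_KIND) :: err_MPI, rank
  real(8) :: payload(9)

  call MPI_Init(err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  call MPI_Comm_rank(MPI_COMM_WORLD, rank, err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

  payload = merge(1.0_8, 0.0_8, rank == 0)                     ! rank 0 owns the data to be broadcast
  call MPI_Bcast(payload, 9_MPI_INTEGER_KIND, MPI_DOUBLE_PRECISION, 0_MPI_INTEGER_KIND, MPI_COMM_WORLD, err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

  call MPI_Finalize(err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end program mpi_err_pattern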
@ -283,7 +289,7 @@ function grid_mechanical_spectral_polarisation_solution(incInfoIn) result(soluti
solution solution
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! PETSc Data ! PETSc Data
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason SNESConvergedReason :: reason
incInfo = incInfoIn incInfo = incInfoIn
@ -299,11 +305,11 @@ function grid_mechanical_spectral_polarisation_solution(incInfoIn) result(soluti
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! solve BVP ! solve BVP
call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr) call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! check convergence ! check convergence
call SNESGetConvergedReason(snes,reason,ierr); CHKERRQ(ierr) call SNESGetConvergedReason(snes,reason,err_PETSc); CHKERRQ(err_PETSc)
solution%converged = reason > 0 solution%converged = reason > 0
solution%iterationsNeeded = totalIter solution%iterationsNeeded = totalIter
@ -333,13 +339,13 @@ subroutine grid_mechanical_spectral_polarisation_forward(cutBack,guess,Delta_t,D
deformation_BC deformation_BC
type(rotation), intent(in) :: & type(rotation), intent(in) :: &
rotation_BC rotation_BC
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscScalar, pointer, dimension(:,:,:,:) :: FandF_tau, F, F_tau PetscScalar, pointer, dimension(:,:,:,:) :: FandF_tau, F, F_tau
integer :: i, j, k integer :: i, j, k
real(pReal), dimension(3,3) :: F_lambda33 real(pReal), dimension(3,3) :: F_lambda33
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc); CHKERRQ(err_PETSc)
F => FandF_tau(0: 8,:,:,:) F => FandF_tau(0: 8,:,:,:)
F_tau => FandF_tau(9:17,:,:,:) F_tau => FandF_tau(9:17,:,:,:)
@ -402,7 +408,8 @@ subroutine grid_mechanical_spectral_polarisation_forward(cutBack,guess,Delta_t,D
end do; end do; end do end do; end do; end do
end if end if
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! set module wide available data ! set module wide available data
@ -418,12 +425,14 @@ end subroutine grid_mechanical_spectral_polarisation_forward
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_updateCoords subroutine grid_mechanical_spectral_polarisation_updateCoords
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc)
CHKERRQ(err_PETSc)
call utilities_updateCoords(FandF_tau(0:8,:,:,:)) call utilities_updateCoords(FandF_tau(0:8,:,:,:))
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
CHKERRQ(err_PETSc)
end subroutine grid_mechanical_spectral_polarisation_updateCoords end subroutine grid_mechanical_spectral_polarisation_updateCoords
@ -433,11 +442,11 @@ end subroutine grid_mechanical_spectral_polarisation_updateCoords
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_restartWrite subroutine grid_mechanical_spectral_polarisation_restartWrite
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle integer(HID_T) :: fileHandle, groupHandle
PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc); CHKERRQ(err_PETSc)
F => FandF_tau(0: 8,:,:,:) F => FandF_tau(0: 8,:,:,:)
F_tau => FandF_tau(9:17,:,:,:) F_tau => FandF_tau(9:17,:,:,:)
@ -467,7 +476,8 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite
if (num%update_gamma) call utilities_saveReferenceStiffness if (num%update_gamma) call utilities_saveReferenceStiffness
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
CHKERRQ(err_PETSc)
end subroutine grid_mechanical_spectral_polarisation_restartWrite end subroutine grid_mechanical_spectral_polarisation_restartWrite
@ -475,7 +485,7 @@ end subroutine grid_mechanical_spectral_polarisation_restartWrite
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief convergence check !> @brief convergence check
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,ierr) subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,err_PETSc)
SNES :: snes_local SNES :: snes_local
PetscInt, intent(in) :: PETScIter PetscInt, intent(in) :: PETScIter
@ -485,7 +495,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dumm
devNull3 devNull3
SNESConvergedReason :: reason SNESConvergedReason :: reason
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
real(pReal) :: & real(pReal) :: &
curlTol, & curlTol, &
divTol, & divTol, &
@ -521,7 +531,7 @@ end subroutine converged
!> @brief forms the residual vector !> @brief forms the residual vector
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine formResidual(in, FandF_tau, & subroutine formResidual(in, FandF_tau, &
residuum, dummy,ierr) residuum, dummy,err_PETSc)
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work) DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work)
PetscScalar, dimension(3,3,2,XG_RANGE,YG_RANGE,ZG_RANGE), & PetscScalar, dimension(3,3,2,XG_RANGE,YG_RANGE,ZG_RANGE), &
@ -537,8 +547,9 @@ subroutine formResidual(in, FandF_tau, &
PETScIter, & PETScIter, &
nfuncs nfuncs
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer :: & integer(MPI_INTEGER_KIND) :: err_MPI
integer :: &
i, j, k, e i, j, k, e
!--------------------------------------------------------------------------------------------------- !---------------------------------------------------------------------------------------------------
@ -553,10 +564,11 @@ subroutine formResidual(in, FandF_tau, &
X_RANGE, Y_RANGE, Z_RANGE) X_RANGE, Y_RANGE, Z_RANGE)
F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,F_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,F_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr) call SNESGetNumberFunctionEvals(snes,nfuncs,err_PETSc); CHKERRQ(err_PETSc)
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr) call SNESGetIterationNumber(snes,PETScIter,err_PETSc); CHKERRQ(err_PETSc)
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
@ -597,7 +609,7 @@ subroutine formResidual(in, FandF_tau, &
call utilities_constitutiveResponse(residual_F, & ! "residuum" gets field of first PK stress (to save memory) call utilities_constitutiveResponse(residual_F, & ! "residuum" gets field of first PK stress (to save memory)
P_av,C_volAvg,C_minMaxAvg, & P_av,C_volAvg,C_minMaxAvg, &
F - residual_F_tau/num%beta,params%Delta_t,params%rotation_BC) F - residual_F_tau/num%beta,params%Delta_t,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! stress BC handling ! stress BC handling

View File

@ -71,7 +71,8 @@ subroutine grid_thermal_spectral_init(T_0)
integer :: i, j, k, ce integer :: i, j, k, ce
DM :: thermal_grid DM :: thermal_grid
PetscScalar, dimension(:,:,:), pointer :: T_PETSc PetscScalar, dimension(:,:,:), pointer :: T_PETSc
PetscErrorCode :: ierr integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
class(tNode), pointer :: & class(tNode), pointer :: &
num_grid num_grid
@ -94,39 +95,41 @@ subroutine grid_thermal_spectral_init(T_0)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc ! set default and user defined options for PETSc
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-thermal_snes_type newtonls -thermal_snes_mf & call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-thermal_snes_type newtonls -thermal_snes_mf &
&-thermal_snes_ksp_ew -thermal_ksp_type fgmres',ierr) &-thermal_snes_ksp_ew -thermal_ksp_type fgmres',err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc ! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,thermal_snes,ierr); CHKERRQ(ierr) call SNESCreate(PETSC_COMM_WORLD,thermal_snes,err_PETSc); CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(thermal_snes,'thermal_',ierr);CHKERRQ(ierr) call SNESSetOptionsPrefix(thermal_snes,'thermal_',err_PETSc);CHKERRQ(err_PETSc)
localK = 0 localK = 0_pPetscInt
localK(worldrank) = grid3 localK(worldrank) = int(grid3,pPetscInt)
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3D(PETSC_COMM_WORLD, & call DMDACreate3D(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
grid(1),grid(2),grid(3), & ! global grid int(grid(1),pPetscInt),int(grid(2),pPetscInt),int(grid(3),pPetscInt), & ! global grid
1, 1, worldsize, & 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
1, 0, & ! #dof (T field), ghost boundary width (domain overlap) 1_pPetscInt, 0_pPetscInt, & ! #dof (T, scalar), ghost boundary width (domain overlap)
[grid(1)],[grid(2)],localK, & ! local grid [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
thermal_grid,ierr) ! handle, error thermal_grid,err_PETSc) ! handle, error
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESSetDM(thermal_snes,thermal_grid,ierr); CHKERRQ(ierr) ! connect snes to da call SNESSetDM(thermal_snes,thermal_grid,err_PETSc); CHKERRQ(err_PETSc) ! connect snes to da
call DMsetFromOptions(thermal_grid,ierr); CHKERRQ(ierr) call DMsetFromOptions(thermal_grid,err_PETSc); CHKERRQ(err_PETSc)
call DMsetUp(thermal_grid,ierr); CHKERRQ(ierr) call DMsetUp(thermal_grid,err_PETSc); CHKERRQ(err_PETSc)
call DMCreateGlobalVector(thermal_grid,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 1, i.e. temperature at every grid point) call DMCreateGlobalVector(thermal_grid,solution_vec,err_PETSc) ! global solution vector (grid x 1, i.e. temperature at every grid point)
call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector CHKERRQ(err_PETSc)
CHKERRQ(ierr) call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
call SNESSetFromOptions(thermal_snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments CHKERRQ(err_PETSc)
call SNESSetFromOptions(thermal_snes,err_PETSc); CHKERRQ(err_PETSc) ! pull it all together with additional CLI arguments
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! init fields ! init fields
call DMDAGetCorners(thermal_grid,xstart,ystart,zstart,xend,yend,zend,ierr) call DMDAGetCorners(thermal_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
xend = xstart + xend - 1 xend = xstart + xend - 1
yend = ystart + yend - 1 yend = ystart + yend - 1
zend = zstart + zend - 1 zend = zstart + zend - 1
@ -143,9 +146,11 @@ subroutine grid_thermal_spectral_init(T_0)
call homogenization_thermal_setField(T_0,0.0_pReal,ce) call homogenization_thermal_setField(T_0,0.0_pReal,ce)
end do; end do; end do end do; end do; end do
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
T_PETSc(xstart:xend,ystart:yend,zstart:zend) = T_current T_PETSc(xstart:xend,ystart:yend,zstart:zend) = T_current
call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
call updateReference call updateReference
@ -164,7 +169,8 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
PetscInt :: devNull PetscInt :: devNull
PetscReal :: T_min, T_max, stagNorm PetscReal :: T_min, T_max, stagNorm
PetscErrorCode :: ierr integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason SNESConvergedReason :: reason
solution%converged =.false. solution%converged =.false.
@ -173,8 +179,10 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
! set module wide available data ! set module wide available data
params%Delta_t = Delta_t params%Delta_t = Delta_t
call SNESSolve(thermal_snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr) call SNESSolve(thermal_snes,PETSC_NULL_VEC,solution_vec,err_PETSc)
call SNESGetConvergedReason(thermal_snes,reason,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESGetConvergedReason(thermal_snes,reason,err_PETSc)
CHKERRQ(err_PETSc)
if (reason < 1) then if (reason < 1) then
solution%converged = .false. solution%converged = .false.
@ -184,9 +192,11 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
solution%iterationsNeeded = totalIter solution%iterationsNeeded = totalIter
end if end if
stagNorm = maxval(abs(T_current - T_stagInc)) stagNorm = maxval(abs(T_current - T_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current)) solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current))
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
T_stagInc = T_current T_stagInc = T_current
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
@ -197,8 +207,8 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce) call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
end do; end do; end do end do; end do; end do
call VecMin(solution_vec,devNull,T_min,ierr); CHKERRQ(ierr) call VecMin(solution_vec,devNull,T_min,err_PETSc); CHKERRQ(err_PETSc)
call VecMax(solution_vec,devNull,T_max,ierr); CHKERRQ(ierr) call VecMax(solution_vec,devNull,T_max,err_PETSc); CHKERRQ(err_PETSc)
if (solution%converged) & if (solution%converged) &
print'(/,1x,a)', '... thermal conduction converged ..................................' print'(/,1x,a)', '... thermal conduction converged ..................................'
print'(/,1x,a,f8.4,2x,f8.4,2x,f8.4)', 'Minimum|Maximum|Delta Temperature / K = ', T_min, T_max, stagNorm print'(/,1x,a,f8.4,2x,f8.4,2x,f8.4)', 'Minimum|Maximum|Delta Temperature / K = ', T_min, T_max, stagNorm
@ -217,7 +227,7 @@ subroutine grid_thermal_spectral_forward(cutBack)
integer :: i, j, k, ce integer :: i, j, k, ce
DM :: dm_local DM :: dm_local
PetscScalar, dimension(:,:,:), pointer :: x_scal PetscScalar, dimension(:,:,:), pointer :: x_scal
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
if (cutBack) then if (cutBack) then
T_current = T_lastInc T_current = T_lastInc
@ -225,10 +235,13 @@ subroutine grid_thermal_spectral_forward(cutBack)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! reverting thermal field state ! reverting thermal field state
call SNESGetDM(thermal_snes,dm_local,ierr); CHKERRQ(ierr) call SNESGetDM(thermal_snes,dm_local,err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr) !< get the data out of PETSc to work with CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,x_scal,err_PETSc) !< get the data out of PETSc to work with
CHKERRQ(err_PETSc)
x_scal(xstart:xend,ystart:yend,zstart:zend) = T_current x_scal(xstart:xend,ystart:yend,zstart:zend) = T_current
call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(dm_local,solution_vec,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
ce = 0 ce = 0
do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1) do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
ce = ce + 1 ce = ce + 1
@@ -245,7 +258,7 @@ end subroutine grid_thermal_spectral_forward
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief forms the spectral thermal residual vector !> @brief forms the spectral thermal residual vector
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine formResidual(in,x_scal,f_scal,dummy,ierr) subroutine formResidual(in,x_scal,f_scal,dummy,dummy_err)
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: & DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: &
in in
@@ -256,7 +269,7 @@ subroutine formResidual(in,x_scal,f_scal,dummy,ierr)
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: & X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
f_scal f_scal
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: dummy_err
integer :: i, j, k, ce integer :: i, j, k, ce
T_current = x_scal T_current = x_scal
@@ -301,7 +314,8 @@ end subroutine formResidual
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine updateReference() subroutine updateReference()
integer :: ce,ierr integer :: ce
integer(MPI_INTEGER_KIND) :: err_MPI
K_ref = 0.0_pReal K_ref = 0.0_pReal
@@ -312,9 +326,11 @@ subroutine updateReference()
end do end do
K_ref = K_ref*wgt K_ref = K_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,K_ref,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,K_ref,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
mu_ref = mu_ref*wgt mu_ref = mu_ref*wgt
call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,mu_ref,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end subroutine updateReference end subroutine updateReference
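The hunks above follow one convention used throughout this merge: the former catch-all ierr is split into err_PETSc (PetscErrorCode) and err_MPI (integer(MPI_INTEGER_KIND)), MPI count and root arguments become kind-suffixed literals, and every MPI return code is checked immediately. A minimal sketch of that shape (the helper name and the module providing the MPI bindings and MPI_INTEGER_KIND are assumptions for illustration, not part of the patch):

subroutine average_over_ranks(x)                     ! illustrative helper, not in DAMASK
  use prec, only: pReal
  ! MPI bindings and MPI_INTEGER_KIND assumed available from the surrounding modules
  real(pReal), intent(inout) :: x                    ! quantity to sum over all ranks
  integer(MPI_INTEGER_KIND) :: err_MPI               ! MPI status, kept separate from PetscErrorCode

  call MPI_Allreduce(MPI_IN_PLACE,x,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end subroutine average_over_ranks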

View File

@@ -144,7 +144,7 @@ contains
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine spectral_utilities_init subroutine spectral_utilities_init
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer :: i, j, k, & integer :: i, j, k, &
FFTW_planner_flag FFTW_planner_flag
integer, dimension(3) :: k_s integer, dimension(3) :: k_s
@@ -193,13 +193,13 @@ subroutine spectral_utilities_init
'add more using the "PETSc_options" keyword in numerics.yaml' 'add more using the "PETSc_options" keyword in numerics.yaml'
flush(IO_STDOUT) flush(IO_STDOUT)
call PetscOptionsClear(PETSC_NULL_OPTIONS,ierr) call PetscOptionsClear(PETSC_NULL_OPTIONS,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr) if (debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,& call PetscOptionsInsertString(PETSC_NULL_OPTIONS,&
num_grid%get_asString('PETSc_options',defaultVal=''),ierr) num_grid%get_asString('PETSc_options',defaultVal=''),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
grid1Red = grid(1)/2 + 1 grid1Red = grid(1)/2 + 1
wgt = 1.0/real(product(grid),pReal) wgt = 1.0/real(product(grid),pReal)
@@ -292,12 +292,12 @@ subroutine spectral_utilities_init
tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
tensorField_real, tensorField_fourier, & ! input data, output data tensorField_real, tensorField_fourier, & ! input data, output data
PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planner precision PETSC_COMM_WORLD, FFTW_planner_flag) ! use all processors, planner precision
if (.not. C_ASSOCIATED(planTensorForth)) error stop 'FFTW error' if (.not. c_associated(planTensorForth)) error stop 'FFTW error'
planTensorBack = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order planTensorBack = fftw_mpi_plan_many_dft_c2r(3, [gridFFTW(3),gridFFTW(2),gridFFTW(1)], & ! dimension, logical length in each dimension in reversed order
tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock tensorSize, FFTW_MPI_DEFAULT_BLOCK, FFTW_MPI_DEFAULT_BLOCK, &! no. of transforms, default iblock and oblock
tensorField_fourier,tensorField_real, & ! input data, output data tensorField_fourier,tensorField_real, & ! input data, output data
PETSC_COMM_WORLD, FFTW_planner_flag) ! all processors, planner precision PETSC_COMM_WORLD, FFTW_planner_flag) ! all processors, planner precision
if (.not. C_ASSOCIATED(planTensorBack)) error stop 'FFTW error' if (.not. c_associated(planTensorBack)) error stop 'FFTW error'
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! vector MPI fftw plans ! vector MPI fftw plans
@@ -559,8 +559,9 @@ end subroutine utilities_fourierGreenConvolution
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
real(pReal) function utilities_divergenceRMS() real(pReal) function utilities_divergenceRMS()
integer :: i, j, k, ierr integer :: i, j, k
complex(pReal), dimension(3) :: rescaledGeom integer(MPI_INTEGER_KIND) :: err_MPI
complex(pReal), dimension(3) :: rescaledGeom
print'(/,1x,a)', '... calculating divergence ................................................' print'(/,1x,a)', '... calculating divergence ................................................'
flush(IO_STDOUT) flush(IO_STDOUT)
@@ -589,8 +590,8 @@ real(pReal) function utilities_divergenceRMS()
conjg(-xi1st(1:3,grid1Red,j,k))*rescaledGeom))**2) conjg(-xi1st(1:3,grid1Red,j,k))*rescaledGeom))**2)
enddo; enddo enddo; enddo
if (grid(1) == 1) utilities_divergenceRMS = utilities_divergenceRMS * 0.5_pReal ! counted twice in case of grid(1) == 1 if (grid(1) == 1) utilities_divergenceRMS = utilities_divergenceRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
call MPI_Allreduce(MPI_IN_PLACE,utilities_divergenceRMS,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,utilities_divergenceRMS,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
utilities_divergenceRMS = sqrt(utilities_divergenceRMS) * wgt ! RMS in real space calculated with Parseval's theorem from Fourier space utilities_divergenceRMS = sqrt(utilities_divergenceRMS) * wgt ! RMS in real space calculated with Parseval's theorem from Fourier space
end function utilities_divergenceRMS end function utilities_divergenceRMS
@@ -601,7 +602,8 @@ end function utilities_divergenceRMS
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
real(pReal) function utilities_curlRMS() real(pReal) function utilities_curlRMS()
integer :: i, j, k, l, ierr integer :: i, j, k, l
integer(MPI_INTEGER_KIND) :: err_MPI
complex(pReal), dimension(3,3) :: curl_fourier complex(pReal), dimension(3,3) :: curl_fourier
complex(pReal), dimension(3) :: rescaledGeom complex(pReal), dimension(3) :: rescaledGeom
@@ -649,8 +651,8 @@ real(pReal) function utilities_curlRMS()
+ sum(curl_fourier%re**2 + curl_fourier%im**2) ! this layer (Nyquist) does not have a conjugate complex counterpart (if grid(1) /= 1) + sum(curl_fourier%re**2 + curl_fourier%im**2) ! this layer (Nyquist) does not have a conjugate complex counterpart (if grid(1) /= 1)
enddo; enddo enddo; enddo
call MPI_Allreduce(MPI_IN_PLACE,utilities_curlRMS,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,utilities_curlRMS,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
utilities_curlRMS = sqrt(utilities_curlRMS) * wgt utilities_curlRMS = sqrt(utilities_curlRMS) * wgt
if (grid(1) == 1) utilities_curlRMS = utilities_curlRMS * 0.5_pReal ! counted twice in case of grid(1) == 1 if (grid(1) == 1) utilities_curlRMS = utilities_curlRMS * 0.5_pReal ! counted twice in case of grid(1) == 1
@@ -799,8 +801,8 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
type(rotation), intent(in), optional :: rotation_BC !< rotation of load frame type(rotation), intent(in), optional :: rotation_BC !< rotation of load frame
integer :: & integer :: i
i,ierr integer(MPI_INTEGER_KIND) :: err_MPI
real(pReal), dimension(3,3,3,3) :: dPdF_max, dPdF_min real(pReal), dimension(3,3,3,3) :: dPdF_max, dPdF_min
real(pReal) :: dPdF_norm_max, dPdF_norm_min real(pReal) :: dPdF_norm_max, dPdF_norm_min
real(pReal), dimension(2) :: valueAndRank !< pair of min/max norm of dPdF to synchronize min/max of dPdF real(pReal), dimension(2) :: valueAndRank !< pair of min/max norm of dPdF to synchronize min/max of dPdF
@@ -818,7 +820,8 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
P = reshape(homogenization_P, [3,3,grid(1),grid(2),grid3]) P = reshape(homogenization_P, [3,3,grid(1),grid(2),grid3])
P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt P_av = sum(sum(sum(P,dim=5),dim=4),dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,P_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (debugRotation) print'(/,1x,a,/,2(3(2x,f12.4,1x)/),3(2x,f12.4,1x))', & if (debugRotation) print'(/,1x,a,/,2(3(2x,f12.4,1x)/),3(2x,f12.4,1x))', &
'Piola--Kirchhoff stress (lab) / MPa =', transpose(P_av)*1.e-6_pReal 'Piola--Kirchhoff stress (lab) / MPa =', transpose(P_av)*1.e-6_pReal
if (present(rotation_BC)) P_av = rotation_BC%rotate(P_av) if (present(rotation_BC)) P_av = rotation_BC%rotate(P_av)
@@ -842,22 +845,22 @@ subroutine utilities_constitutiveResponse(P,P_av,C_volAvg,C_minmaxAvg,&
enddo enddo
valueAndRank = [dPdF_norm_max,real(worldrank,pReal)] valueAndRank = [dPdF_norm_max,real(worldrank,pReal)]
call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1, MPI_2DOUBLE_PRECISION, MPI_MAXLOC, MPI_COMM_WORLD, ierr) call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1_MPI_INTEGER_KIND,MPI_2DOUBLE_PRECISION,MPI_MAXLOC,MPI_COMM_WORLD,err_MPI)
if (ierr /= 0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(dPdF_max,81,MPI_DOUBLE,int(valueAndRank(2)),MPI_COMM_WORLD, ierr) call MPI_Bcast(dPdF_max,81_MPI_INTEGER_KIND,MPI_DOUBLE,int(valueAndRank(2),MPI_INTEGER_KIND),MPI_COMM_WORLD,err_MPI)
if (ierr /= 0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
valueAndRank = [dPdF_norm_min,real(worldrank,pReal)] valueAndRank = [dPdF_norm_min,real(worldrank,pReal)]
call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1, MPI_2DOUBLE_PRECISION, MPI_MINLOC, MPI_COMM_WORLD, ierr) call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1_MPI_INTEGER_KIND,MPI_2DOUBLE_PRECISION,MPI_MINLOC,MPI_COMM_WORLD,err_MPI)
if (ierr /= 0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(dPdF_min,81,MPI_DOUBLE,int(valueAndRank(2)),MPI_COMM_WORLD, ierr) call MPI_Bcast(dPdF_min,81_MPI_INTEGER_KIND,MPI_DOUBLE,int(valueAndRank(2),MPI_INTEGER_KIND),MPI_COMM_WORLD,err_MPI)
if (ierr /= 0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
C_minmaxAvg = 0.5_pReal*(dPdF_max + dPdF_min) C_minmaxAvg = 0.5_pReal*(dPdF_max + dPdF_min)
C_volAvg = sum(homogenization_dPdF,dim=5) C_volAvg = sum(homogenization_dPdF,dim=5)
call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (ierr /= 0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
C_volAvg = C_volAvg * wgt C_volAvg = C_volAvg * wgt
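The dPdF bookkeeping above uses a two-step idiom: an MPI_MAXLOC (or MPI_MINLOC) reduction on a (norm, rank) pair locates the rank holding the extreme tangent, and that rank then broadcasts the full 3x3x3x3 tensor. The same steps, condensed into a hypothetical helper (worldrank and the kind constants are taken from the patch; the wrapper itself is illustrative only):

subroutine broadcast_max_dPdF(dPdF_max,dPdF_norm_max)         ! hypothetical helper, not in DAMASK
  use prec, only: pReal
  real(pReal), dimension(3,3,3,3), intent(inout) :: dPdF_max  ! on exit: tensor of the rank with the largest norm
  real(pReal),                     intent(in)    :: dPdF_norm_max
  real(pReal), dimension(2) :: valueAndRank                   ! (norm, owning rank) pair for MAXLOC
  integer(MPI_INTEGER_KIND) :: err_MPI

  valueAndRank = [dPdF_norm_max,real(worldrank,pReal)]
  call MPI_Allreduce(MPI_IN_PLACE,valueAndRank,1_MPI_INTEGER_KIND,MPI_2DOUBLE_PRECISION,MPI_MAXLOC,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  ! the rank stored in the second pair entry owns the maximum and sends its tensor to everybody
  call MPI_Bcast(dPdF_max,81_MPI_INTEGER_KIND,MPI_DOUBLE,int(valueAndRank(2),MPI_INTEGER_KIND),MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end subroutine broadcast_max_dPdF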
@@ -906,12 +909,13 @@ function utilities_forwardField(Delta_t,field_lastInc,rate,aim)
real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: & real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: &
utilities_forwardField utilities_forwardField
real(pReal), dimension(3,3) :: fieldDiff !< <a + adot*t> - aim real(pReal), dimension(3,3) :: fieldDiff !< <a + adot*t> - aim
PetscErrorCode :: ierr integer(MPI_INTEGER_KIND) :: err_MPI
utilities_forwardField = field_lastInc + rate*Delta_t utilities_forwardField = field_lastInc + rate*Delta_t
if (present(aim)) then !< correct to match average if (present(aim)) then !< correct to match average
fieldDiff = sum(sum(sum(utilities_forwardField,dim=5),dim=4),dim=3)*wgt fieldDiff = sum(sum(sum(utilities_forwardField,dim=5),dim=4),dim=3)*wgt
call MPI_Allreduce(MPI_IN_PLACE,fieldDiff,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,fieldDiff,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
fieldDiff = fieldDiff - aim fieldDiff = fieldDiff - aim
utilities_forwardField = utilities_forwardField - & utilities_forwardField = utilities_forwardField - &
spread(spread(spread(fieldDiff,3,grid(1)),4,grid(2)),5,grid3) spread(spread(spread(fieldDiff,3,grid(1)),4,grid(2)),5,grid3)
@@ -981,9 +985,10 @@ subroutine utilities_updateCoords(F)
real(pReal), dimension(3, grid(1)+1,grid(2)+1,grid3+1) :: nodeCoords real(pReal), dimension(3, grid(1)+1,grid(2)+1,grid3+1) :: nodeCoords
integer :: & integer :: &
i,j,k,n, & i,j,k,n, &
rank_t, rank_b, & c
c, & integer(MPI_INTEGER_KIND) :: &
ierr rank_t, rank_b
integer(MPI_INTEGER_KIND) :: err_MPI
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
type(MPI_Request), dimension(4) :: request type(MPI_Request), dimension(4) :: request
type(MPI_Status), dimension(4) :: status type(MPI_Status), dimension(4) :: status
@@ -1025,30 +1030,30 @@ subroutine utilities_updateCoords(F)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! average F ! average F
if (grid3Offset == 0) Favg = real(tensorField_fourier(1:3,1:3,1,1,1),pReal)*wgt if (grid3Offset == 0) Favg = real(tensorField_fourier(1:3,1:3,1,1,1),pReal)*wgt
call MPI_Bcast(Favg,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(Favg,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! pad cell center fluctuations along z-direction (needed when running MPI simulation) ! pad cell center fluctuations along z-direction (needed when running MPI simulation)
IPfluct_padded(1:3,1:grid(1),1:grid(2),2:grid3+1) = vectorField_real(1:3,1:grid(1),1:grid(2),1:grid3) IPfluct_padded(1:3,1:grid(1),1:grid(2),2:grid3+1) = vectorField_real(1:3,1:grid(1),1:grid(2),1:grid3)
c = product(shape(IPfluct_padded(:,:,:,1))) !< amount of data to transfer c = product(shape(IPfluct_padded(:,:,:,1))) !< amount of data to transfer
rank_t = modulo(worldrank+1,worldsize) rank_t = modulo(worldrank+1_MPI_INTEGER_KIND,worldsize)
rank_b = modulo(worldrank-1,worldsize) rank_b = modulo(worldrank-1_MPI_INTEGER_KIND,worldsize)
! send bottom layer to process below ! send bottom layer to process below
call MPI_Isend(IPfluct_padded(:,:,:,2), c,MPI_DOUBLE,rank_b,0,MPI_COMM_WORLD,request(1),ierr) call MPI_Isend(IPfluct_padded(:,:,:,2), c,MPI_DOUBLE,rank_b,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(1),err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Irecv(IPfluct_padded(:,:,:,grid3+2),c,MPI_DOUBLE,rank_t,0,MPI_COMM_WORLD,request(2),ierr) call MPI_Irecv(IPfluct_padded(:,:,:,grid3+2),c,MPI_DOUBLE,rank_t,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(2),err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
! send top layer to process above ! send top layer to process above
call MPI_Isend(IPfluct_padded(:,:,:,grid3+1),c,MPI_DOUBLE,rank_t,1,MPI_COMM_WORLD,request(3),ierr) call MPI_Isend(IPfluct_padded(:,:,:,grid3+1),c,MPI_DOUBLE,rank_t,1_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(3),err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Irecv(IPfluct_padded(:,:,:,1), c,MPI_DOUBLE,rank_b,1,MPI_COMM_WORLD,request(4),ierr) call MPI_Irecv(IPfluct_padded(:,:,:,1), c,MPI_DOUBLE,rank_b,1_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(4),err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Waitall(4,request,status,ierr) call MPI_Waitall(4,request,status,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY) #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
! ToDo ! ToDo
#else #else
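The padded-halo exchange above is the only explicit point-to-point MPI in the grid solver; ranks, tags and roots now carry MPI_INTEGER_KIND while the transfer count stays a default integer, exactly as declared in the hunk. A compact sketch of one send-down/receive-up step (buffer name shortened; declarations of buf, request, status, worldrank and worldsize follow the routine above, and the request/status kinds depend on the PETSc/MPI combination handled by the #if):

integer(MPI_INTEGER_KIND) :: rank_t, rank_b, err_MPI    ! neighbouring ranks (periodic along z)
integer                   :: c                          ! words per z-layer

rank_t = modulo(worldrank+1_MPI_INTEGER_KIND,worldsize)
rank_b = modulo(worldrank-1_MPI_INTEGER_KIND,worldsize)
c = product(shape(buf(:,:,:,1)))
call MPI_Isend(buf(:,:,:,2),      c,MPI_DOUBLE,rank_b,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(1),err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Irecv(buf(:,:,:,grid3+2),c,MPI_DOUBLE,rank_t,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,request(2),err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Waitall(2,request(1:2),status(1:2),err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'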

View File

@@ -78,7 +78,7 @@ program DAMASK_mesh
type(tLoadCase), allocatable, dimension(:) :: loadCases !< array of all load cases type(tLoadCase), allocatable, dimension(:) :: loadCases !< array of all load cases
type(tSolutionState), allocatable, dimension(:) :: solres type(tSolutionState), allocatable, dimension(:) :: solres
PetscInt :: faceSet, currentFaceSet, dimPlex PetscInt :: faceSet, currentFaceSet, dimPlex
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(kind(COMPONENT_UNDEFINED_ID)) :: ID integer(kind(COMPONENT_UNDEFINED_ID)) :: ID
external :: & external :: &
quit quit
@@ -98,8 +98,8 @@ program DAMASK_mesh
if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack') if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')
! reading basic information from load case file and allocate data structure containing load cases ! reading basic information from load case file and allocate data structure containing load cases
call DMGetDimension(geomMesh,dimPlex,ierr) !< dimension of mesh (2D or 3D) call DMGetDimension(geomMesh,dimPlex,err_PETSc) !< dimension of mesh (2D or 3D)
CHKERRA(ierr) CHKERRA(err_PETSc)
allocate(solres(1)) allocate(solres(1))
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------

View File

@@ -50,7 +50,7 @@ module FEM_utilities
type, public :: tSolutionState !< return type of solution from FEM solver variants type, public :: tSolutionState !< return type of solution from FEM solver variants
logical :: converged = .true. logical :: converged = .true.
logical :: stagConverged = .true. logical :: stagConverged = .true.
integer :: iterationsNeeded = 0 PetscInt :: iterationsNeeded = 0_pPETSCINT
end type tSolutionState end type tSolutionState
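iterationsNeeded switches to PetscInt (initialised with the new pPETSCINT literal kind) so the component can be written directly by SNESGetIterationNumber, which returns a PetscInt; with 64bit PETSc indices a default integer would no longer match that interface. Sketch of the intended use, mirroring mesh_mechanical_FEM below (the local variable name is illustrative):

type(tSolutionState) :: solution
PetscErrorCode       :: err_PETSc

call SNESGetIterationNumber(mechanical_snes,solution%iterationsNeeded,err_PETSc)  ! writes a PetscInt
CHKERRQ(err_PETSc)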
type, public :: tComponentBC type, public :: tComponentBC
@@ -92,7 +92,7 @@ subroutine FEM_utilities_init
p_i !< integration order (quadrature rule) p_i !< integration order (quadrature rule)
character(len=*), parameter :: & character(len=*), parameter :: &
PETSCDEBUG = ' -snes_view -snes_monitor ' PETSCDEBUG = ' -snes_view -snes_monitor '
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
logical :: debugPETSc !< use some in debug defined options for more verbose PETSc solution logical :: debugPETSc !< use some in debug defined options for more verbose PETSc solution
@@ -103,9 +103,9 @@ subroutine FEM_utilities_init
p_s = num_mesh%get_asInt('p_s',defaultVal = 2) p_s = num_mesh%get_asInt('p_s',defaultVal = 2)
p_i = num_mesh%get_asInt('p_i',defaultVal = p_s) p_i = num_mesh%get_asInt('p_i',defaultVal = p_s)
if (p_s < 1_pInt .or. p_s > size(FEM_nQuadrature,2)) & if (p_s < 1 .or. p_s > size(FEM_nQuadrature,2)) &
call IO_error(821,ext_msg='shape function order (p_s) out of bounds') call IO_error(821,ext_msg='shape function order (p_s) out of bounds')
if (p_i < max(1_pInt,p_s-1_pInt) .or. p_i > p_s) & if (p_i < max(1,p_s-1) .or. p_i > p_s) &
call IO_error(821,ext_msg='integration order (p_i) out of bounds') call IO_error(821,ext_msg='integration order (p_i) out of bounds')
debug_mesh => config_debug%get('mesh',defaultVal=emptyList) debug_mesh => config_debug%get('mesh',defaultVal=emptyList)
@@ -116,20 +116,20 @@ subroutine FEM_utilities_init
trim(PETScDebug), & trim(PETScDebug), &
'add more using the "PETSc_options" keyword in numerics.yaml' 'add more using the "PETSc_options" keyword in numerics.yaml'
flush(IO_STDOUT) flush(IO_STDOUT)
call PetscOptionsClear(PETSC_NULL_OPTIONS,ierr) call PetscOptionsClear(PETSC_NULL_OPTIONS,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
if(debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr) if(debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type newtonls & call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type newtonls &
&-mechanical_snes_linesearch_type cp -mechanical_snes_ksp_ew & &-mechanical_snes_linesearch_type cp -mechanical_snes_ksp_ew &
&-mechanical_snes_ksp_ew_rtol0 0.01 -mechanical_snes_ksp_ew_rtolmax 0.01 & &-mechanical_snes_ksp_ew_rtol0 0.01 -mechanical_snes_ksp_ew_rtolmax 0.01 &
&-mechanical_ksp_type fgmres -mechanical_ksp_max_it 25', ierr) &-mechanical_ksp_type fgmres -mechanical_ksp_max_it 25', err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_mesh%get_asString('PETSc_options',defaultVal=''),ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_mesh%get_asString('PETSc_options',defaultVal=''),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
write(petsc_optionsOrder,'(a,i0)') '-mechFE_petscspace_degree ', p_s write(petsc_optionsOrder,'(a,i0)') '-mechFE_petscspace_degree ', p_s
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_optionsOrder),ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_optionsOrder),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
wgt = 1.0/real(mesh_maxNips*mesh_NcpElemsGlobal,pReal) wgt = 1.0/real(mesh_maxNips*mesh_NcpElemsGlobal,pReal)
@@ -144,10 +144,9 @@ subroutine utilities_constitutiveResponse(timeinc,P_av,forwardData)
real(pReal), intent(in) :: timeinc !< loading time real(pReal), intent(in) :: timeinc !< loading time
logical, intent(in) :: forwardData !< age results logical, intent(in) :: forwardData !< age results
real(pReal),intent(out), dimension(3,3) :: P_av !< average PK stress real(pReal),intent(out), dimension(3,3) :: P_av !< average PK stress
PetscErrorCode :: ierr integer(MPI_INTEGER_KIND) :: err_MPI
print'(/,1x,a)', '... evaluating constitutive response ......................................' print'(/,1x,a)', '... evaluating constitutive response ......................................'
@@ -157,7 +156,9 @@ subroutine utilities_constitutiveResponse(timeinc,P_av,forwardData)
cutBack = .false. cutBack = .false.
P_av = sum(homogenization_P,dim=3) * wgt P_av = sum(homogenization_P,dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,P_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end subroutine utilities_constitutiveResponse end subroutine utilities_constitutiveResponse
@@ -174,26 +175,29 @@ subroutine utilities_projectBCValues(localVec,section,field,comp,bcPointsIS,BCVa
PetscInt, pointer :: bcPoints(:) PetscInt, pointer :: bcPoints(:)
PetscScalar, pointer :: localArray(:) PetscScalar, pointer :: localArray(:)
PetscScalar :: BCValue,BCDotValue,timeinc PetscScalar :: BCValue,BCDotValue,timeinc
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
call PetscSectionGetFieldComponents(section,field,numComp,ierr); CHKERRQ(ierr) call PetscSectionGetFieldComponents(section,field,numComp,err_PETSc)
call ISGetSize(bcPointsIS,nBcPoints,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (nBcPoints > 0) call ISGetIndicesF90(bcPointsIS,bcPoints,ierr) call ISGetSize(bcPointsIS,nBcPoints,err_PETSc)
call VecGetArrayF90(localVec,localArray,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (nBcPoints > 0) call ISGetIndicesF90(bcPointsIS,bcPoints,err_PETSc)
call VecGetArrayF90(localVec,localArray,err_PETSc); CHKERRQ(err_PETSc)
do point = 1, nBcPoints do point = 1, nBcPoints
call PetscSectionGetFieldDof(section,bcPoints(point),field,numDof,ierr) call PetscSectionGetFieldDof(section,bcPoints(point),field,numDof,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscSectionGetFieldOffset(section,bcPoints(point),field,offset,ierr) call PetscSectionGetFieldOffset(section,bcPoints(point),field,offset,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
do dof = offset+comp+1, offset+numDof, numComp do dof = offset+comp+1, offset+numDof, numComp
localArray(dof) = localArray(dof) + BCValue + BCDotValue*timeinc localArray(dof) = localArray(dof) + BCValue + BCDotValue*timeinc
end do end do
end do end do
call VecRestoreArrayF90(localVec,localArray,ierr); CHKERRQ(ierr) call VecRestoreArrayF90(localVec,localArray,err_PETSc); CHKERRQ(err_PETSc)
call VecAssemblyBegin(localVec, ierr); CHKERRQ(ierr) call VecAssemblyBegin(localVec, err_PETSc); CHKERRQ(err_PETSc)
call VecAssemblyEnd (localVec, ierr); CHKERRQ(ierr) call VecAssemblyEnd (localVec, err_PETSc); CHKERRQ(err_PETSc)
if (nBcPoints > 0) call ISRestoreIndicesF90(bcPointsIS,bcPoints,ierr) if (nBcPoints > 0) call ISRestoreIndicesF90(bcPointsIS,bcPoints,err_PETSc)
CHKERRQ(err_PETSc)
end subroutine utilities_projectBCValues end subroutine utilities_projectBCValues
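utilities_projectBCValues keeps the standard PETSc local-vector editing cycle, now with err_PETSc checked after every call: expose the vector as a Fortran array, add the boundary-condition increment at the constrained dofs, restore the array, then assemble. The core of that cycle, sketched with minimal declarations (loop bounds over dof as in the routine above):

PetscScalar, pointer :: localArray(:)
PetscScalar          :: BCValue, BCDotValue, timeinc
PetscInt             :: dof
PetscErrorCode       :: err_PETSc

call VecGetArrayF90(localVec,localArray,err_PETSc)        ! expose the local vector as a Fortran array
CHKERRQ(err_PETSc)
localArray(dof) = localArray(dof) + BCValue + BCDotValue*timeinc
call VecRestoreArrayF90(localVec,localArray,err_PETSc)    ! hand the data back to PETSc
CHKERRQ(err_PETSc)
call VecAssemblyBegin(localVec,err_PETSc); CHKERRQ(err_PETSc)
call VecAssemblyEnd  (localVec,err_PETSc); CHKERRQ(err_PETSc)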

View File

@@ -71,21 +71,22 @@ subroutine discretization_mesh_init(restart)
logical, intent(in) :: restart logical, intent(in) :: restart
integer :: dimPlex, & PetscInt :: dimPlex, &
mesh_Nnodes, & !< total number of nodes in mesh mesh_Nnodes, & !< total number of nodes in mesh
j, & j, &
debug_element, debug_ip debug_element, debug_ip
PetscSF :: sf PetscSF :: sf
DM :: globalMesh DM :: globalMesh
PetscInt :: nFaceSets PetscInt :: nFaceSets, Nboundaries, NelemsGlobal, Nelems
PetscInt, pointer, dimension(:) :: pFaceSets PetscInt, pointer, dimension(:) :: pFaceSets
IS :: faceSetIS IS :: faceSetIS
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer, dimension(:), allocatable :: & integer(MPI_INTEGER_KIND) :: err_MPI
PetscInt, dimension(:), allocatable :: &
materialAt materialAt
class(tNode), pointer :: & class(tNode), pointer :: &
num_mesh num_mesh
integer :: p_i !< integration order (quadrature rule) integer :: p_i, dim !< integration order (quadrature rule)
type(tvec) :: coords_node0 type(tvec) :: coords_node0
print'(/,1x,a)', '<<<+- discretization_mesh init -+>>>' print'(/,1x,a)', '<<<+- discretization_mesh init -+>>>'
@@ -100,56 +101,64 @@ subroutine discretization_mesh_init(restart)
debug_element = config_debug%get_asInt('element',defaultVal=1) debug_element = config_debug%get_asInt('element',defaultVal=1)
debug_ip = config_debug%get_asInt('integrationpoint',defaultVal=1) debug_ip = config_debug%get_asInt('integrationpoint',defaultVal=1)
call DMPlexCreateFromFile(PETSC_COMM_WORLD,interface_geomFile,PETSC_TRUE,globalMesh,ierr) call DMPlexCreateFromFile(PETSC_COMM_WORLD,interface_geomFile,PETSC_TRUE,globalMesh,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetDimension(globalMesh,dimPlex,ierr) call DMGetDimension(globalMesh,dimPlex,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetStratumSize(globalMesh,'depth',dimPlex,mesh_NcpElemsGlobal,ierr) call DMGetStratumSize(globalMesh,'depth',dimPlex,NelemsGlobal,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
print'()' mesh_NcpElemsGlobal = int(NelemsGlobal)
call DMView(globalMesh, PETSC_VIEWER_STDOUT_WORLD,ierr) call DMView(globalMesh, PETSC_VIEWER_STDOUT_WORLD,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
! get number of IDs in face sets (for boundary conditions?) ! get number of IDs in face sets (for boundary conditions?)
call DMGetLabelSize(globalMesh,'Face Sets',mesh_Nboundaries,ierr) call DMGetLabelSize(globalMesh,'Face Sets',Nboundaries,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call MPI_Bcast(mesh_Nboundaries,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr) mesh_Nboundaries = int(Nboundaries)
call MPI_Bcast(mesh_NcpElemsGlobal,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(mesh_Nboundaries,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(dimPlex,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(mesh_NcpElemsGlobal,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
dim = int(dimPlex)
call MPI_Bcast(dim,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
dimPlex = int(dim,pPETSCINT)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (worldrank == 0) then if (worldrank == 0) then
call DMClone(globalMesh,geomMesh,ierr) call DMClone(globalMesh,geomMesh,err_PETSc)
else else
call DMPlexDistribute(globalMesh,0,sf,geomMesh,ierr) call DMPlexDistribute(globalMesh,0_pPETSCINT,sf,geomMesh,err_PETSc)
endif endif
CHKERRQ(ierr) CHKERRQ(err_PETSc)
allocate(mesh_boundaries(mesh_Nboundaries), source = 0) allocate(mesh_boundaries(mesh_Nboundaries), source = 0_pPETSCINT)
call DMGetLabelSize(globalMesh,'Face Sets',nFaceSets,ierr) call DMGetLabelSize(globalMesh,'Face Sets',nFaceSets,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetLabelIdIS(globalMesh,'Face Sets',faceSetIS,ierr) call DMGetLabelIdIS(globalMesh,'Face Sets',faceSetIS,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (nFaceSets > 0) then if (nFaceSets > 0) then
call ISGetIndicesF90(faceSetIS,pFaceSets,ierr) call ISGetIndicesF90(faceSetIS,pFaceSets,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
mesh_boundaries(1:nFaceSets) = pFaceSets mesh_boundaries(1:nFaceSets) = pFaceSets
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call ISRestoreIndicesF90(faceSetIS,pFaceSets,ierr) call ISRestoreIndicesF90(faceSetIS,pFaceSets,err_PETSc)
endif endif
call MPI_Bcast(mesh_boundaries,mesh_Nboundaries,MPI_INTEGER,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(mesh_boundaries,mesh_Nboundaries,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDestroy(globalMesh,ierr); CHKERRQ(ierr) call DMDestroy(globalMesh,err_PETSc); CHKERRQ(err_PETSc)
call DMGetStratumSize(geomMesh,'depth',dimPlex,mesh_NcpElems,ierr) call DMGetStratumSize(geomMesh,'depth',dimPlex,Nelems,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetStratumSize(geomMesh,'depth',0,mesh_Nnodes,ierr) mesh_NcpElems = int(Nelems)
CHKERRQ(ierr) call DMGetStratumSize(geomMesh,'depth',0_pPETSCINT,mesh_Nnodes,err_PETSc)
CHKERRQ(err_PETSc)
! Get initial nodal coordinates ! Get initial nodal coordinates
call DMGetCoordinates(geomMesh,coords_node0,ierr) call DMGetCoordinates(geomMesh,coords_node0,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call VecGetArrayF90(coords_node0, mesh_node0_temp,ierr) call VecGetArrayF90(coords_node0, mesh_node0_temp,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
mesh_maxNips = FEM_nQuadrature(dimPlex,p_i) mesh_maxNips = FEM_nQuadrature(dimPlex,p_i)
@@ -158,10 +167,10 @@ subroutine discretization_mesh_init(restart)
allocate(materialAt(mesh_NcpElems)) allocate(materialAt(mesh_NcpElems))
do j = 1, mesh_NcpElems do j = 1, mesh_NcpElems
call DMGetLabelValue(geomMesh,'Cell Sets',j-1,materialAt(j),ierr) call DMGetLabelValue(geomMesh,'Cell Sets',j-1,materialAt(j),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
materialAt = materialAt + 1 materialAt = materialAt + 1_pPETSCINT
if (debug_element < 1 .or. debug_element > mesh_NcpElems) call IO_error(602,ext_msg='element') if (debug_element < 1 .or. debug_element > mesh_NcpElems) call IO_error(602,ext_msg='element')
if (debug_ip < 1 .or. debug_ip > mesh_maxNips) call IO_error(602,ext_msg='IP') if (debug_ip < 1 .or. debug_ip > mesh_maxNips) call IO_error(602,ext_msg='IP')
@@ -170,7 +179,7 @@ subroutine discretization_mesh_init(restart)
mesh_node0(1:dimPlex,:) = reshape(mesh_node0_temp,[dimPlex,mesh_Nnodes]) mesh_node0(1:dimPlex,:) = reshape(mesh_node0_temp,[dimPlex,mesh_Nnodes])
call discretization_init(materialAt,& call discretization_init(int(materialAt),&
reshape(mesh_ipCoordinates,[3,mesh_maxNips*mesh_NcpElems]), & reshape(mesh_ipCoordinates,[3,mesh_maxNips*mesh_NcpElems]), &
mesh_node0) mesh_node0)
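Sizes queried from PETSc (DMGetLabelSize, DMGetStratumSize) now land in PetscInt temporaries and are narrowed with int() before feeding the default-integer mesh variables; literals going the other way are widened with the new pPETSCINT kind. Both directions in one condensed sketch (names follow the patch, the enclosing routine is implied):

PetscInt       :: dimPlex, Nelems, cellStart, cellEnd
PetscErrorCode :: err_PETSc
integer        :: mesh_NcpElems

call DMGetStratumSize(geomMesh,'depth',dimPlex,Nelems,err_PETSc)                 ! PETSc hands back a PetscInt
CHKERRQ(err_PETSc)
mesh_NcpElems = int(Nelems)                                                      ! narrow to default integer for non-PETSc code
call DMPlexGetHeightStratum(geomMesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)    ! literal widened to PetscInt
CHKERRQ(err_PETSc)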
@@ -188,16 +197,17 @@ subroutine mesh_FEM_build_ipVolumes(dimPlex)
PetscReal :: vol PetscReal :: vol
PetscReal, pointer,dimension(:) :: pCent, pNorm PetscReal, pointer,dimension(:) :: pCent, pNorm
PetscInt :: cellStart, cellEnd, cell PetscInt :: cellStart, cellEnd, cell
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
allocate(mesh_ipVolume(mesh_maxNips,mesh_NcpElems),source=0.0_pReal) allocate(mesh_ipVolume(mesh_maxNips,mesh_NcpElems),source=0.0_pReal)
call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(geomMesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)
allocate(pCent(dimPlex)) allocate(pCent(dimPlex))
allocate(pNorm(dimPlex)) allocate(pNorm(dimPlex))
do cell = cellStart, cellEnd-1 do cell = cellStart, cellEnd-1
call DMPlexComputeCellGeometryFVM(geomMesh,cell,vol,pCent,pNorm,ierr) call DMPlexComputeCellGeometryFVM(geomMesh,cell,vol,pCent,pNorm,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
mesh_ipVolume(:,cell+1) = vol/real(mesh_maxNips,pReal) mesh_ipVolume(:,cell+1) = vol/real(mesh_maxNips,pReal)
enddo enddo
@@ -215,7 +225,7 @@ subroutine mesh_FEM_build_ipCoordinates(dimPlex,qPoints)
PetscReal, pointer,dimension(:) :: pV0, pCellJ, pInvcellJ PetscReal, pointer,dimension(:) :: pV0, pCellJ, pInvcellJ
PetscReal :: detJ PetscReal :: detJ
PetscInt :: cellStart, cellEnd, cell, qPt, dirI, dirJ, qOffset PetscInt :: cellStart, cellEnd, cell, qPt, dirI, dirJ, qOffset
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
allocate(mesh_ipCoordinates(3,mesh_maxNips,mesh_NcpElems),source=0.0_pReal) allocate(mesh_ipCoordinates(3,mesh_maxNips,mesh_NcpElems),source=0.0_pReal)
@@ -223,10 +233,11 @@ subroutine mesh_FEM_build_ipCoordinates(dimPlex,qPoints)
allocate(pV0(dimPlex)) allocate(pV0(dimPlex))
allocate(pCellJ(dimPlex**2)) allocate(pCellJ(dimPlex**2))
allocate(pinvCellJ(dimPlex**2)) allocate(pinvCellJ(dimPlex**2))
call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(geomMesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
call DMPlexComputeCellGeometryAffineFEM(geomMesh,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(geomMesh,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
qOffset = 0 qOffset = 0
do qPt = 1, mesh_maxNips do qPt = 1, mesh_maxNips
do dirI = 1, dimPlex do dirI = 1, dimPlex

View File

@@ -40,7 +40,7 @@ module mesh_mechanical_FEM
type(tSolutionParams) :: params type(tSolutionParams) :: params
type, private :: tNumerics type, private :: tNumerics
integer :: & PetscInt :: &
p_i, & !< integration order (quadrature rule) p_i, & !< integration order (quadrature rule)
itmax itmax
logical :: & logical :: &
@@ -55,7 +55,8 @@ module mesh_mechanical_FEM
! PETSc data ! PETSc data
SNES :: mechanical_snes SNES :: mechanical_snes
Vec :: solution, solution_rate, solution_local Vec :: solution, solution_rate, solution_local
PetscInt :: dimPlex, cellDof, nQuadrature, nBasis PetscInt :: dimPlex, cellDof, nBasis
integer :: nQuadrature
PetscReal, allocatable, target :: qPoints(:), qWeights(:) PetscReal, allocatable, target :: qPoints(:), qWeights(:)
MatNullSpace :: matnull MatNullSpace :: matnull
@@ -104,11 +105,11 @@ subroutine FEM_mechanical_init(fieldBC)
PetscReal :: detJ PetscReal :: detJ
PetscReal, allocatable, target :: cellJMat(:,:) PetscReal, allocatable, target :: cellJMat(:,:)
PetscScalar, pointer :: px_scal(:) PetscScalar, pointer, dimension(:) :: px_scal
PetscScalar, allocatable, target :: x_scal(:) PetscScalar, allocatable, target, dimension(:) :: x_scal
character(len=*), parameter :: prefix = 'mechFE_' character(len=*), parameter :: prefix = 'mechFE_'
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
real(pReal), dimension(3,3) :: devNull real(pReal), dimension(3,3) :: devNull
class(tNode), pointer :: & class(tNode), pointer :: &
num_mesh num_mesh
@@ -118,8 +119,8 @@ subroutine FEM_mechanical_init(fieldBC)
!----------------------------------------------------------------------------- !-----------------------------------------------------------------------------
! read numerical parameters and do sanity checks ! read numerical parameters and do sanity checks
num_mesh => config_numerics%get('mesh',defaultVal=emptyDict) num_mesh => config_numerics%get('mesh',defaultVal=emptyDict)
num%p_i = num_mesh%get_asInt('p_i',defaultVal = 2) num%p_i = int(num_mesh%get_asInt('p_i',defaultVal = 2),pPETSCINT)
num%itmax = num_mesh%get_asInt('itmax',defaultVal=250) num%itmax = int(num_mesh%get_asInt('itmax',defaultVal=250),pPETSCINT)
num%BBarStabilisation = num_mesh%get_asBool('bbarstabilisation',defaultVal = .false.) num%BBarStabilisation = num_mesh%get_asBool('bbarstabilisation',defaultVal = .false.)
num%eps_struct_atol = num_mesh%get_asFloat('eps_struct_atol', defaultVal = 1.0e-10_pReal) num%eps_struct_atol = num_mesh%get_asFloat('eps_struct_atol', defaultVal = 1.0e-10_pReal)
num%eps_struct_rtol = num_mesh%get_asFloat('eps_struct_rtol', defaultVal = 1.0e-4_pReal) num%eps_struct_rtol = num_mesh%get_asFloat('eps_struct_rtol', defaultVal = 1.0e-4_pReal)
@@ -130,8 +131,8 @@ subroutine FEM_mechanical_init(fieldBC)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! Setup FEM mech mesh ! Setup FEM mech mesh
call DMClone(geomMesh,mechanical_mesh,ierr); CHKERRQ(ierr) call DMClone(geomMesh,mechanical_mesh,err_PETSc); CHKERRQ(err_PETSc)
call DMGetDimension(mechanical_mesh,dimPlex,ierr); CHKERRQ(ierr) call DMGetDimension(mechanical_mesh,dimPlex,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! Setup FEM mech discretization ! Setup FEM mech discretization
@@ -140,96 +141,104 @@ subroutine FEM_mechanical_init(fieldBC)
nQuadrature = FEM_nQuadrature( dimPlex,num%p_i) nQuadrature = FEM_nQuadrature( dimPlex,num%p_i)
qPointsP => qPoints qPointsP => qPoints
qWeightsP => qWeights qWeightsP => qWeights
call PetscQuadratureCreate(PETSC_COMM_SELF,mechQuad,ierr); CHKERRQ(ierr) call PetscQuadratureCreate(PETSC_COMM_SELF,mechQuad,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
nc = dimPlex nc = dimPlex
call PetscQuadratureSetData(mechQuad,dimPlex,nc,nQuadrature,qPointsP,qWeightsP,ierr) call PetscQuadratureSetData(mechQuad,dimPlex,nc,int(nQuadrature,pPETSCINT),qPointsP,qWeightsP,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscFECreateDefault(PETSC_COMM_SELF,dimPlex,nc,PETSC_TRUE,prefix, & call PetscFECreateDefault(PETSC_COMM_SELF,dimPlex,nc,PETSC_TRUE,prefix, &
num%p_i,mechFE,ierr); CHKERRQ(ierr) num%p_i,mechFE,err_PETSc); CHKERRQ(err_PETSc)
call PetscFESetQuadrature(mechFE,mechQuad,ierr); CHKERRQ(ierr) call PetscFESetQuadrature(mechFE,mechQuad,err_PETSc); CHKERRQ(err_PETSc)
call PetscFEGetDimension(mechFE,nBasis,ierr); CHKERRQ(ierr) call PetscFEGetDimension(mechFE,nBasis,err_PETSc); CHKERRQ(err_PETSc)
nBasis = nBasis/nc nBasis = nBasis/nc
call DMAddField(mechanical_mesh,PETSC_NULL_DMLABEL,mechFE,ierr); CHKERRQ(ierr) call DMAddField(mechanical_mesh,PETSC_NULL_DMLABEL,mechFE,err_PETSc)
call DMCreateDS(mechanical_mesh,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetDS(mechanical_mesh,mechDS,ierr); CHKERRQ(ierr) call DMCreateDS(mechanical_mesh,err_PETSc); CHKERRQ(err_PETSc)
call PetscDSGetTotalDimension(mechDS,cellDof,ierr); CHKERRQ(ierr) call DMGetDS(mechanical_mesh,mechDS,err_PETSc); CHKERRQ(err_PETSc)
call PetscFEDestroy(mechFE,ierr); CHKERRQ(ierr) call PetscDSGetTotalDimension(mechDS,cellDof,err_PETSc); CHKERRQ(err_PETSc)
call PetscQuadratureDestroy(mechQuad,ierr); CHKERRQ(ierr) call PetscFEDestroy(mechFE,err_PETSc); CHKERRQ(err_PETSc)
call PetscQuadratureDestroy(mechQuad,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! Setup FEM mech boundary conditions ! Setup FEM mech boundary conditions
call DMGetLabel(mechanical_mesh,'Face Sets',BCLabel,ierr); CHKERRQ(ierr) call DMGetLabel(mechanical_mesh,'Face Sets',BCLabel,err_PETSc)
call DMPlexLabelComplete(mechanical_mesh,BCLabel,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetLocalSection(mechanical_mesh,section,ierr); CHKERRQ(ierr) call DMPlexLabelComplete(mechanical_mesh,BCLabel,err_PETSc); CHKERRQ(err_PETSc)
call DMGetLocalSection(mechanical_mesh,section,err_PETSc); CHKERRQ(err_PETSc)
allocate(pnumComp(1), source=dimPlex) allocate(pnumComp(1), source=dimPlex)
allocate(pnumDof(0:dimPlex), source = 0) allocate(pnumDof(0:dimPlex), source = 0_pPETSCINT)
do topologDim = 0, dimPlex do topologDim = 0, dimPlex
call DMPlexGetDepthStratum(mechanical_mesh,topologDim,cellStart,cellEnd,ierr) call DMPlexGetDepthStratum(mechanical_mesh,topologDim,cellStart,cellEnd,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscSectionGetDof(section,cellStart,pnumDof(topologDim),ierr) call PetscSectionGetDof(section,cellStart,pnumDof(topologDim),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
numBC = 0 numBC = 0
do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries
if (fieldBC%componentBC(field)%Mask(faceSet)) numBC = numBC + 1 if (fieldBC%componentBC(field)%Mask(faceSet)) numBC = numBC + 1
enddo; enddo enddo; enddo
allocate(pbcField(numBC), source=0) allocate(pbcField(numBC), source=0_pPETSCINT)
allocate(pbcComps(numBC)) allocate(pbcComps(numBC))
allocate(pbcPoints(numBC)) allocate(pbcPoints(numBC))
numBC = 0 numBC = 0
do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries
if (fieldBC%componentBC(field)%Mask(faceSet)) then if (fieldBC%componentBC(field)%Mask(faceSet)) then
numBC = numBC + 1 numBC = numBC + 1
call ISCreateGeneral(PETSC_COMM_WORLD,1,[field-1],PETSC_COPY_VALUES,pbcComps(numBC),ierr) call ISCreateGeneral(PETSC_COMM_WORLD,1_pPETSCINT,[field-1],PETSC_COPY_VALUES,pbcComps(numBC),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetStratumSize(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcSize,ierr) call DMGetStratumSize(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcSize,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (bcSize > 0) then if (bcSize > 0) then
call DMGetStratumIS(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcPoint,ierr) call DMGetStratumIS(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcPoint,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call ISGetIndicesF90(bcPoint,pBcPoint,ierr); CHKERRQ(ierr) call ISGetIndicesF90(bcPoint,pBcPoint,err_PETSc); CHKERRQ(err_PETSc)
call ISCreateGeneral(PETSC_COMM_WORLD,bcSize,pBcPoint,PETSC_COPY_VALUES,pbcPoints(numBC),ierr) call ISCreateGeneral(PETSC_COMM_WORLD,bcSize,pBcPoint,PETSC_COPY_VALUES,pbcPoints(numBC),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call ISRestoreIndicesF90(bcPoint,pBcPoint,ierr); CHKERRQ(ierr) call ISRestoreIndicesF90(bcPoint,pBcPoint,err_PETSc); CHKERRQ(err_PETSc)
call ISDestroy(bcPoint,ierr); CHKERRQ(ierr) call ISDestroy(bcPoint,err_PETSc); CHKERRQ(err_PETSc)
else else
call ISCreateGeneral(PETSC_COMM_WORLD,0,[0],PETSC_COPY_VALUES,pbcPoints(numBC),ierr) call ISCreateGeneral(PETSC_COMM_WORLD,0_pPETSCINT,[0_pPETSCINT],PETSC_COPY_VALUES,pbcPoints(numBC),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
endif endif
endif endif
enddo; enddo enddo; enddo
call DMPlexCreateSection(mechanical_mesh,nolabel,pNumComp,pNumDof, & call DMPlexCreateSection(mechanical_mesh,nolabel,pNumComp,pNumDof, &
numBC,pBcField,pBcComps,pBcPoints,PETSC_NULL_IS,section,ierr) numBC,pBcField,pBcComps,pBcPoints,PETSC_NULL_IS,section,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMSetSection(mechanical_mesh,section,ierr); CHKERRQ(ierr) call DMSetSection(mechanical_mesh,section,err_PETSc); CHKERRQ(err_PETSc)
do faceSet = 1, numBC do faceSet = 1, numBC
call ISDestroy(pbcPoints(faceSet),ierr); CHKERRQ(ierr) call ISDestroy(pbcPoints(faceSet),err_PETSc); CHKERRQ(err_PETSc)
enddo enddo
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc ! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,ierr);CHKERRQ(ierr) call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,err_PETSc);CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',ierr);CHKERRQ(ierr) call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',err_PETSc)
call SNESSetDM(mechanical_snes,mechanical_mesh,ierr); CHKERRQ(ierr) !< set the mesh for non-linear solver CHKERRQ(err_PETSc)
call DMCreateGlobalVector(mechanical_mesh,solution ,ierr); CHKERRQ(ierr) !< locally owned displacement Dofs call SNESSetDM(mechanical_snes,mechanical_mesh,err_PETSc) ! set the mesh for non-linear solver
call DMCreateGlobalVector(mechanical_mesh,solution_rate ,ierr); CHKERRQ(ierr) !< locally owned velocity Dofs to guess solution at next load step CHKERRQ(err_PETSc)
call DMCreateLocalVector (mechanical_mesh,solution_local ,ierr); CHKERRQ(ierr) !< locally owned velocity Dofs to guess solution at next load step call DMCreateGlobalVector(mechanical_mesh,solution, err_PETSc) ! locally owned displacement Dofs
call DMSNESSetFunctionLocal(mechanical_mesh,FEM_mechanical_formResidual,PETSC_NULL_VEC,ierr) !< function to evaluate residual forces CHKERRQ(err_PETSc)
CHKERRQ(ierr) call DMCreateGlobalVector(mechanical_mesh,solution_rate, err_PETSc) ! locally owned velocity Dofs to guess solution at next load step
call DMSNESSetJacobianLocal(mechanical_mesh,FEM_mechanical_formJacobian,PETSC_NULL_VEC,ierr) !< function to evaluate stiffness matrix CHKERRQ(err_PETSc)
CHKERRQ(ierr) call DMCreateLocalVector (mechanical_mesh,solution_local,err_PETSc) ! locally owned velocity Dofs to guess solution at next load step
call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1), ierr); CHKERRQ(ierr) !< ignore linear solve failures CHKERRQ(err_PETSc)
call SNESSetConvergenceTest(mechanical_snes,FEM_mechanical_converged,PETSC_NULL_VEC,PETSC_NULL_FUNCTION,ierr) call DMSNESSetFunctionLocal(mechanical_mesh,FEM_mechanical_formResidual,PETSC_NULL_VEC,err_PETSc) ! function to evaluate residual forces
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESSetTolerances(mechanical_snes,1.0,0.0,0.0,num%itmax,num%itmax,ierr) call DMSNESSetJacobianLocal(mechanical_mesh,FEM_mechanical_formJacobian,PETSC_NULL_VEC,err_PETSc) ! function to evaluate stiffness matrix
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESSetFromOptions(mechanical_snes,ierr); CHKERRQ(ierr) call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1_pPETSCINT), err_PETSc) ! ignore linear solve failures
CHKERRQ(err_PETSc)
call SNESSetConvergenceTest(mechanical_snes,FEM_mechanical_converged,PETSC_NULL_VEC,PETSC_NULL_FUNCTION,err_PETSc)
CHKERRQ(err_PETSc)
call SNESSetTolerances(mechanical_snes,1.0,0.0,0.0,num%itmax,num%itmax,err_PETSc)
CHKERRQ(err_PETSc)
call SNESSetFromOptions(mechanical_snes,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! init fields ! init fields
call VecSet(solution ,0.0,ierr); CHKERRQ(ierr) call VecSet(solution ,0.0,err_PETSc); CHKERRQ(err_PETSc)
call VecSet(solution_rate ,0.0,ierr); CHKERRQ(ierr) call VecSet(solution_rate ,0.0,err_PETSc); CHKERRQ(err_PETSc)
allocate(x_scal(cellDof)) allocate(x_scal(cellDof))
allocate(nodalWeightsP(1)) allocate(nodalWeightsP(1))
allocate(nodalPointsP(dimPlex)) allocate(nodalPointsP(dimPlex))
@@ -237,26 +246,26 @@ subroutine FEM_mechanical_init(fieldBC)
allocate(pcellJ(dimPlex**2)) allocate(pcellJ(dimPlex**2))
allocate(pinvcellJ(dimPlex**2)) allocate(pinvcellJ(dimPlex**2))
allocate(cellJMat(dimPlex,dimPlex)) allocate(cellJMat(dimPlex,dimPlex))
call PetscDSGetDiscretization(mechDS,0,mechFE,ierr) call PetscDSGetDiscretization(mechDS,0_pPETSCINT,mechFE,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscFEGetDualSpace(mechFE,mechDualSpace,ierr); CHKERRQ(ierr) call PetscFEGetDualSpace(mechFE,mechDualSpace,err_PETSc); CHKERRQ(err_PETSc)
call DMPlexGetHeightStratum(mechanical_mesh,0,cellStart,cellEnd,ierr) call DMPlexGetHeightStratum(mechanical_mesh,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
x_scal = 0.0_pReal x_scal = 0.0_pReal
call DMPlexComputeCellGeometryAffineFEM(mechanical_mesh,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(mechanical_mesh,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
cellJMat = reshape(pCellJ,shape=[dimPlex,dimPlex]) cellJMat = reshape(pCellJ,shape=[dimPlex,dimPlex])
do basis = 0, nBasis*dimPlex-1, dimPlex do basis = 0, nBasis*dimPlex-1, dimPlex
call PetscDualSpaceGetFunctional(mechDualSpace,basis,functional,ierr) call PetscDualSpaceGetFunctional(mechDualSpace,basis,functional,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscQuadratureGetData(functional,dimPlex,nc,nNodalPoints,nodalPointsP,nodalWeightsP,ierr) call PetscQuadratureGetData(functional,dimPlex,nc,nNodalPoints,nodalPointsP,nodalWeightsP,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
x_scal(basis+1:basis+dimPlex) = pV0 + matmul(transpose(cellJMat),nodalPointsP + 1.0_pReal) x_scal(basis+1:basis+dimPlex) = pV0 + matmul(transpose(cellJMat),nodalPointsP + 1.0_pReal)
enddo enddo
px_scal => x_scal px_scal => x_scal
call DMPlexVecSetClosure(mechanical_mesh,section,solution_local,cell,px_scal,5,ierr) call DMPlexVecSetClosure(mechanical_mesh,section,solution_local,cell,px_scal,5,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
call utilities_constitutiveResponse(0.0_pReal,devNull,.true.) call utilities_constitutiveResponse(0.0_pReal,devNull,.true.)
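In the same spirit, num%itmax is stored as PetscInt so it can be handed straight to SNESSetTolerances, and the "ignore linear solve failures" sentinel becomes huge(1_pPETSCINT) so the argument keeps the PetscInt width. Condensed sketch (tolerance values as in the hunk above; num_mesh is the numerics dictionary already fetched there):

PetscInt       :: itmax
PetscErrorCode :: err_PETSc

itmax = int(num_mesh%get_asInt('itmax',defaultVal=250),pPETSCINT)                  ! read as default integer, widen to PetscInt
call SNESSetMaxLinearSolveFailures(mechanical_snes,huge(1_pPETSCINT),err_PETSc)    ! effectively: never give up on the linear solve
CHKERRQ(err_PETSc)
call SNESSetTolerances(mechanical_snes,1.0,0.0,0.0,itmax,itmax,err_PETSc)
CHKERRQ(err_PETSc)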
@@ -279,7 +288,7 @@ type(tSolutionState) function FEM_mechanical_solution( &
character(len=*), intent(in) :: & character(len=*), intent(in) :: &
incInfoIn incInfoIn
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason SNESConvergedReason :: reason
incInfo = incInfoIn incInfo = incInfoIn
@@ -289,8 +298,10 @@ type(tSolutionState) function FEM_mechanical_solution( &
params%timeinc = timeinc params%timeinc = timeinc
params%fieldBC = fieldBC params%fieldBC = fieldBC
call SNESSolve(mechanical_snes,PETSC_NULL_VEC,solution,ierr); CHKERRQ(ierr) ! solve mechanical_snes based on solution guess (result in solution) call SNESSolve(mechanical_snes,PETSC_NULL_VEC,solution,err_PETSc) ! solve mechanical_snes based on solution guess (result in solution)
call SNESGetConvergedReason(mechanical_snes,reason,ierr); CHKERRQ(ierr) ! solution converged? CHKERRQ(err_PETSc)
call SNESGetConvergedReason(mechanical_snes,reason,err_PETSc) ! solution converged?
CHKERRQ(err_PETSc)
terminallyIll = .false. terminallyIll = .false.
if (reason < 1) then ! 0: still iterating (will not occur), negative -> convergence error if (reason < 1) then ! 0: still iterating (will not occur), negative -> convergence error
@@ -298,8 +309,8 @@ type(tSolutionState) function FEM_mechanical_solution( &
FEM_mechanical_solution%iterationsNeeded = num%itmax FEM_mechanical_solution%iterationsNeeded = num%itmax
else ! >= 1 proper convergence (or terminally ill) else ! >= 1 proper convergence (or terminally ill)
FEM_mechanical_solution%converged = .true. FEM_mechanical_solution%converged = .true.
call SNESGetIterationNumber(mechanical_snes,FEM_mechanical_solution%iterationsNeeded,ierr) call SNESGetIterationNumber(mechanical_snes,FEM_mechanical_solution%iterationsNeeded,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
endif endif
print'(/,1x,a)', '===========================================================================' print'(/,1x,a)', '==========================================================================='
@@ -311,11 +322,12 @@ end function FEM_mechanical_solution
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief forms the FEM residual vector !> @brief forms the FEM residual vector
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr) subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,err_PETSc)
DM :: dm_local DM :: dm_local
PetscObject,intent(in) :: dummy PetscObject,intent(in) :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
PetscDS :: prob PetscDS :: prob
Vec :: x_local, f_local, xx_local Vec :: x_local, f_local, xx_local
@@ -339,22 +351,25 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
allocate(pinvcellJ(dimPlex**2)) allocate(pinvcellJ(dimPlex**2))
allocate(x_scal(cellDof)) allocate(x_scal(cellDof))
call DMGetLocalSection(dm_local,section,ierr); CHKERRQ(ierr) call DMGetLocalSection(dm_local,section,err_PETSc); CHKERRQ(err_PETSc)
call DMGetDS(dm_local,prob,ierr); CHKERRQ(ierr) call DMGetDS(dm_local,prob,err_PETSc); CHKERRQ(err_PETSc)
call PetscDSGetTabulation(prob,0,basisField,basisFieldDer,ierr) call PetscDSGetTabulation(prob,0_pPETSCINT,basisField,basisFieldDer,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(dm_local,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call VecWAXPY(x_local,1.0,xx_local,solution_local,ierr); CHKERRQ(ierr) call DMGetLocalVector(dm_local,x_local,err_PETSc)
CHKERRQ(err_PETSc)
call VecWAXPY(x_local,1.0,xx_local,solution_local,err_PETSc)
CHKERRQ(err_PETSc)
do field = 1, dimPlex; do face = 1, mesh_Nboundaries do field = 1, dimPlex; do face = 1, mesh_Nboundaries
if (params%fieldBC%componentBC(field)%Mask(face)) then if (params%fieldBC%componentBC(field)%Mask(face)) then
call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,ierr) call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,err_PETSc)
if (bcSize > 0) then if (bcSize > 0) then
call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,ierr) call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call utilities_projectBCValues(x_local,section,0,field-1,bcPoints, & call utilities_projectBCValues(x_local,section,0_pPETSCINT,field-1,bcPoints, &
0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc) 0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc)
call ISDestroy(bcPoints,ierr); CHKERRQ(ierr) call ISDestroy(bcPoints,err_PETSc); CHKERRQ(err_PETSc)
endif endif
endif endif
enddo; enddo enddo; enddo
@@ -363,12 +378,12 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
! evaluate field derivatives ! evaluate field derivatives
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
call PetscSectionGetNumFields(section,numFields,ierr) call PetscSectionGetNumFields(section,numFields,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,ierr) !< get Dofs belonging to element call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,err_PETSc) !< get Dofs belonging to element
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex]) IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex])
do qPt = 0, nQuadrature-1 do qPt = 0, nQuadrature-1
m = cell*nQuadrature + qPt+1 m = cell*nQuadrature + qPt+1
@@ -392,23 +407,24 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
enddo enddo
endif endif
call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,ierr) call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! evaluate constitutive response ! evaluate constitutive response
call utilities_constitutiveResponse(params%timeinc,P_av,ForwardData) call utilities_constitutiveResponse(params%timeinc,P_av,ForwardData)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
ForwardData = .false. ForwardData = .false.
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! integrating residual ! integrating residual
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,ierr) !< get Dofs belonging to element call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,err_PETSc) !< get Dofs belonging to element
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex]) IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex])
f_scal = 0.0 f_scal = 0.0
do qPt = 0, nQuadrature-1 do qPt = 0, nQuadrature-1
@@ -429,12 +445,12 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
enddo enddo
f_scal = f_scal*abs(detJ) f_scal = f_scal*abs(detJ)
pf_scal => f_scal pf_scal => f_scal
call DMPlexVecSetClosure(dm_local,section,f_local,cell,pf_scal,ADD_VALUES,ierr) call DMPlexVecSetClosure(dm_local,section,f_local,cell,pf_scal,ADD_VALUES,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,ierr) call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
end subroutine FEM_mechanical_formResidual end subroutine FEM_mechanical_formResidual
@@ -442,13 +458,13 @@ end subroutine FEM_mechanical_formResidual
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief forms the FEM stiffness matrix !> @brief forms the FEM stiffness matrix
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr) subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,err_PETSc)
DM :: dm_local DM :: dm_local
Mat :: Jac_pre, Jac Mat :: Jac_pre, Jac
PetscObject, intent(in) :: dummy PetscObject, intent(in) :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscDS :: prob PetscDS :: prob
Vec :: x_local, xx_local Vec :: x_local, xx_local
@@ -478,34 +494,43 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
allocate(pcellJ(dimPlex**2)) allocate(pcellJ(dimPlex**2))
allocate(pinvcellJ(dimPlex**2)) allocate(pinvcellJ(dimPlex**2))
call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,ierr); CHKERRQ(ierr) call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,err_PETSc)
call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call MatZeroEntries(Jac,ierr); CHKERRQ(ierr) call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,err_PETSc)
call DMGetDS(dm_local,prob,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscDSGetTabulation(prob,0,basisField,basisFieldDer,ierr) call MatZeroEntries(Jac,err_PETSc)
call DMGetLocalSection(dm_local,section,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetGlobalSection(dm_local,gSection,ierr); CHKERRQ(ierr) call DMGetDS(dm_local,prob,err_PETSc)
CHKERRQ(err_PETSc)
call PetscDSGetTabulation(prob,0_pPETSCINT,basisField,basisFieldDer,err_PETSc)
call DMGetLocalSection(dm_local,section,err_PETSc)
CHKERRQ(err_PETSc)
call DMGetGlobalSection(dm_local,gSection,err_PETSc)
CHKERRQ(err_PETSc)
call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMGetLocalVector(dm_local,x_local,err_PETSc)
call VecWAXPY(x_local,1.0_pReal,xx_local,solution_local,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call VecWAXPY(x_local,1.0_pReal,xx_local,solution_local,err_PETSc)
CHKERRQ(err_PETSc)
do field = 1, dimPlex; do face = 1, mesh_Nboundaries do field = 1, dimPlex; do face = 1, mesh_Nboundaries
if (params%fieldBC%componentBC(field)%Mask(face)) then if (params%fieldBC%componentBC(field)%Mask(face)) then
call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,ierr) call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,err_PETSc)
if (bcSize > 0) then if (bcSize > 0) then
call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,ierr) call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call utilities_projectBCValues(x_local,section,0,field-1,bcPoints, & call utilities_projectBCValues(x_local,section,0_pPETSCINT,field-1,bcPoints, &
0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc) 0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc)
call ISDestroy(bcPoints,ierr); CHKERRQ(ierr) call ISDestroy(bcPoints,err_PETSc); CHKERRQ(err_PETSc)
endif endif
endif endif
enddo; enddo enddo; enddo
call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(dm_local,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,ierr) !< get Dofs belonging to element call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,err_PETSc) !< get Dofs belonging to element
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
K_eA = 0.0 K_eA = 0.0
K_eB = 0.0 K_eB = 0.0
MatB = 0.0 MatB = 0.0
@@ -531,11 +556,11 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
FInv = math_inv33(F) FInv = math_inv33(F)
K_eA = K_eA + matmul(transpose(BMat),MatA)*math_det33(FInv)**(1.0/real(dimPlex)) K_eA = K_eA + matmul(transpose(BMat),MatA)*math_det33(FInv)**(1.0/real(dimPlex))
K_eB = K_eB - & K_eB = K_eB - &
matmul(transpose(matmul(reshape(homogenization_F(1:dimPlex,1:dimPlex,m),shape=[dimPlex*dimPlex,1]), & matmul(transpose(matmul(reshape(homogenization_F(1:dimPlex,1:dimPlex,m),shape=[dimPlex**2,1_pPETSCINT]), &
matmul(reshape(FInv(1:dimPlex,1:dimPlex), & matmul(reshape(FInv(1:dimPlex,1:dimPlex), &
shape=[1,dimPlex*dimPlex],order=[2,1]),BMat))),MatA) shape=[1_pPETSCINT,dimPlex**2],order=[2,1]),BMat))),MatA)
MatB = MatB & MatB = MatB &
+ matmul(reshape(homogenization_F(1:dimPlex,1:dimPlex,m),shape=[1,dimPlex*dimPlex]),MatA) + matmul(reshape(homogenization_F(1:dimPlex,1:dimPlex,m),shape=[1_pPETSCINT,dimPlex**2]),MatA)
FAvg = FAvg + F FAvg = FAvg + F
BMatAvg = BMatAvg + BMat BMatAvg = BMatAvg + BMat
else else
@@ -546,39 +571,40 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
FInv = math_inv33(FAvg) FInv = math_inv33(FAvg)
K_e = K_eA*math_det33(FAvg/real(nQuadrature))**(1.0/real(dimPlex)) + & K_e = K_eA*math_det33(FAvg/real(nQuadrature))**(1.0/real(dimPlex)) + &
(matmul(matmul(transpose(BMatAvg), & (matmul(matmul(transpose(BMatAvg), &
reshape(FInv(1:dimPlex,1:dimPlex),shape=[dimPlex*dimPlex,1],order=[2,1])),MatB) + & reshape(FInv(1:dimPlex,1:dimPlex),shape=[dimPlex**2,1_pPETSCINT],order=[2,1])),MatB) + &
K_eB)/real(dimPlex) K_eB)/real(dimPlex)
else else
K_e = K_eA K_e = K_eA
endif endif
K_e = (K_e + eps*math_eye(cellDof)) * abs(detJ) K_e = (K_e + eps*math_eye(int(cellDof))) * abs(detJ)
#ifndef __INTEL_COMPILER #ifndef __INTEL_COMPILER
pK_e(1:cellDOF**2) => K_e pK_e(1:cellDOF**2) => K_e
#else #else
! https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/782230 (bug) ! https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/782230 (bug)
allocate(pK_e(cellDOF**2),source = reshape(K_e,[cellDOF**2])) allocate(pK_e(cellDOF**2),source = reshape(K_e,[cellDOF**2]))
#endif #endif
call DMPlexMatSetClosure(dm_local,section,gSection,Jac,cell,pK_e,ADD_VALUES,ierr) call DMPlexMatSetClosure(dm_local,section,gSection,Jac,cell,pK_e,ADD_VALUES,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,ierr) call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! apply boundary conditions ! apply boundary conditions
#if (PETSC_VERSION_MINOR < 14) #if (PETSC_VERSION_MINOR < 14)
call DMPlexCreateRigidBody(dm_local,matnull,ierr); CHKERRQ(ierr) call DMPlexCreateRigidBody(dm_local,matnull,err_PETSc); CHKERRQ(err_PETSc)
#else #else
call DMPlexCreateRigidBody(dm_local,0,matnull,ierr); CHKERRQ(ierr) call DMPlexCreateRigidBody(dm_local,0_pPETSCINT,matnull,err_PETSc)
CHKERRQ(err_PETSc)
#endif #endif
call MatSetNullSpace(Jac,matnull,ierr); CHKERRQ(ierr) call MatSetNullSpace(Jac,matnull,err_PETSc); CHKERRQ(err_PETSc)
call MatSetNearNullSpace(Jac,matnull,ierr); CHKERRQ(ierr) call MatSetNearNullSpace(Jac,matnull,err_PETSc); CHKERRQ(err_PETSc)
call MatNullSpaceDestroy(matnull,ierr); CHKERRQ(ierr) call MatNullSpaceDestroy(matnull,err_PETSc); CHKERRQ(err_PETSc)
end subroutine FEM_mechanical_formJacobian end subroutine FEM_mechanical_formJacobian
@@ -601,43 +627,43 @@ subroutine FEM_mechanical_forward(guess,timeinc,timeinc_old,fieldBC)
Vec :: x_local Vec :: x_local
PetscSection :: section PetscSection :: section
IS :: bcPoints IS :: bcPoints
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! forward last inc ! forward last inc
if (guess .and. .not. cutBack) then if (guess .and. .not. cutBack) then
ForwardData = .True. ForwardData = .True.
homogenization_F0 = homogenization_F homogenization_F0 = homogenization_F
call SNESGetDM(mechanical_snes,dm_local,ierr); CHKERRQ(ierr) !< retrieve mesh info from mechanical_snes into dm_local call SNESGetDM(mechanical_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc) !< retrieve mesh info from mechanical_snes into dm_local
call DMGetSection(dm_local,section,ierr); CHKERRQ(ierr) call DMGetSection(dm_local,section,err_PETSc); CHKERRQ(err_PETSc)
call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMGetLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
call VecSet(x_local,0.0_pReal,ierr); CHKERRQ(ierr) call VecSet(x_local,0.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
call DMGlobalToLocalBegin(dm_local,solution,INSERT_VALUES,x_local,ierr) !< retrieve my partition of global solution vector call DMGlobalToLocalBegin(dm_local,solution,INSERT_VALUES,x_local,err_PETSc) !< retrieve my partition of global solution vector
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGlobalToLocalEnd(dm_local,solution,INSERT_VALUES,x_local,ierr) call DMGlobalToLocalEnd(dm_local,solution,INSERT_VALUES,x_local,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call VecAXPY(solution_local,1.0,x_local,ierr); CHKERRQ(ierr) call VecAXPY(solution_local,1.0,x_local,err_PETSc); CHKERRQ(err_PETSc)
do field = 1, dimPlex; do face = 1, mesh_Nboundaries do field = 1, dimPlex; do face = 1, mesh_Nboundaries
if (fieldBC%componentBC(field)%Mask(face)) then if (fieldBC%componentBC(field)%Mask(face)) then
call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,ierr) call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,err_PETSc)
if (bcSize > 0) then if (bcSize > 0) then
call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,ierr) call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call utilities_projectBCValues(solution_local,section,0,field-1,bcPoints, & call utilities_projectBCValues(solution_local,section,0_pPETSCINT,field-1,bcPoints, &
0.0_pReal,fieldBC%componentBC(field)%Value(face),timeinc_old) 0.0_pReal,fieldBC%componentBC(field)%Value(face),timeinc_old)
call ISDestroy(bcPoints,ierr); CHKERRQ(ierr) call ISDestroy(bcPoints,err_PETSc); CHKERRQ(err_PETSc)
endif endif
endif endif
enddo; enddo enddo; enddo
call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! update rate and forward last inc ! update rate and forward last inc
call VecCopy(solution,solution_rate,ierr); CHKERRQ(ierr) call VecCopy(solution,solution_rate,err_PETSc); CHKERRQ(err_PETSc)
call VecScale(solution_rate,1.0/timeinc_old,ierr); CHKERRQ(ierr) call VecScale(solution_rate,1.0/timeinc_old,err_PETSc); CHKERRQ(err_PETSc)
endif endif
call VecCopy(solution_rate,solution,ierr); CHKERRQ(ierr) call VecCopy(solution_rate,solution,err_PETSc); CHKERRQ(err_PETSc)
call VecScale(solution,timeinc,ierr); CHKERRQ(ierr) call VecScale(solution,timeinc,err_PETSc); CHKERRQ(err_PETSc)
end subroutine FEM_mechanical_forward end subroutine FEM_mechanical_forward
@@ -645,24 +671,24 @@ end subroutine FEM_mechanical_forward
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief reporting !> @brief reporting
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine FEM_mechanical_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,ierr) subroutine FEM_mechanical_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,err_PETSc)
SNES :: snes_local SNES :: snes_local
PetscInt :: PETScIter PetscInt :: PETScIter
PetscReal :: xnorm,snorm,fnorm,divTol PetscReal :: xnorm,snorm,fnorm,divTol
SNESConvergedReason :: reason SNESConvergedReason :: reason
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! report ! report
divTol = max(maxval(abs(P_av(1:dimPlex,1:dimPlex)))*num%eps_struct_rtol,num%eps_struct_atol) divTol = max(maxval(abs(P_av(1:dimPlex,1:dimPlex)))*num%eps_struct_rtol,num%eps_struct_atol)
call SNESConvergedDefault(snes_local,PETScIter,xnorm,snorm,fnorm/divTol,reason,dummy,ierr) call SNESConvergedDefault(snes_local,PETScIter,xnorm,snorm,fnorm/divTol,reason,dummy,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (terminallyIll) reason = SNES_DIVERGED_FUNCTION_DOMAIN if (terminallyIll) reason = SNES_DIVERGED_FUNCTION_DOMAIN
print'(/,1x,a,a,i0,a,i0,f0.3)', trim(incInfo), & print'(/,1x,a,a,i0,a,i0,f0.3)', trim(incInfo), &
' @ Iteration ',PETScIter,' mechanical residual norm = ', & ' @ Iteration ',PETScIter,' mechanical residual norm = ', &
int(fnorm/divTol),fnorm/divTol-int(fnorm/divTol) int(fnorm/divTol),fnorm/divTol-int(fnorm/divTol) ! ToDo: int casting?
print'(/,1x,a,/,2(3(2x,f12.4,1x)/),3(2x,f12.4,1x))', & print'(/,1x,a,/,2(3(2x,f12.4,1x)/),3(2x,f12.4,1x))', &
'Piola--Kirchhoff stress / MPa =',transpose(P_av)*1.e-6_pReal 'Piola--Kirchhoff stress / MPa =',transpose(P_av)*1.e-6_pReal
flush(IO_STDOUT) flush(IO_STDOUT)
@@ -675,9 +701,7 @@ end subroutine FEM_mechanical_converged
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine FEM_mechanical_updateCoords() subroutine FEM_mechanical_updateCoords()
real(pReal), pointer, dimension(:) :: & PetscReal, pointer, dimension(:,:) :: &
nodeCoords_linear !< nodal coordinates (dimPlex*Nnodes)
real(pReal), pointer, dimension(:,:) :: &
nodeCoords !< nodal coordinates (3,Nnodes) nodeCoords !< nodal coordinates (3,Nnodes)
real(pReal), pointer, dimension(:,:,:) :: & real(pReal), pointer, dimension(:,:,:) :: &
ipCoords !< ip coordinates (3,nQuadrature,mesh_NcpElems) ipCoords !< ip coordinates (3,nQuadrature,mesh_NcpElems)
@@ -690,42 +714,47 @@ subroutine FEM_mechanical_updateCoords()
DM :: dm_local DM :: dm_local
Vec :: x_local Vec :: x_local
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscInt :: pStart, pEnd, p, s, e, q, & PetscInt :: pStart, pEnd, p, s, e, q, &
cellStart, cellEnd, c, n cellStart, cellEnd, c, n
PetscSection :: section PetscSection :: section
PetscQuadrature :: mechQuad PetscQuadrature :: mechQuad
PetscReal, dimension(:), pointer :: basisField, basisFieldDer PetscReal, dimension(:), pointer :: basisField, basisFieldDer, &
PetscScalar, dimension(:), pointer :: x_scal nodeCoords_linear !< nodal coordinates (dimPlex*Nnodes)
PetscScalar, dimension(:), pointer :: x_scal
call SNESGetDM(mechanical_snes,dm_local,ierr); CHKERRQ(ierr) call SNESGetDM(mechanical_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc)
call DMGetDS(dm_local,mechQuad,ierr); CHKERRQ(ierr) call DMGetDS(dm_local,mechQuad,err_PETSc); CHKERRQ(err_PETSc)
call DMGetLocalSection(dm_local,section,ierr); CHKERRQ(ierr) call DMGetLocalSection(dm_local,section,err_PETSc); CHKERRQ(err_PETSc)
call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMGetLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
call DMGetDimension(dm_local,dimPlex,ierr); CHKERRQ(ierr) call DMGetDimension(dm_local,dimPlex,err_PETSc); CHKERRQ(err_PETSc)
! write cell vertex displacements ! write cell vertex displacements
call DMPlexGetDepthStratum(dm_local,0,pStart,pEnd,ierr); CHKERRQ(ierr) call DMPlexGetDepthStratum(dm_local,0_pPETSCINT,pStart,pEnd,err_PETSc)
CHKERRQ(err_PETSc)
allocate(nodeCoords(3,pStart:pEnd-1),source=0.0_pReal) allocate(nodeCoords(3,pStart:pEnd-1),source=0.0_pReal)
call VecGetArrayF90(x_local,nodeCoords_linear,ierr); CHKERRQ(ierr) call VecGetArrayF90(x_local,nodeCoords_linear,err_PETSc); CHKERRQ(err_PETSc)
do p=pStart, pEnd-1 do p=pStart, pEnd-1
call DMPlexGetPointLocal(dm_local, p, s, e, ierr); CHKERRQ(ierr) call DMPlexGetPointLocal(dm_local, p, s, e, err_PETSc); CHKERRQ(err_PETSc)
nodeCoords(1:dimPlex,p)=nodeCoords_linear(s+1:e) nodeCoords(1:dimPlex,p)=nodeCoords_linear(s+1:e)
enddo enddo
call discretization_setNodeCoords(nodeCoords) call discretization_setNodeCoords(nodeCoords)
call VecRestoreArrayF90(x_local,nodeCoords_linear,ierr); CHKERRQ(ierr) call VecRestoreArrayF90(x_local,nodeCoords_linear,err_PETSc); CHKERRQ(err_PETSc)
! write ip displacements ! write ip displacements
call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(dm_local,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
call PetscDSGetTabulation(mechQuad,0,basisField,basisFieldDer,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscDSGetTabulation(mechQuad,0_pPETSCINT,basisField,basisFieldDer,err_PETSc)
CHKERRQ(err_PETSc)
allocate(ipCoords(3,nQuadrature,mesh_NcpElems),source=0.0_pReal) allocate(ipCoords(3,nQuadrature,mesh_NcpElems),source=0.0_pReal)
do c=cellStart,cellEnd-1 do c=cellStart,cellEnd-1
qOffset=0 qOffset=0
call DMPlexVecGetClosure(dm_local,section,x_local,c,x_scal,ierr); CHKERRQ(ierr) !< get nodal coordinates of each element call DMPlexVecGetClosure(dm_local,section,x_local,c,x_scal,err_PETSc) !< get nodal coordinates of each element
CHKERRQ(err_PETSc)
do qPt=0,nQuadrature-1 do qPt=0,nQuadrature-1
qOffset= qPt * (size(basisField)/nQuadrature) qOffset= qPt * (size(basisField)/nQuadrature)
do comp=0,dimPlex-1 !< loop over components do comp=0,dimPlex-1 !< loop over components
nOffset=0 nOffset=0
q = comp q = comp
do n=0,nBasis-1 do n=0,nBasis-1
@@ -737,10 +766,11 @@ subroutine FEM_mechanical_updateCoords()
enddo enddo
enddo enddo
enddo enddo
call DMPlexVecRestoreClosure(dm_local,section,x_local,c,x_scal,ierr); CHKERRQ(ierr) call DMPlexVecRestoreClosure(dm_local,section,x_local,c,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
end do end do
call discretization_setIPcoords(reshape(ipCoords,[3,mesh_NcpElems*nQuadrature])) call discretization_setIPcoords(reshape(ipCoords,[3,mesh_NcpElems*nQuadrature]))
call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
end subroutine FEM_mechanical_updateCoords end subroutine FEM_mechanical_updateCoords
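The recurring change in this file is that integer literals handed to PETSc (stratum heights, field indices, reshape extents) are now kind-qualified with pPETSCINT, so their width follows PetscInt in a 64bit build. A minimal standalone sketch of that pattern; pPETSCINT is faked locally here, whereas DAMASK takes it from the prec module:

  ! Sketch only: kind-qualified literals keep the same width as PetscInt,
  ! whether PETSc is configured with 32bit or 64bit indices.
  program petscint_literal_sketch
    implicit none
    integer, parameter :: pPETSCINT = selected_int_kind(18)   ! assumed 64bit build
    integer(pPETSCINT) :: cellStart, cellEnd, cell

    cellStart = 0_pPETSCINT                                   ! instead of a bare, default-kind 0
    cellEnd   = 8_pPETSCINT
    do cell = cellStart, cellEnd-1_pPETSCINT
      ! one element per iteration
    end do
    print '(a,i0,a)', 'looped over ', cellEnd-cellStart, ' cells'
  end program petscint_literal_sketch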

View File

@@ -20,9 +20,17 @@ module parallelization
implicit none implicit none
private private
integer, protected, public :: & #ifndef PETSC
worldrank = 0, & !< MPI worldrank (/=0 for MPI simulations only) integer, parameter, public :: &
worldsize = 1 !< MPI worldsize (/=1 for MPI simulations only) MPI_INTEGER_KIND = pI64
integer(MPI_INTEGER_KIND), parameter, public :: &
worldrank = 0_MPI_INTEGER_KIND, & !< MPI dummy worldrank
worldsize = 1_MPI_INTEGER_KIND !< MPI dummy worldsize
#else
integer(MPI_INTEGER_KIND), protected, public :: &
worldrank = 0_MPI_INTEGER_KIND, & !< MPI worldrank (/=0 for MPI simulations only)
worldsize = 1_MPI_INTEGER_KIND !< MPI worldsize (/=1 for MPI simulations only)
#endif
#ifndef PETSC #ifndef PETSC
public :: parallelization_bcast_str public :: parallelization_bcast_str
@@ -44,51 +52,63 @@ contains
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine parallelization_init subroutine parallelization_init
integer :: err, typeSize integer(MPI_INTEGER_KIND) :: err_MPI, typeSize
!$ integer :: got_env, threadLevel !$ integer :: got_env, threadLevel
!$ integer(pI32) :: OMP_NUM_THREADS !$ integer(pI32) :: OMP_NUM_THREADS
!$ character(len=6) NumThreadsString !$ character(len=6) NumThreadsString
PetscErrorCode :: petsc_err PetscErrorCode :: err_PETSc
#ifdef _OPENMP #ifdef _OPENMP
! If OpenMP is enabled, check if the MPI library supports it and initialize accordingly. ! If OpenMP is enabled, check if the MPI library supports it and initialize accordingly.
! Otherwise, the first call to PETSc will do the initialization. ! Otherwise, the first call to PETSc will do the initialization.
call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err) call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err_MPI)
if (err /= 0) error stop 'MPI init failed' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI init failed'
if (threadLevel<MPI_THREAD_FUNNELED) error stop 'MPI library does not support OpenMP' if (threadLevel<MPI_THREAD_FUNNELED) error stop 'MPI library does not support OpenMP'
#endif #endif
#if defined(DEBUG) #if defined(DEBUG)
call PetscInitialize(PETSC_NULL_CHARACTER,petsc_err) call PetscInitialize(PETSC_NULL_CHARACTER,err_PETSc)
#else #else
call PetscInitializeNoArguments(petsc_err) call PetscInitializeNoArguments(err_PETSc)
#endif #endif
CHKERRQ(petsc_err) CHKERRQ(err_PETSc)
#if defined(DEBUG) && defined(__INTEL_COMPILER) #if defined(DEBUG) && defined(__INTEL_COMPILER)
call PetscSetFPTrap(PETSC_FP_TRAP_ON,petsc_err) call PetscSetFPTrap(PETSC_FP_TRAP_ON,err_PETSc)
#else #else
call PetscSetFPTrap(PETSC_FP_TRAP_OFF,petsc_err) call PetscSetFPTrap(PETSC_FP_TRAP_OFF,err_PETSc)
#endif #endif
CHKERRQ(petsc_err) CHKERRQ(err_PETSc)
call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err) call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err_MPI)
if (err /= 0) error stop 'Could not determine worldrank' if (err_MPI /= 0_MPI_INTEGER_KIND) &
error stop 'Could not determine worldrank'
if (worldrank == 0) print'(/,1x,a)', '<<<+- parallelization init -+>>>' if (worldrank == 0) print'(/,1x,a)', '<<<+- parallelization init -+>>>'
call MPI_Comm_size(MPI_COMM_WORLD,worldsize,err) call MPI_Comm_size(MPI_COMM_WORLD,worldsize,err_MPI)
if (err /= 0) error stop 'Could not determine worldsize' if (err_MPI /= 0_MPI_INTEGER_KIND) &
error stop 'Could not determine worldsize'
if (worldrank == 0) print'(/,1x,a,i3)', 'MPI processes: ',worldsize if (worldrank == 0) print'(/,1x,a,i3)', 'MPI processes: ',worldsize
call MPI_Type_size(MPI_INTEGER,typeSize,err) call MPI_Type_size(MPI_INTEGER,typeSize,err_MPI)
if (err /= 0) error stop 'Could not determine MPI integer size' if (err_MPI /= 0_MPI_INTEGER_KIND) &
if (typeSize*8 /= bit_size(0)) error stop 'Mismatch between MPI and DAMASK integer' error stop 'Could not determine size of MPI_INTEGER'
if (typeSize*8_MPI_INTEGER_KIND /= int(bit_size(0),MPI_INTEGER_KIND)) &
error stop 'Mismatch between MPI_INTEGER and DAMASK default integer'
call MPI_Type_size(MPI_DOUBLE,typeSize,err) call MPI_Type_size(MPI_INTEGER8,typeSize,err_MPI)
if (err /= 0) error stop 'Could not determine MPI real size' if (err_MPI /= 0) &
if (typeSize*8 /= storage_size(0.0_pReal)) error stop 'Mismatch between MPI and DAMASK real' error stop 'Could not determine size of MPI_INTEGER8'
if (typeSize*8_MPI_INTEGER_KIND /= int(bit_size(0_pI64),MPI_INTEGER_KIND)) &
error stop 'Mismatch between MPI_INTEGER8 and DAMASK pI64'
call MPI_Type_size(MPI_DOUBLE,typeSize,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) &
error stop 'Could not determine size of MPI_DOUBLE'
if (typeSize*8_MPI_INTEGER_KIND /= int(storage_size(0.0_pReal),MPI_INTEGER_KIND)) &
error stop 'Mismatch between MPI_DOUBLE and DAMASK pReal'
if (worldrank /= 0) then if (worldrank /= 0) then
close(OUTPUT_UNIT) ! disable output close(OUTPUT_UNIT) ! disable output
@@ -119,14 +139,14 @@ subroutine parallelization_bcast_str(string)
character(len=:), allocatable, intent(inout) :: string character(len=:), allocatable, intent(inout) :: string
integer :: strlen, ierr ! pI64 for strlen not supported by MPI integer(MPI_INTEGER_KIND) :: strlen, err_MPI
if (worldrank == 0) strlen = len(string) if (worldrank == 0) strlen = len(string,MPI_INTEGER_KIND)
call MPI_Bcast(strlen,1,MPI_INTEGER,0,MPI_COMM_WORLD, ierr) call MPI_Bcast(strlen,1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
if (worldrank /= 0) allocate(character(len=strlen)::string) if (worldrank /= 0) allocate(character(len=strlen)::string)
call MPI_Bcast(string,strlen,MPI_CHARACTER,0,MPI_COMM_WORLD, ierr) call MPI_Bcast(string,strlen,MPI_CHARACTER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD, err_MPI)
end subroutine parallelization_bcast_str end subroutine parallelization_bcast_str
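All MPI arguments and error codes in this module now carry the kind MPI_INTEGER_KIND rather than the compiler default. A minimal sketch of that pattern, with the kind set to the default integer kind purely for illustration (in DAMASK it comes from the MPI/PETSc headers):

  ! Sketch only: error codes, ranks, and literals share one named kind.
  program mpi_kind_sketch
    use mpi
    implicit none
    integer, parameter :: MIK = kind(0)      ! stand-in for MPI_INTEGER_KIND
    integer(MIK) :: err_MPI, worldrank

    call MPI_Init(err_MPI)
    if (err_MPI /= 0_MIK) error stop 'MPI init failed'
    call MPI_Comm_rank(MPI_COMM_WORLD, worldrank, err_MPI)
    if (err_MPI /= 0_MIK) error stop 'Could not determine worldrank'
    if (worldrank == 0_MIK) print '(a)', 'kind-consistent MPI error handling'
    call MPI_Finalize(err_MPI)
  end program mpi_kind_sketch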

View File

@@ -10,6 +10,11 @@ module prec
use, intrinsic :: IEEE_arithmetic use, intrinsic :: IEEE_arithmetic
use, intrinsic :: ISO_C_binding use, intrinsic :: ISO_C_binding
#ifdef PETSC
#include <petsc/finclude/petscsys.h>
use PETScSys
#endif
implicit none implicit none
public public
@@ -17,13 +22,12 @@ module prec
integer, parameter :: pReal = IEEE_selected_real_kind(15,307) !< number with 15 significant digits, up to 1e+-307 (typically 64 bit) integer, parameter :: pReal = IEEE_selected_real_kind(15,307) !< number with 15 significant digits, up to 1e+-307 (typically 64 bit)
integer, parameter :: pI32 = selected_int_kind(9) !< number with at least up to +-1e9 (typically 32 bit) integer, parameter :: pI32 = selected_int_kind(9) !< number with at least up to +-1e9 (typically 32 bit)
integer, parameter :: pI64 = selected_int_kind(18) !< number with at least up to +-1e18 (typically 64 bit) integer, parameter :: pI64 = selected_int_kind(18) !< number with at least up to +-1e18 (typically 64 bit)
#if(INT==8) #ifdef PETSC
integer, parameter :: pInt = pI64 PetscInt, private :: dummy
#else integer, parameter :: pPETSCINT = kind(dummy)
integer, parameter :: pInt = pI32
#endif #endif
integer, parameter :: pStringLen = 256 !< default string length integer, parameter :: pSTRINGLEN = 256 !< default string length
integer, parameter :: pPathLen = 4096 !< maximum length of a path name on linux integer, parameter :: pPATHLEN = 4096 !< maximum length of a path name on linux
real(pReal), parameter :: tol_math_check = 1.0e-8_pReal !< tolerance for internal math self-checks (rotation) real(pReal), parameter :: tol_math_check = 1.0e-8_pReal !< tolerance for internal math self-checks (rotation)
@@ -268,7 +272,7 @@ subroutine selfTest
integer, allocatable, dimension(:) :: realloc_lhs_test integer, allocatable, dimension(:) :: realloc_lhs_test
real(pReal), dimension(1) :: f real(pReal), dimension(1) :: f
integer(pInt), dimension(1) :: i integer(pI64), dimension(1) :: i
real(pReal), dimension(2) :: r real(pReal), dimension(2) :: r
@@ -289,11 +293,11 @@ subroutine selfTest
f = real(prec_bytesToC_DOUBLE(int([0,0,0,-32,+119,+65,+115,65],C_SIGNED_CHAR)),pReal) f = real(prec_bytesToC_DOUBLE(int([0,0,0,-32,+119,+65,+115,65],C_SIGNED_CHAR)),pReal)
if (dNeq(f(1),20191102.0_pReal,0.0_pReal)) error stop 'prec_bytesToC_DOUBLE' if (dNeq(f(1),20191102.0_pReal,0.0_pReal)) error stop 'prec_bytesToC_DOUBLE'
i = int(prec_bytesToC_INT32_T(int([+126,+23,+52,+1],C_SIGNED_CHAR)),pInt) i = int(prec_bytesToC_INT32_T(int([+126,+23,+52,+1],C_SIGNED_CHAR)),pI64)
if (i(1) /= 20191102_pInt) error stop 'prec_bytesToC_INT32_T' if (i(1) /= 20191102_pI64) error stop 'prec_bytesToC_INT32_T'
i = int(prec_bytesToC_INT64_T(int([+126,+23,+52,+1,0,0,0,0],C_SIGNED_CHAR)),pInt) i = int(prec_bytesToC_INT64_T(int([+126,+23,+52,+1,0,0,0,0],C_SIGNED_CHAR)),pI64)
if (i(1) /= 20191102_pInt) error stop 'prec_bytesToC_INT64_T' if (i(1) /= 20191102_pI64) error stop 'prec_bytesToC_INT64_T'
end subroutine selfTest end subroutine selfTest
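prec no longer hard-codes an integer kind for PETSc: the kind is read off a dummy variable declared with the library's own type, so it tracks the build configuration automatically. A sketch of the same trick, with a hypothetical library kind standing in for PetscInt:

  ! Sketch only: derive a named kind parameter from a library-provided type.
  ! lib_int_kind stands in for whatever kind PetscInt happens to have.
  module prec_sketch
    implicit none
    integer, parameter :: lib_int_kind = selected_int_kind(18)   ! pretend 64bit library
    integer(lib_int_kind), private :: dummy
    integer, parameter :: pLIBINT = kind(dummy)                  ! follows the library
  end module prec_sketch

  program show_kind
    use prec_sketch
    implicit none
    print '(a,i0,a)', 'library integers occupy ', storage_size(0_pLIBINT), ' bits'
  end program show_kind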

View File

@@ -15,20 +15,23 @@ subroutine quit(stop_id)
implicit none implicit none
integer, intent(in) :: stop_id integer, intent(in) :: stop_id
integer, dimension(8) :: dateAndTime integer, dimension(8) :: dateAndTime
integer :: error integer :: err_HDF5
PetscErrorCode :: ierr = 0 integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
call h5open_f(error) call h5open_f(err_HDF5)
if (error /= 0) write(6,'(a,i5)') ' Error in h5open_f ',error ! prevents error if not opened yet if (err_HDF5 /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in h5open_f ',err_HDF5 ! prevents error if not opened yet
call h5close_f(error) call h5close_f(err_HDF5)
if (error /= 0) write(6,'(a,i5)') ' Error in h5close_f ',error if (err_HDF5 /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in h5close_f ',err_HDF5
call PetscFinalize(ierr) call PetscFinalize(err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
#ifdef _OPENMP #ifdef _OPENMP
call MPI_finalize(error) call MPI_finalize(err_MPI)
if (error /= 0) write(6,'(a,i5)') ' Error in MPI_finalize',error if (err_MPI /= 0_MPI_INTEGER_KIND) write(6,'(a,i5)') ' Error in MPI_finalize',err_MPI
#else
err_MPI = 0_MPI_INTEGER_KIND
#endif #endif
call date_and_time(values = dateAndTime) call date_and_time(values = dateAndTime)
@@ -40,7 +43,10 @@ subroutine quit(stop_id)
dateAndTime(6),':',& dateAndTime(6),':',&
dateAndTime(7) dateAndTime(7)
if (stop_id == 0 .and. ierr == 0 .and. error == 0) stop 0 ! normal termination if (stop_id == 0 .and. &
err_HDF5 == 0 .and. &
err_MPI == 0_MPI_INTEGER_KIND .and. &
err_PETSC == 0) stop 0 ! normal termination
stop 1 ! error (message from IO_error) stop 1 ! error (message from IO_error)
end subroutine quit end subroutine quit

View File

@@ -497,16 +497,17 @@ subroutine results_mapping_phase(ID,entry,label)
integer, dimension(:,:), intent(in) :: entry !< phase entry at (co,ce) integer, dimension(:,:), intent(in) :: entry !< phase entry at (co,ce)
character(len=*), dimension(:), intent(in) :: label !< label of each phase section character(len=*), dimension(:), intent(in) :: label !< label of each phase section
integer, dimension(size(entry,1),size(entry,2)) :: & integer(pI64), dimension(size(entry,1),size(entry,2)) :: &
entryGlobal entryGlobal
integer, dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process
integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process
integer(HSIZE_T), dimension(2) :: & integer(HSIZE_T), dimension(2) :: &
myShape, & !< shape of the dataset (this process) myShape, & !< shape of the dataset (this process)
myOffset, & myOffset, &
totalShape !< shape of the dataset (all processes) totalShape !< shape of the dataset (all processes)
integer(HID_T) :: & integer(HID_T) :: &
pI64_t, & !< HDF5 type for pI64 (64 bit integer)
loc_id, & !< identifier of group in file loc_id, & !< identifier of group in file
dtype_id, & !< identifier of compound data type dtype_id, & !< identifier of compound data type
label_id, & !< identifier of label (string) in compound data type label_id, & !< identifier of label (string) in compound data type
@@ -518,7 +519,8 @@ subroutine results_mapping_phase(ID,entry,label)
dt_id dt_id
integer(SIZE_T) :: type_size_string, type_size_int integer(SIZE_T) :: type_size_string, type_size_int
integer :: hdferr, ierr, ce, co integer :: hdferr, ce, co
integer(MPI_INTEGER_KIND) :: err_MPI
writeSize = 0 writeSize = 0
@@ -528,28 +530,28 @@ subroutine results_mapping_phase(ID,entry,label)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
#ifndef PETSC #ifndef PETSC
entryGlobal = entry -1 ! 0-based entryGlobal = int(entry -1,pI64) ! 0-based
#else #else
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! MPI settings and communication ! MPI settings and communication
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr) ! get output at each process call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get output at each process
if(ierr /= 0) error stop 'MPI error' if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
entryOffset = 0 entryOffset = 0_pI64
do co = 1, size(ID,1) do co = 1, size(ID,1)
do ce = 1, size(ID,2) do ce = 1, size(ID,2)
entryOffset(ID(co,ce),worldrank) = entryOffset(ID(co,ce),worldrank) +1 entryOffset(ID(co,ce),worldrank) = entryOffset(ID(co,ce),worldrank) +1_pI64
end do end do
end do end do
call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INTEGER8,MPI_SUM,MPI_COMM_WORLD,err_MPI)! get offset at each process
if(ierr /= 0) error stop 'MPI error' if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2) entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
do co = 1, size(ID,1) do co = 1, size(ID,1)
do ce = 1, size(ID,2) do ce = 1, size(ID,2)
entryGlobal(co,ce) = entry(co,ce) -1 + entryOffset(ID(co,ce),worldrank) entryGlobal(co,ce) = int(entry(co,ce),pI64) -1_pI64 + entryOffset(ID(co,ce),worldrank)
end do end do
end do end do
#endif #endif
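The global, 0-based entry numbers are built by letting every rank count its own entries per section, summing those counts across ranks with MPI_Allreduce, and then shifting the local entries by whatever the lower ranks contribute. A stripped-down sketch of that offset logic for a single section (standalone, runs on any number of ranks; the local entry counts are made up):

  ! Sketch only: per-rank entry counts -> global 0-based starting offsets.
  program entry_offset_sketch
    use mpi
    implicit none
    integer :: err, rank, nranks, nLocal, offset
    integer, allocatable :: counts(:)

    call MPI_Init(err)
    call MPI_Comm_rank(MPI_COMM_WORLD, rank, err)
    call MPI_Comm_size(MPI_COMM_WORLD, nranks, err)

    nLocal = rank + 1                                  ! pretend local entry count
    allocate(counts(0:nranks-1), source=0)
    counts(rank) = nLocal
    call MPI_Allreduce(MPI_IN_PLACE, counts, nranks, MPI_INTEGER, MPI_SUM, MPI_COMM_WORLD, err)
    offset = sum(counts(0:rank-1))                     ! entries of lower ranks come first
    print '(a,i0,a,i0)', 'rank ', rank, ' starts at global entry ', offset
    call MPI_Finalize(err)
  end program entry_offset_sketch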
@@ -567,14 +569,15 @@ subroutine results_mapping_phase(ID,entry,label)
call h5tget_size_f(dt_id, type_size_string, hdferr) call h5tget_size_f(dt_id, type_size_string, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, hdferr) pI64_t = h5kind_to_type(kind(entryGlobal),H5_INTEGER_KIND)
call h5tget_size_f(pI64_t, type_size_int, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, hdferr) call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(dtype_id, 'label', 0_SIZE_T, dt_id,hdferr) call h5tinsert_f(dtype_id, 'label', 0_SIZE_T, dt_id,hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(dtype_id, 'entry', type_size_string, H5T_NATIVE_INTEGER, hdferr) call h5tinsert_f(dtype_id, 'entry', type_size_string, pI64_t, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
@@ -586,7 +589,7 @@ subroutine results_mapping_phase(ID,entry,label)
call h5tcreate_f(H5T_COMPOUND_F, type_size_int, entry_id, hdferr) call h5tcreate_f(H5T_COMPOUND_F, type_size_int, entry_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(entry_id, 'entry', 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr) call h5tinsert_f(entry_id, 'entry', 0_SIZE_T, pI64_t, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(dt_id, hdferr) call h5tclose_f(dt_id, hdferr)
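Since the 'entry' member of the compound type is now a 64bit integer, the matching HDF5 datatype can no longer be the fixed H5T_NATIVE_INTEGER; it is looked up from the Fortran kind with h5kind_to_type. A minimal sketch of that lookup (standalone, assuming the HDF5 Fortran library; no file is written):

  ! Sketch only: map a Fortran integer kind to its native HDF5 datatype handle.
  program h5_kind_sketch
    use HDF5
    implicit none
    integer, parameter :: pI64 = selected_int_kind(18)
    integer(HID_T) :: pI64_t
    integer :: hdferr

    call h5open_f(hdferr)
    if (hdferr < 0) error stop 'HDF5 error'
    pI64_t = h5kind_to_type(pI64, H5_INTEGER_KIND)   ! native type matching the kind
    print '(a,i0)', 'HDF5 datatype handle for pI64: ', pI64_t
    call h5close_f(hdferr)
  end program h5_kind_sketch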
@@ -650,16 +653,17 @@ subroutine results_mapping_homogenization(ID,entry,label)
integer, dimension(:), intent(in) :: entry !< homogenization entry at (ce) integer, dimension(:), intent(in) :: entry !< homogenization entry at (ce)
character(len=*), dimension(:), intent(in) :: label !< label of each homogenization section character(len=*), dimension(:), intent(in) :: label !< label of each homogenization section
integer, dimension(size(entry,1)) :: & integer(pI64), dimension(size(entry,1)) :: &
entryGlobal entryGlobal
integer, dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process
integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process
integer(HSIZE_T), dimension(1) :: & integer(HSIZE_T), dimension(1) :: &
myShape, & !< shape of the dataset (this process) myShape, & !< shape of the dataset (this process)
myOffset, & myOffset, &
totalShape !< shape of the dataset (all processes) totalShape !< shape of the dataset (all processes)
integer(HID_T) :: & integer(HID_T) :: &
pI64_t, & !< HDF5 type for pI64 (64 bit integer)
loc_id, & !< identifier of group in file loc_id, & !< identifier of group in file
dtype_id, & !< identifier of compound data type dtype_id, & !< identifier of compound data type
label_id, & !< identifier of label (string) in compound data type label_id, & !< identifier of label (string) in compound data type
@@ -671,7 +675,8 @@ subroutine results_mapping_homogenization(ID,entry,label)
dt_id dt_id
integer(SIZE_T) :: type_size_string, type_size_int integer(SIZE_T) :: type_size_string, type_size_int
integer :: hdferr, ierr, ce integer :: hdferr, ce
integer(MPI_INTEGER_KIND) :: err_MPI
writeSize = 0 writeSize = 0
@@ -681,25 +686,25 @@ subroutine results_mapping_homogenization(ID,entry,label)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
#ifndef PETSC #ifndef PETSC
entryGlobal = entry -1 ! 0-based entryGlobal = int(entry -1,pI64) ! 0-based
#else #else
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! MPI settings and communication ! MPI settings and communication
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr) call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr) ! get output at each process call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get output at each process
if(ierr /= 0) error stop 'MPI error' if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
entryOffset = 0 entryOffset = 0_pI64
do ce = 1, size(ID,1) do ce = 1, size(ID,1)
entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1 entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1_pI64
end do end do
call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INT,MPI_SUM,MPI_COMM_WORLD,ierr)! get offset at each process call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INTEGER8,MPI_SUM,MPI_COMM_WORLD,err_MPI)! get offset at each process
if(ierr /= 0) error stop 'MPI error' if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2) entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
do ce = 1, size(ID,1) do ce = 1, size(ID,1)
entryGlobal(ce) = entry(ce) -1 + entryOffset(ID(ce),worldrank) entryGlobal(ce) = int(entry(ce),pI64) -1_pI64 + entryOffset(ID(ce),worldrank)
end do end do
#endif #endif
@@ -716,14 +721,15 @@ subroutine results_mapping_homogenization(ID,entry,label)
call h5tget_size_f(dt_id, type_size_string, hdferr) call h5tget_size_f(dt_id, type_size_string, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, hdferr) pI64_t = h5kind_to_type(kind(entryGlobal),H5_INTEGER_KIND)
call h5tget_size_f(pI64_t, type_size_int, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, hdferr) call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(dtype_id, 'label', 0_SIZE_T, dt_id,hdferr) call h5tinsert_f(dtype_id, 'label', 0_SIZE_T, dt_id,hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(dtype_id, 'entry', type_size_string, H5T_NATIVE_INTEGER, hdferr) call h5tinsert_f(dtype_id, 'entry', type_size_string, pI64_t, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
@@ -735,7 +741,7 @@ subroutine results_mapping_homogenization(ID,entry,label)
call h5tcreate_f(H5T_COMPOUND_F, type_size_int, entry_id, hdferr) call h5tcreate_f(H5T_COMPOUND_F, type_size_int, entry_id, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tinsert_f(entry_id, 'entry', 0_SIZE_T, H5T_NATIVE_INTEGER, hdferr) call h5tinsert_f(entry_id, 'entry', 0_SIZE_T, pI64_t, hdferr)
if(hdferr < 0) error stop 'HDF5 error' if(hdferr < 0) error stop 'HDF5 error'
call h5tclose_f(dt_id, hdferr) call h5tclose_f(dt_id, hdferr)