default integer, PETSc integer, and MPI integer might be different

Martin Diehl 2022-01-13 10:41:36 +01:00
parent 64395d4b2b
commit a7417a7ad7
11 changed files with 607 additions and 532 deletions
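The convention applied throughout the diff below, shown as a minimal Fortran sketch. This sketch is not part of the commit: the subroutine name and the two checked calls are illustrative, and it assumes the MPI module in use provides MPI_INTEGER_KIND and accepts an error argument of that kind, as the calls in the changed files do.

! Sketch only (not from the changed files): one status variable per library,
! each with that library's integer kind, and kind-matched literals in the checks.
subroutine sketch_error_kinds()
#include <petsc/finclude/petscsys.h>
  use petscsys   ! PetscErrorCode, CHKERRQ, PetscOptionsInsertString
  use MPI_f08    ! assumed to provide MPI_INTEGER_KIND and this MPI_Allreduce interface
  implicit none

  integer                   :: hdferr     ! HDF5 status stays a default integer (shown for contrast, unused here)
  integer(MPI_INTEGER_KIND) :: err_MPI    ! MPI status uses MPI's integer kind
  PetscErrorCode            :: err_PETSc  ! PETSc status uses PETSc's error-code kind
  integer, dimension(4)     :: counts

  counts = 1
  call MPI_Allreduce(MPI_IN_PLACE,counts,4,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'   ! literal carries the MPI kind

  call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',err_PETSc)
  CHKERRQ(err_PETSc)                                          ! PETSc errors go through CHKERRQ
end subroutine sketch_error_kinds

Counts and ranks passed to MPI may also need the MPI kind, which is why several calls below switch from plain 0, 1, or 81 to 0_MPI_INTEGER_KIND, 1_MPI_INTEGER_KIND, and 81_MPI_INTEGER_KIND.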


@@ -1857,13 +1857,13 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
 localShape
 integer(HSIZE_T), intent(out), dimension(size(localShape,1)):: &
 myStart, &
 globalShape !< shape of the dataset (all processes)
 integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id
 integer, dimension(worldsize) :: &
 readSize !< contribution of all processes
-integer :: ierr
 integer :: hdferr
+integer(MPI_INTEGER_KIND) :: err_MPI
 !-------------------------------------------------------------------------------------------------
 ! creating a property list for transfer properties (is collective for MPI)
@@ -1877,8 +1877,8 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
 if (parallel) then
 call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
 if(hdferr < 0) error stop 'HDF5 error'
-call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
-if (ierr /= 0) error stop 'MPI error'
+call MPI_allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,err_MPI) ! get total output size over each process
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 end if
 #endif
 myStart = int(0,HSIZE_T)
@@ -1956,7 +1956,8 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
 integer, dimension(worldsize) :: writeSize !< contribution of all processes
 integer(HID_T) :: dcpl
-integer :: ierr, hdferr
+integer :: hdferr
+integer(MPI_INTEGER_KIND) :: err_MPI
 integer(HSIZE_T), parameter :: chunkSize = 1024_HSIZE_T**2/8_HSIZE_T
@@ -1977,8 +1978,8 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
 writeSize(worldrank+1) = int(myShape(ubound(myShape,1)))
 #ifdef PETSC
 if (parallel) then
-call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
-if (ierr /= 0) error stop 'MPI error'
+call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,PETSC_COMM_WORLD,err_MPI) ! get total output size over each process
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 end if
 #endif
 myStart = int(0,HSIZE_T)


@@ -75,7 +75,6 @@ program DAMASK_grid
 integer :: &
 i, j, m, field, &
 errorID = 0, &
-ierr,&
 cutBackLevel = 0, & !< cut back level \f$ t = \frac{t_{inc}}{2^l} \f$
 stepFraction = 0, & !< fraction of current time interval
 l = 0, & !< current load case
@@ -86,6 +85,7 @@ program DAMASK_grid
 nActiveFields = 0, &
 maxCutBack, & !< max number of cut backs
 stagItMax !< max number of field level staggered iterations
+integer(MPI_INTEGER_KIND) :: err_MPI
 character(len=pStringLen) :: &
 incInfo
@@ -455,23 +455,23 @@ program DAMASK_grid
 print'(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' NOT converged'
 endif; flush(IO_STDOUT)
-call MPI_Allreduce(interface_SIGUSR1,signal,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
-if (ierr /= 0) error stop 'MPI error'
+call MPI_Allreduce(interface_SIGUSR1,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 if (mod(inc,loadCases(l)%f_out) == 0 .or. signal) then
 print'(/,1x,a)', '... writing results to file ...............................................'
 flush(IO_STDOUT)
 call CPFEM_results(totalIncsCounter,t)
 endif
 if (signal) call interface_setSIGUSR1(.false.)
-call MPI_Allreduce(interface_SIGUSR2,signal,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
-if (ierr /= 0) error stop 'MPI error'
+call MPI_Allreduce(interface_SIGUSR2,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 if (mod(inc,loadCases(l)%f_restart) == 0 .or. signal) then
 call mechanical_restartWrite
 call CPFEM_restartWrite
 endif
 if (signal) call interface_setSIGUSR2(.false.)
-call MPI_Allreduce(interface_SIGTERM,signal,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
-if (ierr /= 0) error stop 'MPI error'
+call MPI_Allreduce(interface_SIGTERM,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 if (signal) exit loadCaseLooping
 endif skipping


@@ -27,14 +27,14 @@ module discretization_grid
 private
 integer, dimension(3), public, protected :: &
 grid !< (global) grid
 integer, public, protected :: &
 grid3, & !< (local) grid in 3rd direction
 grid3Offset !< (local) grid offset in 3rd direction
 real(pReal), dimension(3), public, protected :: &
 geomSize !< (global) physical size
 real(pReal), public, protected :: &
 size3, & !< (local) size in 3rd direction
 size3offset !< (local) size offset in 3rd direction
 public :: &
@@ -62,8 +62,8 @@ subroutine discretization_grid_init(restart)
 integer :: &
 j, &
-debug_element, debug_ip, &
-ierr
+debug_element, debug_ip
+integer(MPI_INTEGER_KIND) :: err_MPI
 integer(C_INTPTR_T) :: &
 devNull, z, z_offset
 integer, dimension(worldsize) :: &
@@ -88,13 +88,13 @@ subroutine discretization_grid_init(restart)
 end if
-call MPI_Bcast(grid,3,MPI_INTEGER,0,MPI_COMM_WORLD, ierr)
-if (ierr /= 0) error stop 'MPI error'
+call MPI_Bcast(grid,3,MPI_INTEGER,0,MPI_COMM_WORLD, err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 if (grid(1) < 2) call IO_error(844, ext_msg='cells(1) must be larger than 1')
-call MPI_Bcast(geomSize,3,MPI_DOUBLE,0,MPI_COMM_WORLD, ierr)
-if (ierr /= 0) error stop 'MPI error'
-call MPI_Bcast(origin,3,MPI_DOUBLE,0,MPI_COMM_WORLD, ierr)
-if (ierr /= 0) error stop 'MPI error'
+call MPI_Bcast(geomSize,3,MPI_DOUBLE,0,MPI_COMM_WORLD, err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
+call MPI_Bcast(origin,3,MPI_DOUBLE,0,MPI_COMM_WORLD, err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 print'(/,1x,a,3(i12,1x))', 'cells a b c: ', grid
 print '(1x,a,3(es12.5,1x))', 'size x y z: ', geomSize
@@ -118,14 +118,17 @@ subroutine discretization_grid_init(restart)
 myGrid = [grid(1:2),grid3]
 mySize = [geomSize(1:2),size3]
-call MPI_Gather(product(grid(1:2))*grid3Offset,1,MPI_INTEGER,displs, 1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
-if (ierr /= 0) error stop 'MPI error'
-call MPI_Gather(product(myGrid), 1,MPI_INTEGER,sendcounts,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
-if (ierr /= 0) error stop 'MPI error'
+call MPI_Gather(product(grid(1:2))*grid3Offset, 1_MPI_INTEGER_KIND,MPI_INTEGER,displs,&
+1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
+call MPI_Gather(product(myGrid), 1_MPI_INTEGER_KIND,MPI_INTEGER,sendcounts,&
+1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 allocate(materialAt(product(myGrid)))
-call MPI_Scatterv(materialAt_global,sendcounts,displs,MPI_INTEGER,materialAt,size(materialAt),MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
-if (ierr /= 0) error stop 'MPI error'
+call MPI_Scatterv(materialAt_global,sendcounts,displs,MPI_INTEGER,materialAt,size(materialAt),&
+MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call discretization_init(materialAt, &
 IPcoordinates0(myGrid,mySize,grid3Offset), &


@@ -107,7 +107,8 @@ subroutine grid_mechanical_FEM_init
 1.0_pReal,-1.0_pReal,-1.0_pReal,-1.0_pReal, &
 1.0_pReal, 1.0_pReal, 1.0_pReal, 1.0_pReal], [4,8])
 real(pReal), dimension(3,3,3,3) :: devNull
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc
+integer(MPI_INTEGER_KIND) :: err_MPI
 PetscScalar, pointer, dimension(:,:,:,:) :: &
 u_current,u_lastInc
 PetscInt, dimension(0:worldsize-1) :: localK
@@ -146,10 +147,10 @@ subroutine grid_mechanical_FEM_init
 call PetscOptionsInsertString(PETSC_NULL_OPTIONS, &
 '-mechanical_snes_type newtonls -mechanical_ksp_type fgmres &
 &-mechanical_ksp_max_it 25 -mechanical_mg_levels_ksp_type chebyshev', &
-ierr)
-CHKERRQ(ierr)
-call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr)
-CHKERRQ(ierr)
+err_PETSc)
+CHKERRQ(err_PETSc)
+call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
+CHKERRQ(err_PETSc)
 !--------------------------------------------------------------------------------------------------
 ! allocate global fields
@@ -159,13 +160,14 @@ subroutine grid_mechanical_FEM_init
 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
-call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,ierr)
-CHKERRQ(ierr)
-call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',ierr)
-CHKERRQ(ierr)
+call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,err_PETSc)
+CHKERRQ(err_PETSc)
+call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',err_PETSc)
+CHKERRQ(err_PETSc)
 localK = 0_pPetscInt
 localK(worldrank) = int(grid3,pPetscInt)
-call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call DMDACreate3d(PETSC_COMM_WORLD, &
 DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, &
 DMDA_STENCIL_BOX, &
@@ -173,45 +175,45 @@ subroutine grid_mechanical_FEM_init
 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
 3_pPetscInt, 1_pPetscInt, & ! #dof (u, vector), ghost boundary width (domain overlap)
 [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
-mechanical_grid,ierr)
-CHKERRQ(ierr)
-call SNESSetDM(mechanical_snes,mechanical_grid,ierr)
-CHKERRQ(ierr)
-call DMsetFromOptions(mechanical_grid,ierr)
-CHKERRQ(ierr)
-call DMsetUp(mechanical_grid,ierr)
-CHKERRQ(ierr)
-call DMDASetUniformCoordinates(mechanical_grid,0.0_pReal,geomSize(1),0.0_pReal,geomSize(2),0.0_pReal,geomSize(3),ierr)
-CHKERRQ(ierr)
-call DMCreateGlobalVector(mechanical_grid,solution_current,ierr)
-CHKERRQ(ierr)
-call DMCreateGlobalVector(mechanical_grid,solution_lastInc,ierr)
-CHKERRQ(ierr)
-call DMCreateGlobalVector(mechanical_grid,solution_rate ,ierr)
-CHKERRQ(ierr)
-call DMSNESSetFunctionLocal(mechanical_grid,formResidual,PETSC_NULL_SNES,ierr)
-CHKERRQ(ierr)
-call DMSNESSetJacobianLocal(mechanical_grid,formJacobian,PETSC_NULL_SNES,ierr)
-CHKERRQ(ierr)
-call SNESSetConvergenceTest(mechanical_snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "_converged"
-CHKERRQ(ierr)
-call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1_pPetscInt), ierr) ! ignore linear solve failures
-CHKERRQ(ierr)
-call SNESSetFromOptions(mechanical_snes,ierr) ! pull it all together with additional cli arguments
-CHKERRQ(ierr)
+mechanical_grid,err_PETSc)
+CHKERRQ(err_PETSc)
+call SNESSetDM(mechanical_snes,mechanical_grid,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMsetFromOptions(mechanical_grid,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMsetUp(mechanical_grid,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDASetUniformCoordinates(mechanical_grid,0.0_pReal,geomSize(1),0.0_pReal,geomSize(2),0.0_pReal,geomSize(3),err_PETSc)
+CHKERRQ(err_PETSc)
+call DMCreateGlobalVector(mechanical_grid,solution_current,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMCreateGlobalVector(mechanical_grid,solution_lastInc,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMCreateGlobalVector(mechanical_grid,solution_rate ,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMSNESSetFunctionLocal(mechanical_grid,formResidual,PETSC_NULL_SNES,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMSNESSetJacobianLocal(mechanical_grid,formJacobian,PETSC_NULL_SNES,err_PETSc)
+CHKERRQ(err_PETSc)
+call SNESSetConvergenceTest(mechanical_snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "_converged"
+CHKERRQ(err_PETSc)
+call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1_pPetscInt), err_PETSc) ! ignore linear solve failures
+CHKERRQ(err_PETSc)
+call SNESSetFromOptions(mechanical_snes,err_PETSc) ! pull it all together with additional cli arguments
+CHKERRQ(err_PETSc)
 !--------------------------------------------------------------------------------------------------
 ! init fields
-call VecSet(solution_current,0.0_pReal,ierr);CHKERRQ(ierr)
-call VecSet(solution_lastInc,0.0_pReal,ierr);CHKERRQ(ierr)
-call VecSet(solution_rate ,0.0_pReal,ierr);CHKERRQ(ierr)
-call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
-CHKERRQ(ierr)
-call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-CHKERRQ(ierr)
-call DMDAGetCorners(mechanical_grid,xstart,ystart,zstart,xend,yend,zend,ierr) ! local grid extent
-CHKERRQ(ierr)
+call VecSet(solution_current,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
+call VecSet(solution_lastInc,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
+call VecSet(solution_rate ,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
+call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAGetCorners(mechanical_grid,xstart,ystart,zstart,xend,yend,zend,err_PETSc) ! local grid extent
+CHKERRQ(err_PETSc)
 xend = xstart+xend-1
 yend = ystart+yend-1
 zend = zstart+zend-1
@@ -239,17 +241,17 @@ subroutine grid_mechanical_FEM_init
 groupHandle = HDF5_openGroup(fileHandle,'solver')
 call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
-call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-if(ierr /=0) error stop 'MPI error'
+call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
+if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
-call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-if(ierr /=0) error stop 'MPI error'
+call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
+if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
-call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-if(ierr /=0) error stop 'MPI error'
+call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
+if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
-call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-if(ierr /=0) error stop 'MPI error'
+call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
+if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(F,groupHandle,'F')
 call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
 call HDF5_read(u_current,groupHandle,'u')
@@ -265,19 +267,19 @@ subroutine grid_mechanical_FEM_init
 call utilities_constitutiveResponse(P_current,P_av,C_volAvg,devNull, & ! stress field, stress avg, global average of stiffness and (min+max)/2
 F, & ! target F
 0.0_pReal) ! time increment
-call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
-CHKERRQ(ierr)
-call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-CHKERRQ(ierr)
+call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+CHKERRQ(err_PETSc)
 restartRead2: if (interface_restartInc > 0) then
 print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
 call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
-call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-if(ierr /=0) error stop 'MPI error'
+call MPI_Bcast(C_volAvg,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
-call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr)
-if(ierr /=0) error stop 'MPI error'
+call MPI_Bcast(C_volAvgLastInc,81_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
+if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call HDF5_closeGroup(groupHandle)
 call HDF5_closeFile(fileHandle)
@@ -300,7 +302,7 @@ function grid_mechanical_FEM_solution(incInfoIn) result(solution)
 solution
 !--------------------------------------------------------------------------------------------------
 ! PETSc Data
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc
 SNESConvergedReason :: reason
 incInfo = incInfoIn
@@ -311,13 +313,13 @@ function grid_mechanical_FEM_solution(incInfoIn) result(solution)
 !--------------------------------------------------------------------------------------------------
 ! solve BVP
-call SNESsolve(mechanical_snes,PETSC_NULL_VEC,solution_current,ierr)
-CHKERRQ(ierr)
+call SNESsolve(mechanical_snes,PETSC_NULL_VEC,solution_current,err_PETSc)
+CHKERRQ(err_PETSc)
 !--------------------------------------------------------------------------------------------------
 ! check convergence
-call SNESGetConvergedReason(mechanical_snes,reason,ierr)
-CHKERRQ(ierr)
+call SNESGetConvergedReason(mechanical_snes,reason,err_PETSc)
+CHKERRQ(err_PETSc)
 solution%converged = reason > 0
 solution%iterationsNeeded = totalIter
@@ -347,22 +349,22 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
 deformation_BC
 type(rotation), intent(in) :: &
 rotation_BC
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc
 PetscScalar, pointer, dimension(:,:,:,:) :: &
 u_current,u_lastInc
-call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
-CHKERRQ(ierr)
-call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-CHKERRQ(ierr)
+call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+CHKERRQ(err_PETSc)
 if (cutBack) then
 C_volAvg = C_volAvgLastInc
 else
 C_volAvgLastInc = C_volAvg
 F_aimDot = merge(merge(.0_pReal,(F_aim-F_aim_lastInc)/Delta_t_old,stress_BC%mask),.0_pReal,guess) ! estimate deformation rate for prescribed stress components
 F_aim_lastInc = F_aim
 !-----------------------------------------------------------------------------------------------
@@ -379,13 +381,14 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
 endif
 if (guess) then
-call VecWAXPY(solution_rate,-1.0_pReal,solution_lastInc,solution_current,ierr)
-CHKERRQ(ierr)
-call VecScale(solution_rate,1.0_pReal/Delta_t_old,ierr); CHKERRQ(ierr)
+call VecWAXPY(solution_rate,-1.0_pReal,solution_lastInc,solution_current,err_PETSc)
+CHKERRQ(err_PETSc)
+call VecScale(solution_rate,1.0_pReal/Delta_t_old,err_PETSc)
+CHKERRQ(err_PETSc)
 else
-call VecSet(solution_rate,0.0_pReal,ierr); CHKERRQ(ierr)
+call VecSet(solution_rate,0.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
 endif
-call VecCopy(solution_current,solution_lastInc,ierr); CHKERRQ(ierr)
+call VecCopy(solution_current,solution_lastInc,err_PETSc); CHKERRQ(err_PETSc)
 F_lastInc = F
@@ -400,12 +403,12 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
 if (stress_BC%myType=='dot_P') P_aim = P_aim &
 + merge(.0_pReal,stress_BC%values,stress_BC%mask)*Delta_t
-call VecAXPY(solution_current,Delta_t,solution_rate,ierr); CHKERRQ(ierr)
-call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
-CHKERRQ(ierr)
-call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-CHKERRQ(ierr)
+call VecAXPY(solution_current,Delta_t,solution_rate,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+CHKERRQ(err_PETSc)
 !--------------------------------------------------------------------------------------------------
 ! set module wide available data
@@ -431,15 +434,15 @@ end subroutine grid_mechanical_FEM_updateCoords
 !--------------------------------------------------------------------------------------------------
 subroutine grid_mechanical_FEM_restartWrite
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc
 integer(HID_T) :: fileHandle, groupHandle
 PetscScalar, dimension(:,:,:,:), pointer :: u_current,u_lastInc
-call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,ierr)
-CHKERRQ(ierr)
-call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-CHKERRQ(ierr)
+call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+CHKERRQ(err_PETSc)
 print'(1x,a)', 'writing solver data required for restart to file'; flush(IO_STDOUT)
@@ -465,10 +468,10 @@ subroutine grid_mechanical_FEM_restartWrite
 call HDF5_closeFile(fileHandle)
 endif
-call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,ierr)
-CHKERRQ(ierr)
-call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,ierr)
-CHKERRQ(ierr)
+call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
+CHKERRQ(err_PETSc)
 end subroutine grid_mechanical_FEM_restartWrite
@@ -476,7 +479,7 @@ end subroutine grid_mechanical_FEM_restartWrite
 !--------------------------------------------------------------------------------------------------
 !> @brief convergence check
 !--------------------------------------------------------------------------------------------------
-subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,ierr)
+subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,err_PETSc)
 SNES :: snes_local
 PetscInt, intent(in) :: PETScIter
@@ -486,7 +489,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,i
 fnorm
 SNESConvergedReason :: reason
 PetscObject :: dummy
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc
 real(pReal) :: &
 err_div, &
 divTol, &
@@ -520,7 +523,7 @@ end subroutine converged
 !> @brief forms the residual vector
 !--------------------------------------------------------------------------------------------------
 subroutine formResidual(da_local,x_local, &
-f_local,dummy,ierr)
+f_local,dummy,err_PETSc)
 DM :: da_local
 Vec :: x_local, f_local
@@ -531,13 +534,14 @@ subroutine formResidual(da_local,x_local, &
 PETScIter, &
 nfuncs
 PetscObject :: dummy
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc
+integer(MPI_INTEGER_KIND) :: err_MPI
 real(pReal), dimension(3,3,3,3) :: devNull
-call SNESGetNumberFunctionEvals(mechanical_snes,nfuncs,ierr)
-CHKERRQ(ierr)
-call SNESGetIterationNumber(mechanical_snes,PETScIter,ierr)
-CHKERRQ(ierr)
+call SNESGetNumberFunctionEvals(mechanical_snes,nfuncs,err_PETSc)
+CHKERRQ(err_PETSc)
+call SNESGetIterationNumber(mechanical_snes,PETScIter,err_PETSc)
+CHKERRQ(err_PETSc)
 if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
@@ -555,7 +559,7 @@ subroutine formResidual(da_local,x_local, &
 !--------------------------------------------------------------------------------------------------
 ! get deformation gradient
-call DMDAVecGetArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
+call DMDAVecGetArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
 do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
 ctr = 0
 do kk = 0, 1; do jj = 0, 1; do ii = 0, 1
@@ -565,14 +569,15 @@ subroutine formResidual(da_local,x_local, &
 ii = i-xstart+1; jj = j-ystart+1; kk = k-zstart+1
 F(1:3,1:3,ii,jj,kk) = params%rotation_BC%rotate(F_aim,active=.true.) + transpose(matmul(BMat,x_elem))
 enddo; enddo; enddo
-call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
+call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
 !--------------------------------------------------------------------------------------------------
 ! evaluate constitutive response
 call utilities_constitutiveResponse(P_current,&
 P_av,C_volAvg,devNull, &
 F,params%Delta_t,params%rotation_BC)
-call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr)
+call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
+if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 !--------------------------------------------------------------------------------------------------
 ! stress BC handling
@@ -581,9 +586,9 @@ subroutine formResidual(da_local,x_local, &
 !--------------------------------------------------------------------------------------------------
 ! constructing residual
-call VecSet(f_local,0.0_pReal,ierr);CHKERRQ(ierr)
-call DMDAVecGetArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
-call DMDAVecGetArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
+call VecSet(f_local,0.0_pReal,err_PETSc);CHKERRQ(err_PETSc)
+call DMDAVecGetArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
+call DMDAVecGetArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
 ele = 0
 do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
 ctr = 0
@@ -603,12 +608,12 @@ subroutine formResidual(da_local,x_local, &
 f_scal(0:2,i+ii,j+jj,k+kk) = f_scal(0:2,i+ii,j+jj,k+kk) + f_elem(ctr,1:3)
 enddo; enddo; enddo
 enddo; enddo; enddo
-call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,ierr);CHKERRQ(ierr)
-call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
+call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,err_PETSc);CHKERRQ(err_PETSc)
+call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
 !--------------------------------------------------------------------------------------------------
 ! applying boundary conditions
-call DMDAVecGetArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
+call DMDAVecGetArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
 if (zstart == 0) then
 f_scal(0:2,xstart,ystart,zstart) = 0.0
 f_scal(0:2,xend+1,ystart,zstart) = 0.0
@@ -621,7 +626,7 @@ subroutine formResidual(da_local,x_local, &
 f_scal(0:2,xstart,yend+1,zend+1) = 0.0
 f_scal(0:2,xend+1,yend+1,zend+1) = 0.0
 endif
-call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,ierr);CHKERRQ(ierr)
+call DMDAVecRestoreArrayF90(da_local,f_local,f_scal,err_PETSc);CHKERRQ(err_PETSc)
 end subroutine formResidual
@@ -629,7 +634,7 @@ end subroutine formResidual
 !--------------------------------------------------------------------------------------------------
 !> @brief forms the FEM stiffness matrix
 !--------------------------------------------------------------------------------------------------
-subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
+subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,err_PETSc)
 DM :: da_local
 Vec :: x_local, coordinates
@@ -643,15 +648,17 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
 PetscScalar :: diag
 PetscObject :: dummy
 MatNullSpace :: matnull
-PetscErrorCode :: ierr
+PetscErrorCode :: err_PETSc
 BMatFull = 0.0
 BMatFull(1:3,1 :8 ) = BMat
 BMatFull(4:6,9 :16) = BMat
 BMatFull(7:9,17:24) = BMat
-call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,ierr); CHKERRQ(ierr)
-call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,ierr); CHKERRQ(ierr)
-call MatZeroEntries(Jac,ierr); CHKERRQ(ierr)
+call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,err_PETSc)
+CHKERRQ(err_PETSc)
+call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,err_PETSc)
+CHKERRQ(err_PETSc)
+call MatZeroEntries(Jac,err_PETSc); CHKERRQ(err_PETSc)
 ele = 0
 do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
 ctr = 0
@@ -686,34 +693,42 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,ierr)
 matmul(transpose(BMatFull), &
 matmul(reshape(reshape(homogenization_dPdF(1:3,1:3,1:3,1:3,ele), &
 shape=[3,3,3,3], order=[2,1,4,3]),shape=[9,9]),BMatFull))*detJ
-call MatSetValuesStencil(Jac,24_pPETScInt,row,24_pPetscInt,col,K_ele,ADD_VALUES,ierr)
-CHKERRQ(ierr)
+call MatSetValuesStencil(Jac,24_pPETScInt,row,24_pPetscInt,col,K_ele,ADD_VALUES,err_PETSc)
+CHKERRQ(err_PETSc)
 enddo; enddo; enddo
-call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
-call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
-call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
-call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
+call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
+call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
+call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
+call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
 !--------------------------------------------------------------------------------------------------
 ! applying boundary conditions
 diag = (C_volAvg(1,1,1,1)/delta(1)**2 + &
 C_volAvg(2,2,2,2)/delta(2)**2 + &
 C_volAvg(3,3,3,3)/delta(3)**2)*detJ
-call MatZeroRowsColumns(Jac,size(rows,kind=pPetscInt),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,ierr)
-CHKERRQ(ierr)
-call DMGetGlobalVector(da_local,coordinates,ierr); CHKERRQ(ierr)
-call DMDAVecGetArrayF90(da_local,coordinates,x_scal,ierr); CHKERRQ(ierr)
+call MatZeroRowsColumns(Jac,size(rows,kind=pPetscInt),rows,diag,PETSC_NULL_VEC,PETSC_NULL_VEC,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMGetGlobalVector(da_local,coordinates,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecGetArrayF90(da_local,coordinates,x_scal,err_PETSc)
+CHKERRQ(err_PETSc)
 ele = 0
 do k = zstart, zend; do j = ystart, yend; do i = xstart, xend
 ele = ele + 1
 x_scal(0:2,i,j,k) = discretization_IPcoords(1:3,ele)
 enddo; enddo; enddo
-call DMDAVecRestoreArrayF90(da_local,coordinates,x_scal,ierr); CHKERRQ(ierr) ! initialize to undeformed coordinates (ToDo: use ip coordinates)
-call MatNullSpaceCreateRigidBody(coordinates,matnull,ierr); CHKERRQ(ierr) ! get rigid body deformation modes
-call DMRestoreGlobalVector(da_local,coordinates,ierr); CHKERRQ(ierr)
-call MatSetNullSpace(Jac,matnull,ierr); CHKERRQ(ierr)
-call MatSetNearNullSpace(Jac,matnull,ierr); CHKERRQ(ierr)
-call MatNullSpaceDestroy(matnull,ierr); CHKERRQ(ierr)
+call DMDAVecRestoreArrayF90(da_local,coordinates,x_scal,err_PETSc)
+CHKERRQ(err_PETSc) ! initialize to undeformed coordinates (ToDo: use ip coordinates)
+call MatNullSpaceCreateRigidBody(coordinates,matnull,err_PETSc)
+CHKERRQ(err_PETSc) ! get rigid body deformation modes
+call DMRestoreGlobalVector(da_local,coordinates,err_PETSc)
+CHKERRQ(err_PETSc)
+call MatSetNullSpace(Jac,matnull,err_PETSc)
+CHKERRQ(err_PETSc)
+call MatSetNearNullSpace(Jac,matnull,err_PETSc)
+CHKERRQ(err_PETSc)
+call MatNullSpaceDestroy(matnull,err_PETSc)
+CHKERRQ(err_PETSc)
 end subroutine formJacobian


@ -97,7 +97,8 @@ contains
subroutine grid_mechanical_spectral_basic_init subroutine grid_mechanical_spectral_basic_init
real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
PetscScalar, pointer, dimension(:,:,:,:) :: & PetscScalar, pointer, dimension(:,:,:,:) :: &
F ! pointer to solution data F ! pointer to solution data
PetscInt, dimension(0:worldsize-1) :: localK PetscInt, dimension(0:worldsize-1) :: localK
@ -145,10 +146,10 @@ subroutine grid_mechanical_spectral_basic_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc ! set default and user defined options for PETSc
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! allocate global fields ! allocate global fields
@ -157,11 +158,12 @@ subroutine grid_mechanical_spectral_basic_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc ! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr) call SNESCreate(PETSC_COMM_WORLD,snes,err_PETSc); CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr) call SNESSetOptionsPrefix(snes,'mechanical_',err_PETSc);CHKERRQ(err_PETSc)
localK = 0_pPetscInt localK = 0_pPetscInt
localK(worldrank) = int(grid3,pPetscInt) localK(worldrank) = int(grid3,pPetscInt)
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3d(PETSC_COMM_WORLD, & call DMDACreate3d(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
@ -169,21 +171,21 @@ subroutine grid_mechanical_spectral_basic_init
1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), & 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
9_pPetscInt, 0_pPetscInt, & ! #dof (F, tensor), ghost boundary width (domain overlap) 9_pPetscInt, 0_pPetscInt, & ! #dof (F, tensor), ghost boundary width (domain overlap)
[int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
da,ierr) ! handle, error da,err_PETSc) ! handle, error
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESSetDM(snes,da,ierr); CHKERRQ(ierr) ! connect snes to da call SNESSetDM(snes,da,err_PETSc); CHKERRQ(err_PETSc) ! connect snes to da
call DMsetFromOptions(da,ierr); CHKERRQ(ierr) call DMsetFromOptions(da,err_PETSc); CHKERRQ(err_PETSc)
call DMsetUp(da,ierr); CHKERRQ(ierr) call DMsetUp(da,err_PETSc); CHKERRQ(err_PETSc)
call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 9, i.e. every def grad tensor) call DMcreateGlobalVector(da,solution_vec,err_PETSc); CHKERRQ(err_PETSc) ! global solution vector (grid x 9, i.e. every def grad tensor)
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "converged" call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged"
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments call SNESsetFromOptions(snes,err_PETSc); CHKERRQ(err_PETSc) ! pull it all together with additional CLI arguments
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! init fields ! init fields
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! places pointer on PETSc data call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc) ! places pointer on PETSc data
restartRead: if (interface_restartInc > 0) then restartRead: if (interface_restartInc > 0) then
print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file' print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'
@ -192,17 +194,17 @@ subroutine grid_mechanical_spectral_basic_init
groupHandle = HDF5_openGroup(fileHandle,'solver') groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(P_aim,groupHandle,'P_aim',.false.) call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim,groupHandle,'F_aim',.false.) call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.) call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F') call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc') call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
@ -216,24 +218,27 @@ subroutine grid_mechanical_spectral_basic_init
call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2 call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2
reshape(F,shape(F_lastInc)), & ! target F reshape(F,shape(F_lastInc)), & ! target F
0.0_pReal) ! time increment 0.0_pReal) ! time increment
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) ! deassociate pointer call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc) ! deassociate pointer
restartRead2: if (interface_restartInc > 0) then restartRead2: if (interface_restartInc > 0) then
print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file' print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.) call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_closeGroup(groupHandle) call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle) call HDF5_closeFile(fileHandle)
call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', & call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,ierr) MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,ierr) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,ierr) call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end if restartRead2 end if restartRead2
call utilities_updateGamma(C_minMaxAvg) call utilities_updateGamma(C_minMaxAvg)
@ -255,7 +260,7 @@ function grid_mechanical_spectral_basic_solution(incInfoIn) result(solution)
solution solution
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! PETSc Data ! PETSc Data
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason SNESConvergedReason :: reason
incInfo = incInfoIn incInfo = incInfoIn
@ -267,11 +272,11 @@ function grid_mechanical_spectral_basic_solution(incInfoIn) result(solution)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! solve BVP ! solve BVP
call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr) call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! check convergence ! check convergence
call SNESGetConvergedReason(snes,reason,ierr); CHKERRQ(ierr) call SNESGetConvergedReason(snes,reason,err_PETSc); CHKERRQ(err_PETSc)
solution%converged = reason > 0 solution%converged = reason > 0
solution%iterationsNeeded = totalIter solution%iterationsNeeded = totalIter
@ -301,11 +306,11 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
deformation_BC deformation_BC
type(rotation), intent(in) :: & type(rotation), intent(in) :: &
rotation_BC rotation_BC
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscScalar, pointer, dimension(:,:,:,:) :: F PetscScalar, pointer, dimension(:,:,:,:) :: F
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
if (cutBack) then if (cutBack) then
C_volAvg = C_volAvgLastInc C_volAvg = C_volAvgLastInc
@ -314,7 +319,7 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
C_volAvgLastInc = C_volAvg C_volAvgLastInc = C_volAvg
C_minMaxAvgLastInc = C_minMaxAvg C_minMaxAvgLastInc = C_minMaxAvg
F_aimDot = merge(merge(.0_pReal,(F_aim-F_aim_lastInc)/Delta_t_old,stress_BC%mask),.0_pReal,guess) ! estimate deformation rate for prescribed stress components F_aimDot = merge(merge(.0_pReal,(F_aim-F_aim_lastInc)/Delta_t_old,stress_BC%mask),.0_pReal,guess) ! estimate deformation rate for prescribed stress components
F_aim_lastInc = F_aim F_aim_lastInc = F_aim
!----------------------------------------------------------------------------------------------- !-----------------------------------------------------------------------------------------------
@ -348,7 +353,7 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
F = reshape(utilities_forwardField(Delta_t,F_lastInc,Fdot, & ! estimate of F at end of time+Delta_t that matches rotated F_aim on average F = reshape(utilities_forwardField(Delta_t,F_lastInc,Fdot, & ! estimate of F at end of time+Delta_t that matches rotated F_aim on average
rotation_BC%rotate(F_aim,active=.true.)),[9,grid(1),grid(2),grid3]) rotation_BC%rotate(F_aim,active=.true.)),[9,grid(1),grid(2),grid3])
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! set module wide available data ! set module wide available data
@ -364,12 +369,12 @@ end subroutine grid_mechanical_spectral_basic_forward
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_basic_updateCoords subroutine grid_mechanical_spectral_basic_updateCoords
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscScalar, dimension(:,:,:,:), pointer :: F PetscScalar, dimension(:,:,:,:), pointer :: F
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
call utilities_updateCoords(F) call utilities_updateCoords(F)
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
end subroutine grid_mechanical_spectral_basic_updateCoords end subroutine grid_mechanical_spectral_basic_updateCoords
@ -379,11 +384,11 @@ end subroutine grid_mechanical_spectral_basic_updateCoords
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_basic_restartWrite subroutine grid_mechanical_spectral_basic_restartWrite
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle integer(HID_T) :: fileHandle, groupHandle
PetscScalar, dimension(:,:,:,:), pointer :: F PetscScalar, dimension(:,:,:,:), pointer :: F
call DMDAVecGetArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
print'(1x,a)', 'writing solver data required for restart to file'; flush(IO_STDOUT) print'(1x,a)', 'writing solver data required for restart to file'; flush(IO_STDOUT)
@ -410,7 +415,7 @@ subroutine grid_mechanical_spectral_basic_restartWrite
if (num%update_gamma) call utilities_saveReferenceStiffness if (num%update_gamma) call utilities_saveReferenceStiffness
call DMDAVecRestoreArrayF90(da,solution_vec,F,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc); CHKERRQ(err_PETSc)
end subroutine grid_mechanical_spectral_basic_restartWrite end subroutine grid_mechanical_spectral_basic_restartWrite
@ -418,7 +423,7 @@ end subroutine grid_mechanical_spectral_basic_restartWrite
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief convergence check !> @brief convergence check
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,ierr) subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,err_PETSc)
SNES :: snes_local SNES :: snes_local
PetscInt, intent(in) :: PETScIter PetscInt, intent(in) :: PETScIter
@ -428,7 +433,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dumm
devNull3 devNull3
SNESConvergedReason :: reason SNESConvergedReason :: reason
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
real(pReal) :: & real(pReal) :: &
divTol, & divTol, &
BCTol BCTol
@ -460,7 +465,7 @@ end subroutine converged
!> @brief forms the residual vector !> @brief forms the residual vector
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine formResidual(in, F, & subroutine formResidual(in, F, &
residuum, dummy, ierr) residuum, dummy, err_PETSc)
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work) DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work)
PetscScalar, dimension(3,3,XG_RANGE,YG_RANGE,ZG_RANGE), & PetscScalar, dimension(3,3,XG_RANGE,YG_RANGE,ZG_RANGE), &
@ -473,10 +478,11 @@ subroutine formResidual(in, F, &
PETScIter, & PETScIter, &
nfuncs nfuncs
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr) call SNESGetNumberFunctionEvals(snes,nfuncs,err_PETSc); CHKERRQ(err_PETSc)
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr) call SNESGetIterationNumber(snes,PETScIter,err_PETSc); CHKERRQ(err_PETSc)
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
@ -497,7 +503,8 @@ subroutine formResidual(in, F, &
call utilities_constitutiveResponse(residuum, & ! "residuum" gets field of first PK stress (to save memory) call utilities_constitutiveResponse(residuum, & ! "residuum" gets field of first PK stress (to save memory)
P_av,C_volAvg,C_minMaxAvg, & P_av,C_volAvg,C_minMaxAvg, &
F,params%Delta_t,params%rotation_BC) F,params%Delta_t,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! stress BC handling ! stress BC handling
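For orientation: the residual routine above now carries two status variables of potentially different kinds, err_PETSc (PetscErrorCode, consumed by CHKERRQ) for PETSc calls and err_MPI (integer of kind MPI_INTEGER_KIND, compared against a kind-matched zero) for MPI calls. A minimal sketch of that pattern, assuming the PETSc/MPI modules and the module variables snes and terminallyIll used above:

  PetscErrorCode :: err_PETSc                                                 ! status kind expected by PETSc/CHKERRQ
  integer(MPI_INTEGER_KIND) :: err_MPI                                        ! status kind expected by the MPI library
  PetscInt :: nfuncs

  call SNESGetNumberFunctionEvals(snes,nfuncs,err_PETSc)                      ! PETSc call: macro-based check
  CHKERRQ(err_PETSc)
  call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR, &
                     MPI_COMM_WORLD,err_MPI)                                  ! MPI call: explicit check
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'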
@ -108,7 +108,8 @@ contains
subroutine grid_mechanical_spectral_polarisation_init subroutine grid_mechanical_spectral_polarisation_init
real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P real(pReal), dimension(3,3,grid(1),grid(2),grid3) :: P
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
PetscScalar, pointer, dimension(:,:,:,:) :: & PetscScalar, pointer, dimension(:,:,:,:) :: &
FandF_tau, & ! overall pointer to solution data FandF_tau, & ! overall pointer to solution data
F, & ! specific (sub)pointer F, & ! specific (sub)pointer
@ -163,10 +164,10 @@ subroutine grid_mechanical_spectral_polarisation_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! set default and user defined options for PETSc ! set default and user defined options for PETSc
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type ngmres',err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_grid%get_asString('petsc_options',defaultVal=''),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! allocate global fields ! allocate global fields
@ -177,11 +178,12 @@ subroutine grid_mechanical_spectral_polarisation_init
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc ! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,snes,ierr); CHKERRQ(ierr) call SNESCreate(PETSC_COMM_WORLD,snes,err_PETSc); CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(snes,'mechanical_',ierr);CHKERRQ(ierr) call SNESSetOptionsPrefix(snes,'mechanical_',err_PETSc);CHKERRQ(err_PETSc)
localK = 0_pPetscInt localK = 0_pPetscInt
localK(worldrank) = int(grid3,pPetscInt) localK(worldrank) = int(grid3,pPetscInt)
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDACreate3d(PETSC_COMM_WORLD, & call DMDACreate3d(PETSC_COMM_WORLD, &
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
@ -189,21 +191,21 @@ subroutine grid_mechanical_spectral_polarisation_init
1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), & 1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
18_pPetscInt, 0_pPetscInt, & ! #dof (2xtensor), ghost boundary width (domain overlap) 18_pPetscInt, 0_pPetscInt, & ! #dof (2xtensor), ghost boundary width (domain overlap)
[int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid [int(grid(1),pPetscInt)],[int(grid(2),pPetscInt)],localK, & ! local grid
da,ierr) ! handle, error da,err_PETSc) ! handle, error
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESSetDM(snes,da,ierr); CHKERRQ(ierr) ! connect snes to da call SNESSetDM(snes,da,err_PETSc); CHKERRQ(err_PETSc) ! connect snes to da
call DMsetFromOptions(da,ierr); CHKERRQ(ierr) call DMsetFromOptions(da,err_PETSc); CHKERRQ(err_PETSc)
call DMsetUp(da,ierr); CHKERRQ(ierr) call DMsetUp(da,err_PETSc); CHKERRQ(err_PETSc)
call DMcreateGlobalVector(da,solution_vec,ierr); CHKERRQ(ierr) ! global solution vector (grid x 18, i.e. every def grad tensor) call DMcreateGlobalVector(da,solution_vec,err_PETSc); CHKERRQ(err_PETSc) ! global solution vector (grid x 18, i.e. every def grad tensor)
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,ierr) ! residual vector of same shape as solution vector call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,ierr) ! specify custom convergence check function "converged" call SNESsetConvergenceTest(snes,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged"
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESsetFromOptions(snes,ierr); CHKERRQ(ierr) ! pull it all together with additional CLI arguments call SNESsetFromOptions(snes,err_PETSc); CHKERRQ(err_PETSc) ! pull it all together with additional CLI arguments
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! init fields ! init fields
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! places pointer on PETSc data call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc); CHKERRQ(err_PETSc) ! places pointer on PETSc data
F => FandF_tau(0: 8,:,:,:) F => FandF_tau(0: 8,:,:,:)
F_tau => FandF_tau(9:17,:,:,:) F_tau => FandF_tau(9:17,:,:,:)
@ -214,17 +216,17 @@ subroutine grid_mechanical_spectral_polarisation_init
groupHandle = HDF5_openGroup(fileHandle,'solver') groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(P_aim,groupHandle,'P_aim',.false.) call HDF5_read(P_aim,groupHandle,'P_aim',.false.)
call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(P_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim,groupHandle,'F_aim',.false.) call HDF5_read(F_aim,groupHandle,'F_aim',.false.)
call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.) call HDF5_read(F_aim_lastInc,groupHandle,'F_aim_lastInc',.false.)
call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aim_lastInc,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.) call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(F_aimDot,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F') call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc') call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
call HDF5_read(F_tau,groupHandle,'F_tau') call HDF5_read(F_tau,groupHandle,'F_tau')
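Each restart quantity in the hunk above follows the same two-step pattern: read the dataset, then broadcast the rank-0 value so that all ranks agree, and check the MPI return code afterwards. A compact sketch for a single 3x3 tensor (HDF5_read and groupHandle as in this module; the count 9 covers the contiguous 3x3 array):

  real(pReal), dimension(3,3) :: F_aim
  integer(MPI_INTEGER_KIND) :: err_MPI

  call HDF5_read(F_aim,groupHandle,'F_aim',.false.)                           ! read from the restart file
  call MPI_Bcast(F_aim,9,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)                 ! make all ranks consistent
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'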
@ -242,24 +244,28 @@ subroutine grid_mechanical_spectral_polarisation_init
call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2 call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2
reshape(F,shape(F_lastInc)), & ! target F reshape(F,shape(F_lastInc)), & ! target F
0.0_pReal) ! time increment 0.0_pReal) ! time increment
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) ! deassociate pointer call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc) ! deassociate pointer
CHKERRQ(err_PETSc)
restartRead2: if (interface_restartInc > 0) then restartRead2: if (interface_restartInc > 0) then
print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file' print'(1x,a,i0,a)', 'reading more restart data of increment ', interface_restartInc, ' from file'
call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.) call HDF5_read(C_volAvg,groupHandle,'C_volAvg',.false.)
call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(C_volAvg,81,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.) call HDF5_read(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(C_volAvgLastInc,81,MPI_DOUBLE,0,MPI_COMM_WORLD,err_MPI)
if (ierr /=0) error stop 'MPI error' if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_closeGroup(groupHandle) call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle) call HDF5_closeFile(fileHandle)
call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', & call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,ierr) MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,ierr) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,ierr) call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_File_close(fileUnit,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end if restartRead2 end if restartRead2
call utilities_updateGamma(C_minMaxAvg) call utilities_updateGamma(C_minMaxAvg)
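The reference stiffness is re-read with MPI-IO, and each call of the open/read/close sequence now gets its own check instead of sharing one. A sketch of that sequence (declarations are illustrative; the handle type assumes the Fortran "mpi" module bindings, and 81 = 3**4 entries of the rank-four tensor):

  integer :: fileUnit                                                         ! MPI file handle
  integer(MPI_INTEGER_KIND) :: err_MPI
  real(pReal), dimension(3,3,3,3) :: C_minMaxAvg

  call MPI_File_open(MPI_COMM_WORLD, trim(getSolverJobName())//'.C_ref', &
                     MPI_MODE_RDONLY,MPI_INFO_NULL,fileUnit,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  call MPI_File_read(fileUnit,C_minMaxAvg,81,MPI_DOUBLE,MPI_STATUS_IGNORE,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
  call MPI_File_close(fileUnit,err_MPI)
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'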
@ -283,7 +289,7 @@ function grid_mechanical_spectral_polarisation_solution(incInfoIn) result(soluti
solution solution
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! PETSc Data ! PETSc Data
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason SNESConvergedReason :: reason
incInfo = incInfoIn incInfo = incInfoIn
@ -299,11 +305,11 @@ function grid_mechanical_spectral_polarisation_solution(incInfoIn) result(soluti
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! solve BVP ! solve BVP
call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,ierr); CHKERRQ(ierr) call SNESsolve(snes,PETSC_NULL_VEC,solution_vec,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! check convergence ! check convergence
call SNESGetConvergedReason(snes,reason,ierr); CHKERRQ(ierr) call SNESGetConvergedReason(snes,reason,err_PETSc); CHKERRQ(err_PETSc)
solution%converged = reason > 0 solution%converged = reason > 0
solution%iterationsNeeded = totalIter solution%iterationsNeeded = totalIter
@ -333,13 +339,13 @@ subroutine grid_mechanical_spectral_polarisation_forward(cutBack,guess,Delta_t,D
deformation_BC deformation_BC
type(rotation), intent(in) :: & type(rotation), intent(in) :: &
rotation_BC rotation_BC
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscScalar, pointer, dimension(:,:,:,:) :: FandF_tau, F, F_tau PetscScalar, pointer, dimension(:,:,:,:) :: FandF_tau, F, F_tau
integer :: i, j, k integer :: i, j, k
real(pReal), dimension(3,3) :: F_lambda33 real(pReal), dimension(3,3) :: F_lambda33
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc); CHKERRQ(err_PETSc)
F => FandF_tau(0: 8,:,:,:) F => FandF_tau(0: 8,:,:,:)
F_tau => FandF_tau(9:17,:,:,:) F_tau => FandF_tau(9:17,:,:,:)
@ -402,7 +408,8 @@ subroutine grid_mechanical_spectral_polarisation_forward(cutBack,guess,Delta_t,D
end do; end do; end do end do; end do; end do
end if end if
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! set module wide available data ! set module wide available data
@ -418,12 +425,14 @@ end subroutine grid_mechanical_spectral_polarisation_forward
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_updateCoords subroutine grid_mechanical_spectral_polarisation_updateCoords
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc)
CHKERRQ(err_PETSc)
call utilities_updateCoords(FandF_tau(0:8,:,:,:)) call utilities_updateCoords(FandF_tau(0:8,:,:,:))
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
CHKERRQ(err_PETSc)
end subroutine grid_mechanical_spectral_polarisation_updateCoords end subroutine grid_mechanical_spectral_polarisation_updateCoords
@ -433,11 +442,11 @@ end subroutine grid_mechanical_spectral_polarisation_updateCoords
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_restartWrite subroutine grid_mechanical_spectral_polarisation_restartWrite
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle integer(HID_T) :: fileHandle, groupHandle
PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau PetscScalar, dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau
call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc); CHKERRQ(err_PETSc)
F => FandF_tau(0: 8,:,:,:) F => FandF_tau(0: 8,:,:,:)
F_tau => FandF_tau(9:17,:,:,:) F_tau => FandF_tau(9:17,:,:,:)
@ -467,7 +476,8 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite
if (num%update_gamma) call utilities_saveReferenceStiffness if (num%update_gamma) call utilities_saveReferenceStiffness
call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,ierr); CHKERRQ(ierr) call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
CHKERRQ(err_PETSc)
end subroutine grid_mechanical_spectral_polarisation_restartWrite end subroutine grid_mechanical_spectral_polarisation_restartWrite
@ -475,7 +485,7 @@ end subroutine grid_mechanical_spectral_polarisation_restartWrite
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief convergence check !> @brief convergence check
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,ierr) subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dummy,err_PETSc)
SNES :: snes_local SNES :: snes_local
PetscInt, intent(in) :: PETScIter PetscInt, intent(in) :: PETScIter
@ -485,7 +495,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,devNull3,reason,dumm
devNull3 devNull3
SNESConvergedReason :: reason SNESConvergedReason :: reason
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
real(pReal) :: & real(pReal) :: &
curlTol, & curlTol, &
divTol, & divTol, &
@ -521,7 +531,7 @@ end subroutine converged
!> @brief forms the residual vector !> @brief forms the residual vector
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine formResidual(in, FandF_tau, & subroutine formResidual(in, FandF_tau, &
residuum, dummy,ierr) residuum, dummy,err_PETSc)
DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work) DMDALocalInfo, dimension(DMDA_LOCAL_INFO_SIZE) :: in !< DMDA info (needs to be named "in" for macros like XRANGE to work)
PetscScalar, dimension(3,3,2,XG_RANGE,YG_RANGE,ZG_RANGE), & PetscScalar, dimension(3,3,2,XG_RANGE,YG_RANGE,ZG_RANGE), &
@ -537,8 +547,9 @@ subroutine formResidual(in, FandF_tau, &
PETScIter, & PETScIter, &
nfuncs nfuncs
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer :: & integer(MPI_INTEGER_KIND) :: err_MPI
integer :: &
i, j, k, e i, j, k, e
!--------------------------------------------------------------------------------------------------- !---------------------------------------------------------------------------------------------------
@ -553,10 +564,11 @@ subroutine formResidual(in, FandF_tau, &
X_RANGE, Y_RANGE, Z_RANGE) X_RANGE, Y_RANGE, Z_RANGE)
F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt F_av = sum(sum(sum(F,dim=5),dim=4),dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,F_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,F_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call SNESGetNumberFunctionEvals(snes,nfuncs,ierr); CHKERRQ(ierr) call SNESGetNumberFunctionEvals(snes,nfuncs,err_PETSc); CHKERRQ(err_PETSc)
call SNESGetIterationNumber(snes,PETScIter,ierr); CHKERRQ(ierr) call SNESGetIterationNumber(snes,PETScIter,err_PETSc); CHKERRQ(err_PETSc)
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
@ -597,7 +609,7 @@ subroutine formResidual(in, FandF_tau, &
call utilities_constitutiveResponse(residual_F, & ! "residuum" gets field of first PK stress (to save memory) call utilities_constitutiveResponse(residual_F, & ! "residuum" gets field of first PK stress (to save memory)
P_av,C_volAvg,C_minMaxAvg, & P_av,C_volAvg,C_minMaxAvg, &
F - residual_F_tau/num%beta,params%Delta_t,params%rotation_BC) F - residual_F_tau/num%beta,params%Delta_t,params%rotation_BC)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! stress BC handling ! stress BC handling
@ -78,7 +78,7 @@ program DAMASK_mesh
type(tLoadCase), allocatable, dimension(:) :: loadCases !< array of all load cases type(tLoadCase), allocatable, dimension(:) :: loadCases !< array of all load cases
type(tSolutionState), allocatable, dimension(:) :: solres type(tSolutionState), allocatable, dimension(:) :: solres
PetscInt :: faceSet, currentFaceSet, dimPlex PetscInt :: faceSet, currentFaceSet, dimPlex
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(kind(COMPONENT_UNDEFINED_ID)) :: ID integer(kind(COMPONENT_UNDEFINED_ID)) :: ID
external :: & external :: &
quit quit
@ -98,8 +98,8 @@ program DAMASK_mesh
if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack') if (maxCutBack < 0) call IO_error(301,ext_msg='maxCutBack')
! reading basic information from load case file and allocate data structure containing load cases ! reading basic information from load case file and allocate data structure containing load cases
call DMGetDimension(geomMesh,dimPlex,ierr) !< dimension of mesh (2D or 3D) call DMGetDimension(geomMesh,dimPlex,err_PETSc) !< dimension of mesh (2D or 3D)
CHKERRA(ierr) CHKERRA(err_PETSc)
allocate(solres(1)) allocate(solres(1))
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
@ -92,7 +92,7 @@ subroutine FEM_utilities_init
p_i !< integration order (quadrature rule) p_i !< integration order (quadrature rule)
character(len=*), parameter :: & character(len=*), parameter :: &
PETSCDEBUG = ' -snes_view -snes_monitor ' PETSCDEBUG = ' -snes_view -snes_monitor '
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
logical :: debugPETSc !< use some in debug defined options for more verbose PETSc solution logical :: debugPETSc !< use some in debug defined options for more verbose PETSc solution
@ -116,20 +116,20 @@ subroutine FEM_utilities_init
trim(PETScDebug), & trim(PETScDebug), &
'add more using the "PETSc_options" keyword in numerics.yaml' 'add more using the "PETSc_options" keyword in numerics.yaml'
flush(IO_STDOUT) flush(IO_STDOUT)
call PetscOptionsClear(PETSC_NULL_OPTIONS,ierr) call PetscOptionsClear(PETSC_NULL_OPTIONS,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
if(debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),ierr) if(debugPETSc) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(PETSCDEBUG),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type newtonls & call PetscOptionsInsertString(PETSC_NULL_OPTIONS,'-mechanical_snes_type newtonls &
&-mechanical_snes_linesearch_type cp -mechanical_snes_ksp_ew & &-mechanical_snes_linesearch_type cp -mechanical_snes_ksp_ew &
&-mechanical_snes_ksp_ew_rtol0 0.01 -mechanical_snes_ksp_ew_rtolmax 0.01 & &-mechanical_snes_ksp_ew_rtol0 0.01 -mechanical_snes_ksp_ew_rtolmax 0.01 &
&-mechanical_ksp_type fgmres -mechanical_ksp_max_it 25', ierr) &-mechanical_ksp_type fgmres -mechanical_ksp_max_it 25', err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_mesh%get_asString('PETSc_options',defaultVal=''),ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,num_mesh%get_asString('PETSc_options',defaultVal=''),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
write(petsc_optionsOrder,'(a,i0)') '-mechFE_petscspace_degree ', p_s write(petsc_optionsOrder,'(a,i0)') '-mechFE_petscspace_degree ', p_s
call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_optionsOrder),ierr) call PetscOptionsInsertString(PETSC_NULL_OPTIONS,trim(petsc_optionsOrder),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
wgt = 1.0/real(mesh_maxNips*mesh_NcpElemsGlobal,pReal) wgt = 1.0/real(mesh_maxNips*mesh_NcpElemsGlobal,pReal)
@ -144,10 +144,9 @@ subroutine utilities_constitutiveResponse(timeinc,P_av,forwardData)
real(pReal), intent(in) :: timeinc !< loading time real(pReal), intent(in) :: timeinc !< loading time
logical, intent(in) :: forwardData !< age results logical, intent(in) :: forwardData !< age results
real(pReal),intent(out), dimension(3,3) :: P_av !< average PK stress real(pReal),intent(out), dimension(3,3) :: P_av !< average PK stress
PetscErrorCode :: ierr integer(MPI_INTEGER_KIND) :: err_MPI
print'(/,1x,a)', '... evaluating constitutive response ......................................' print'(/,1x,a)', '... evaluating constitutive response ......................................'
@ -157,7 +156,9 @@ subroutine utilities_constitutiveResponse(timeinc,P_av,forwardData)
cutBack = .false. cutBack = .false.
P_av = sum(homogenization_P,dim=3) * wgt P_av = sum(homogenization_P,dim=3) * wgt
call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
end subroutine utilities_constitutiveResponse end subroutine utilities_constitutiveResponse
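Since wgt is set above to one over the global number of integration points (mesh_maxNips*mesh_NcpElemsGlobal), each rank's partial sum of homogenization_P already carries the correct normalisation; the MPI_SUM reduction then completes the global average. A minimal sketch of that averaging step, using the module variables shown above:

  real(pReal), dimension(3,3) :: P_av
  integer(MPI_INTEGER_KIND) :: err_MPI

  P_av = sum(homogenization_P,dim=3) * wgt                                    ! locally summed, globally normalised
  call MPI_Allreduce(MPI_IN_PLACE,P_av,9,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! sum the partial averages
  if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'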
@ -174,26 +175,29 @@ subroutine utilities_projectBCValues(localVec,section,field,comp,bcPointsIS,BCVa
PetscInt, pointer :: bcPoints(:) PetscInt, pointer :: bcPoints(:)
PetscScalar, pointer :: localArray(:) PetscScalar, pointer :: localArray(:)
PetscScalar :: BCValue,BCDotValue,timeinc PetscScalar :: BCValue,BCDotValue,timeinc
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
call PetscSectionGetFieldComponents(section,field,numComp,ierr); CHKERRQ(ierr) call PetscSectionGetFieldComponents(section,field,numComp,err_PETSc)
call ISGetSize(bcPointsIS,nBcPoints,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (nBcPoints > 0) call ISGetIndicesF90(bcPointsIS,bcPoints,ierr) call ISGetSize(bcPointsIS,nBcPoints,err_PETSc)
call VecGetArrayF90(localVec,localArray,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (nBcPoints > 0) call ISGetIndicesF90(bcPointsIS,bcPoints,err_PETSc)
call VecGetArrayF90(localVec,localArray,err_PETSc); CHKERRQ(err_PETSc)
do point = 1, nBcPoints do point = 1, nBcPoints
call PetscSectionGetFieldDof(section,bcPoints(point),field,numDof,ierr) call PetscSectionGetFieldDof(section,bcPoints(point),field,numDof,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscSectionGetFieldOffset(section,bcPoints(point),field,offset,ierr) call PetscSectionGetFieldOffset(section,bcPoints(point),field,offset,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
do dof = offset+comp+1, offset+numDof, numComp do dof = offset+comp+1, offset+numDof, numComp
localArray(dof) = localArray(dof) + BCValue + BCDotValue*timeinc localArray(dof) = localArray(dof) + BCValue + BCDotValue*timeinc
end do end do
end do end do
call VecRestoreArrayF90(localVec,localArray,ierr); CHKERRQ(ierr) call VecRestoreArrayF90(localVec,localArray,err_PETSc); CHKERRQ(err_PETSc)
call VecAssemblyBegin(localVec, ierr); CHKERRQ(ierr) call VecAssemblyBegin(localVec, err_PETSc); CHKERRQ(err_PETSc)
call VecAssemblyEnd (localVec, ierr); CHKERRQ(ierr) call VecAssemblyEnd (localVec, err_PETSc); CHKERRQ(err_PETSc)
if (nBcPoints > 0) call ISRestoreIndicesF90(bcPointsIS,bcPoints,ierr) if (nBcPoints > 0) call ISRestoreIndicesF90(bcPointsIS,bcPoints,err_PETSc)
CHKERRQ(err_PETSc)
end subroutine utilities_projectBCValues end subroutine utilities_projectBCValues
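The inner loop of utilities_projectBCValues touches only one displacement component per node: within a constrained point's block of numDof field dofs starting at offset, the dofs of component comp sit every numComp entries. A small worked example with purely illustrative numbers:

  ! numComp = 3 (x,y,z), comp = 1 (y), offset = 12, numDof = 9  ->  dofs 14, 17, 20 are updated
  do dof = offset+comp+1, offset+numDof, numComp
    localArray(dof) = localArray(dof) + BCValue + BCDotValue*timeinc          ! ramp the BC over the increment
  end do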
@ -80,7 +80,8 @@ subroutine discretization_mesh_init(restart)
PetscInt :: nFaceSets PetscInt :: nFaceSets
PetscInt, pointer, dimension(:) :: pFaceSets PetscInt, pointer, dimension(:) :: pFaceSets
IS :: faceSetIS IS :: faceSetIS
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
integer, dimension(:), allocatable :: & integer, dimension(:), allocatable :: &
materialAt materialAt
class(tNode), pointer :: & class(tNode), pointer :: &
@ -100,56 +101,60 @@ subroutine discretization_mesh_init(restart)
debug_element = config_debug%get_asInt('element',defaultVal=1) debug_element = config_debug%get_asInt('element',defaultVal=1)
debug_ip = config_debug%get_asInt('integrationpoint',defaultVal=1) debug_ip = config_debug%get_asInt('integrationpoint',defaultVal=1)
call DMPlexCreateFromFile(PETSC_COMM_WORLD,interface_geomFile,PETSC_TRUE,globalMesh,ierr) call DMPlexCreateFromFile(PETSC_COMM_WORLD,interface_geomFile,PETSC_TRUE,globalMesh,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetDimension(globalMesh,dimPlex,ierr) call DMGetDimension(globalMesh,dimPlex,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetStratumSize(globalMesh,'depth',dimPlex,mesh_NcpElemsGlobal,ierr) call DMGetStratumSize(globalMesh,'depth',dimPlex,mesh_NcpElemsGlobal,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
print'()' print'()'
call DMView(globalMesh, PETSC_VIEWER_STDOUT_WORLD,ierr) call DMView(globalMesh, PETSC_VIEWER_STDOUT_WORLD,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
! get number of IDs in face sets (for boundary conditions?) ! get number of IDs in face sets (for boundary conditions?)
call DMGetLabelSize(globalMesh,'Face Sets',mesh_Nboundaries,ierr) call DMGetLabelSize(globalMesh,'Face Sets',mesh_Nboundaries,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call MPI_Bcast(mesh_Nboundaries,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(mesh_Nboundaries,1,MPI_INTEGER,0,MPI_COMM_WORLD,err_MPI)
call MPI_Bcast(mesh_NcpElemsGlobal,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr) if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(dimPlex,1,MPI_INTEGER,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(mesh_NcpElemsGlobal,1,MPI_INTEGER,0,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call MPI_Bcast(dimPlex,1,MPI_INTEGER,0,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (worldrank == 0) then if (worldrank == 0) then
call DMClone(globalMesh,geomMesh,ierr) call DMClone(globalMesh,geomMesh,err_PETSc)
else else
call DMPlexDistribute(globalMesh,0,sf,geomMesh,ierr) call DMPlexDistribute(globalMesh,0,sf,geomMesh,err_PETSc)
endif endif
CHKERRQ(ierr) CHKERRQ(err_PETSc)
allocate(mesh_boundaries(mesh_Nboundaries), source = 0) allocate(mesh_boundaries(mesh_Nboundaries), source = 0)
call DMGetLabelSize(globalMesh,'Face Sets',nFaceSets,ierr) call DMGetLabelSize(globalMesh,'Face Sets',nFaceSets,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetLabelIdIS(globalMesh,'Face Sets',faceSetIS,ierr) call DMGetLabelIdIS(globalMesh,'Face Sets',faceSetIS,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (nFaceSets > 0) then if (nFaceSets > 0) then
call ISGetIndicesF90(faceSetIS,pFaceSets,ierr) call ISGetIndicesF90(faceSetIS,pFaceSets,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
mesh_boundaries(1:nFaceSets) = pFaceSets mesh_boundaries(1:nFaceSets) = pFaceSets
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call ISRestoreIndicesF90(faceSetIS,pFaceSets,ierr) call ISRestoreIndicesF90(faceSetIS,pFaceSets,err_PETSc)
endif endif
call MPI_Bcast(mesh_boundaries,mesh_Nboundaries,MPI_INTEGER,0,MPI_COMM_WORLD,ierr) call MPI_Bcast(mesh_boundaries,mesh_Nboundaries,MPI_INTEGER,0,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call DMDestroy(globalMesh,ierr); CHKERRQ(ierr) call DMDestroy(globalMesh,err_PETSc); CHKERRQ(err_PETSc)
call DMGetStratumSize(geomMesh,'depth',dimPlex,mesh_NcpElems,ierr) call DMGetStratumSize(geomMesh,'depth',dimPlex,mesh_NcpElems,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetStratumSize(geomMesh,'depth',0,mesh_Nnodes,ierr) call DMGetStratumSize(geomMesh,'depth',0,mesh_Nnodes,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
! Get initial nodal coordinates ! Get initial nodal coordinates
call DMGetCoordinates(geomMesh,coords_node0,ierr) call DMGetCoordinates(geomMesh,coords_node0,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call VecGetArrayF90(coords_node0, mesh_node0_temp,ierr) call VecGetArrayF90(coords_node0, mesh_node0_temp,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
mesh_maxNips = FEM_nQuadrature(dimPlex,p_i) mesh_maxNips = FEM_nQuadrature(dimPlex,p_i)
@ -158,8 +163,8 @@ subroutine discretization_mesh_init(restart)
allocate(materialAt(mesh_NcpElems)) allocate(materialAt(mesh_NcpElems))
do j = 1, mesh_NcpElems do j = 1, mesh_NcpElems
call DMGetLabelValue(geomMesh,'Cell Sets',j-1,materialAt(j),ierr) call DMGetLabelValue(geomMesh,'Cell Sets',j-1,materialAt(j),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
materialAt = materialAt + 1 materialAt = materialAt + 1
@ -188,16 +193,17 @@ subroutine mesh_FEM_build_ipVolumes(dimPlex)
PetscReal :: vol PetscReal :: vol
PetscReal, pointer,dimension(:) :: pCent, pNorm PetscReal, pointer,dimension(:) :: pCent, pNorm
PetscInt :: cellStart, cellEnd, cell PetscInt :: cellStart, cellEnd, cell
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
allocate(mesh_ipVolume(mesh_maxNips,mesh_NcpElems),source=0.0_pReal) allocate(mesh_ipVolume(mesh_maxNips,mesh_NcpElems),source=0.0_pReal)
call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)
allocate(pCent(dimPlex)) allocate(pCent(dimPlex))
allocate(pNorm(dimPlex)) allocate(pNorm(dimPlex))
do cell = cellStart, cellEnd-1 do cell = cellStart, cellEnd-1
call DMPlexComputeCellGeometryFVM(geomMesh,cell,vol,pCent,pNorm,ierr) call DMPlexComputeCellGeometryFVM(geomMesh,cell,vol,pCent,pNorm,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
mesh_ipVolume(:,cell+1) = vol/real(mesh_maxNips,pReal) mesh_ipVolume(:,cell+1) = vol/real(mesh_maxNips,pReal)
enddo enddo
@ -215,7 +221,7 @@ subroutine mesh_FEM_build_ipCoordinates(dimPlex,qPoints)
PetscReal, pointer,dimension(:) :: pV0, pCellJ, pInvcellJ PetscReal, pointer,dimension(:) :: pV0, pCellJ, pInvcellJ
PetscReal :: detJ PetscReal :: detJ
PetscInt :: cellStart, cellEnd, cell, qPt, dirI, dirJ, qOffset PetscInt :: cellStart, cellEnd, cell, qPt, dirI, dirJ, qOffset
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
allocate(mesh_ipCoordinates(3,mesh_maxNips,mesh_NcpElems),source=0.0_pReal) allocate(mesh_ipCoordinates(3,mesh_maxNips,mesh_NcpElems),source=0.0_pReal)
@ -223,10 +229,11 @@ subroutine mesh_FEM_build_ipCoordinates(dimPlex,qPoints)
allocate(pV0(dimPlex)) allocate(pV0(dimPlex))
allocatE(pCellJ(dimPlex**2)) allocatE(pCellJ(dimPlex**2))
allocatE(pinvCellJ(dimPlex**2)) allocatE(pinvCellJ(dimPlex**2))
call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(geomMesh,0,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
call DMPlexComputeCellGeometryAffineFEM(geomMesh,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(geomMesh,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
qOffset = 0 qOffset = 0
do qPt = 1, mesh_maxNips do qPt = 1, mesh_maxNips
do dirI = 1, dimPlex do dirI = 1, dimPlex
@ -108,7 +108,7 @@ subroutine FEM_mechanical_init(fieldBC)
PetscScalar, allocatable, target :: x_scal(:) PetscScalar, allocatable, target :: x_scal(:)
character(len=*), parameter :: prefix = 'mechFE_' character(len=*), parameter :: prefix = 'mechFE_'
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
real(pReal), dimension(3,3) :: devNull real(pReal), dimension(3,3) :: devNull
class(tNode), pointer :: & class(tNode), pointer :: &
num_mesh num_mesh
@ -130,8 +130,8 @@ subroutine FEM_mechanical_init(fieldBC)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! Setup FEM mech mesh ! Setup FEM mech mesh
call DMClone(geomMesh,mechanical_mesh,ierr); CHKERRQ(ierr) call DMClone(geomMesh,mechanical_mesh,err_PETSc); CHKERRQ(err_PETSc)
call DMGetDimension(mechanical_mesh,dimPlex,ierr); CHKERRQ(ierr) call DMGetDimension(mechanical_mesh,dimPlex,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! Setup FEM mech discretization ! Setup FEM mech discretization
@ -140,35 +140,37 @@ subroutine FEM_mechanical_init(fieldBC)
nQuadrature = FEM_nQuadrature( dimPlex,num%p_i) nQuadrature = FEM_nQuadrature( dimPlex,num%p_i)
qPointsP => qPoints qPointsP => qPoints
qWeightsP => qWeights qWeightsP => qWeights
call PetscQuadratureCreate(PETSC_COMM_SELF,mechQuad,ierr); CHKERRQ(ierr) call PetscQuadratureCreate(PETSC_COMM_SELF,mechQuad,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
nc = dimPlex nc = dimPlex
call PetscQuadratureSetData(mechQuad,dimPlex,nc,nQuadrature,qPointsP,qWeightsP,ierr) call PetscQuadratureSetData(mechQuad,dimPlex,nc,nQuadrature,qPointsP,qWeightsP,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscFECreateDefault(PETSC_COMM_SELF,dimPlex,nc,PETSC_TRUE,prefix, & call PetscFECreateDefault(PETSC_COMM_SELF,dimPlex,nc,PETSC_TRUE,prefix, &
num%p_i,mechFE,ierr); CHKERRQ(ierr) num%p_i,mechFE,err_PETSc); CHKERRQ(err_PETSc)
call PetscFESetQuadrature(mechFE,mechQuad,ierr); CHKERRQ(ierr) call PetscFESetQuadrature(mechFE,mechQuad,err_PETSc); CHKERRQ(err_PETSc)
call PetscFEGetDimension(mechFE,nBasis,ierr); CHKERRQ(ierr) call PetscFEGetDimension(mechFE,nBasis,err_PETSc); CHKERRQ(err_PETSc)
nBasis = nBasis/nc nBasis = nBasis/nc
call DMAddField(mechanical_mesh,PETSC_NULL_DMLABEL,mechFE,ierr); CHKERRQ(ierr) call DMAddField(mechanical_mesh,PETSC_NULL_DMLABEL,mechFE,err_PETSc)
call DMCreateDS(mechanical_mesh,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetDS(mechanical_mesh,mechDS,ierr); CHKERRQ(ierr) call DMCreateDS(mechanical_mesh,err_PETSc); CHKERRQ(err_PETSc)
call PetscDSGetTotalDimension(mechDS,cellDof,ierr); CHKERRQ(ierr) call DMGetDS(mechanical_mesh,mechDS,err_PETSc); CHKERRQ(err_PETSc)
call PetscFEDestroy(mechFE,ierr); CHKERRQ(ierr) call PetscDSGetTotalDimension(mechDS,cellDof,err_PETSc); CHKERRQ(err_PETSc)
call PetscQuadratureDestroy(mechQuad,ierr); CHKERRQ(ierr) call PetscFEDestroy(mechFE,err_PETSc); CHKERRQ(err_PETSc)
call PetscQuadratureDestroy(mechQuad,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! Setup FEM mech boundary conditions ! Setup FEM mech boundary conditions
call DMGetLabel(mechanical_mesh,'Face Sets',BCLabel,ierr); CHKERRQ(ierr) call DMGetLabel(mechanical_mesh,'Face Sets',BCLabel,err_PETSc)
call DMPlexLabelComplete(mechanical_mesh,BCLabel,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetLocalSection(mechanical_mesh,section,ierr); CHKERRQ(ierr) call DMPlexLabelComplete(mechanical_mesh,BCLabel,err_PETSc); CHKERRQ(err_PETSc)
call DMGetLocalSection(mechanical_mesh,section,err_PETSc); CHKERRQ(err_PETSc)
allocate(pnumComp(1), source=dimPlex) allocate(pnumComp(1), source=dimPlex)
allocate(pnumDof(0:dimPlex), source = 0) allocate(pnumDof(0:dimPlex), source = 0)
do topologDim = 0, dimPlex do topologDim = 0, dimPlex
call DMPlexGetDepthStratum(mechanical_mesh,topologDim,cellStart,cellEnd,ierr) call DMPlexGetDepthStratum(mechanical_mesh,topologDim,cellStart,cellEnd,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscSectionGetDof(section,cellStart,pnumDof(topologDim),ierr) call PetscSectionGetDof(section,cellStart,pnumDof(topologDim),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
numBC = 0 numBC = 0
do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries
@ -181,55 +183,61 @@ subroutine FEM_mechanical_init(fieldBC)
do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries
if (fieldBC%componentBC(field)%Mask(faceSet)) then if (fieldBC%componentBC(field)%Mask(faceSet)) then
numBC = numBC + 1 numBC = numBC + 1
call ISCreateGeneral(PETSC_COMM_WORLD,1,[field-1],PETSC_COPY_VALUES,pbcComps(numBC),ierr) call ISCreateGeneral(PETSC_COMM_WORLD,1,[field-1],PETSC_COPY_VALUES,pbcComps(numBC),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetStratumSize(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcSize,ierr) call DMGetStratumSize(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcSize,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (bcSize > 0) then if (bcSize > 0) then
call DMGetStratumIS(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcPoint,ierr) call DMGetStratumIS(mechanical_mesh,'Face Sets',mesh_boundaries(faceSet),bcPoint,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call ISGetIndicesF90(bcPoint,pBcPoint,ierr); CHKERRQ(ierr) call ISGetIndicesF90(bcPoint,pBcPoint,err_PETSc); CHKERRQ(err_PETSc)
call ISCreateGeneral(PETSC_COMM_WORLD,bcSize,pBcPoint,PETSC_COPY_VALUES,pbcPoints(numBC),ierr) call ISCreateGeneral(PETSC_COMM_WORLD,bcSize,pBcPoint,PETSC_COPY_VALUES,pbcPoints(numBC),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call ISRestoreIndicesF90(bcPoint,pBcPoint,ierr); CHKERRQ(ierr) call ISRestoreIndicesF90(bcPoint,pBcPoint,err_PETSc); CHKERRQ(err_PETSc)
call ISDestroy(bcPoint,ierr); CHKERRQ(ierr) call ISDestroy(bcPoint,err_PETSc); CHKERRQ(err_PETSc)
else else
call ISCreateGeneral(PETSC_COMM_WORLD,0,[0],PETSC_COPY_VALUES,pbcPoints(numBC),ierr) call ISCreateGeneral(PETSC_COMM_WORLD,0,[0],PETSC_COPY_VALUES,pbcPoints(numBC),err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
endif endif
endif endif
enddo; enddo enddo; enddo
call DMPlexCreateSection(mechanical_mesh,nolabel,pNumComp,pNumDof, & call DMPlexCreateSection(mechanical_mesh,nolabel,pNumComp,pNumDof, &
numBC,pBcField,pBcComps,pBcPoints,PETSC_NULL_IS,section,ierr) numBC,pBcField,pBcComps,pBcPoints,PETSC_NULL_IS,section,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMSetSection(mechanical_mesh,section,ierr); CHKERRQ(ierr) call DMSetSection(mechanical_mesh,section,err_PETSc); CHKERRQ(err_PETSc)
do faceSet = 1, numBC do faceSet = 1, numBC
call ISDestroy(pbcPoints(faceSet),ierr); CHKERRQ(ierr) call ISDestroy(pbcPoints(faceSet),err_PETSc); CHKERRQ(err_PETSc)
enddo enddo
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc ! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,ierr);CHKERRQ(ierr) call SNESCreate(PETSC_COMM_WORLD,mechanical_snes,err_PETSc);CHKERRQ(err_PETSc)
call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',ierr);CHKERRQ(ierr) call SNESSetOptionsPrefix(mechanical_snes,'mechanical_',err_PETSc)
call SNESSetDM(mechanical_snes,mechanical_mesh,ierr); CHKERRQ(ierr) !< set the mesh for non-linear solver CHKERRQ(err_PETSc)
call DMCreateGlobalVector(mechanical_mesh,solution ,ierr); CHKERRQ(ierr) !< locally owned displacement Dofs call SNESSetDM(mechanical_snes,mechanical_mesh,err_PETSc) ! set the mesh for non-linear solver
call DMCreateGlobalVector(mechanical_mesh,solution_rate ,ierr); CHKERRQ(ierr) !< locally owned velocity Dofs to guess solution at next load step CHKERRQ(err_PETSc)
call DMCreateLocalVector (mechanical_mesh,solution_local ,ierr); CHKERRQ(ierr) !< locally owned velocity Dofs to guess solution at next load step call DMCreateGlobalVector(mechanical_mesh,solution, err_PETSc) ! locally owned displacement Dofs
call DMSNESSetFunctionLocal(mechanical_mesh,FEM_mechanical_formResidual,PETSC_NULL_VEC,ierr) !< function to evaluate residual forces CHKERRQ(err_PETSc)
CHKERRQ(ierr) call DMCreateGlobalVector(mechanical_mesh,solution_rate, err_PETSc) ! locally owned velocity Dofs to guess solution at next load step
call DMSNESSetJacobianLocal(mechanical_mesh,FEM_mechanical_formJacobian,PETSC_NULL_VEC,ierr) !< function to evaluate stiffness matrix CHKERRQ(err_PETSc)
CHKERRQ(ierr) call DMCreateLocalVector (mechanical_mesh,solution_local,err_PETSc) ! locally owned velocity Dofs to guess solution at next load step
call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1), ierr); CHKERRQ(ierr) !< ignore linear solve failures CHKERRQ(err_PETSc)
call SNESSetConvergenceTest(mechanical_snes,FEM_mechanical_converged,PETSC_NULL_VEC,PETSC_NULL_FUNCTION,ierr) call DMSNESSetFunctionLocal(mechanical_mesh,FEM_mechanical_formResidual,PETSC_NULL_VEC,err_PETSc) ! function to evaluate residual forces
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESSetTolerances(mechanical_snes,1.0,0.0,0.0,num%itmax,num%itmax,ierr) call DMSNESSetJacobianLocal(mechanical_mesh,FEM_mechanical_formJacobian,PETSC_NULL_VEC,err_PETSc) ! function to evaluate stiffness matrix
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call SNESSetFromOptions(mechanical_snes,ierr); CHKERRQ(ierr) call SNESSetMaxLinearSolveFailures(mechanical_snes, huge(1), err_PETSc) ! ignore linear solve failures CHKERRQ(err_PETSc) !< ignore linear solve failures
CHKERRQ(err_PETSc)
call SNESSetConvergenceTest(mechanical_snes,FEM_mechanical_converged,PETSC_NULL_VEC,PETSC_NULL_FUNCTION,err_PETSc)
CHKERRQ(err_PETSc)
call SNESSetTolerances(mechanical_snes,1.0,0.0,0.0,num%itmax,num%itmax,err_PETSc)
CHKERRQ(err_PETSc)
call SNESSetFromOptions(mechanical_snes,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! init fields ! init fields
call VecSet(solution ,0.0,ierr); CHKERRQ(ierr) call VecSet(solution ,0.0,err_PETSc); CHKERRQ(err_PETSc)
call VecSet(solution_rate ,0.0,ierr); CHKERRQ(ierr) call VecSet(solution_rate ,0.0,err_PETSc); CHKERRQ(err_PETSc)
allocate(x_scal(cellDof)) allocate(x_scal(cellDof))
allocate(nodalWeightsP(1)) allocate(nodalWeightsP(1))
allocate(nodalPointsP(dimPlex)) allocate(nodalPointsP(dimPlex))
@ -237,26 +245,26 @@ subroutine FEM_mechanical_init(fieldBC)
allocate(pcellJ(dimPlex**2)) allocate(pcellJ(dimPlex**2))
allocate(pinvcellJ(dimPlex**2)) allocate(pinvcellJ(dimPlex**2))
allocate(cellJMat(dimPlex,dimPlex)) allocate(cellJMat(dimPlex,dimPlex))
call PetscDSGetDiscretization(mechDS,0,mechFE,ierr) call PetscDSGetDiscretization(mechDS,0,mechFE,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscFEGetDualSpace(mechFE,mechDualSpace,ierr); CHKERRQ(ierr) call PetscFEGetDualSpace(mechFE,mechDualSpace,err_PETSc); CHKERRQ(err_PETSc)
call DMPlexGetHeightStratum(mechanical_mesh,0,cellStart,cellEnd,ierr) call DMPlexGetHeightStratum(mechanical_mesh,0,cellStart,cellEnd,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
x_scal = 0.0_pReal x_scal = 0.0_pReal
call DMPlexComputeCellGeometryAffineFEM(mechanical_mesh,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(mechanical_mesh,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
cellJMat = reshape(pCellJ,shape=[dimPlex,dimPlex]) cellJMat = reshape(pCellJ,shape=[dimPlex,dimPlex])
do basis = 0, nBasis*dimPlex-1, dimPlex do basis = 0, nBasis*dimPlex-1, dimPlex
call PetscDualSpaceGetFunctional(mechDualSpace,basis,functional,ierr) call PetscDualSpaceGetFunctional(mechDualSpace,basis,functional,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscQuadratureGetData(functional,dimPlex,nc,nNodalPoints,nodalPointsP,nodalWeightsP,ierr) call PetscQuadratureGetData(functional,dimPlex,nc,nNodalPoints,nodalPointsP,nodalWeightsP,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
x_scal(basis+1:basis+dimPlex) = pV0 + matmul(transpose(cellJMat),nodalPointsP + 1.0_pReal) x_scal(basis+1:basis+dimPlex) = pV0 + matmul(transpose(cellJMat),nodalPointsP + 1.0_pReal)
enddo enddo
px_scal => x_scal px_scal => x_scal
call DMPlexVecSetClosure(mechanical_mesh,section,solution_local,cell,px_scal,5,ierr) call DMPlexVecSetClosure(mechanical_mesh,section,solution_local,cell,px_scal,5,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
call utilities_constitutiveResponse(0.0_pReal,devNull,.true.) call utilities_constitutiveResponse(0.0_pReal,devNull,.true.)
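The nodal initialisation above relies on the affine cell map returned by DMPlexComputeCellGeometryAffineFEM: by PETSc's convention pV0 is the physical image of the reference-cell corner at xi = -1 and pCellJ holds the cell Jacobian, so a reference point xi in [-1,1]**dimPlex lands at pV0 + J^T (xi + 1). A one-line sketch of that mapping for a single point (xi and x_phys are illustrative names; the transpose-and-reshape mirrors the code above):

  PetscReal, dimension(dimPlex) :: xi, x_phys

  x_phys = pV0 + matmul(transpose(reshape(pCellJ,[dimPlex,dimPlex])), xi + 1.0_pReal)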
@ -279,7 +287,7 @@ type(tSolutionState) function FEM_mechanical_solution( &
character(len=*), intent(in) :: & character(len=*), intent(in) :: &
incInfoIn incInfoIn
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
SNESConvergedReason :: reason SNESConvergedReason :: reason
incInfo = incInfoIn incInfo = incInfoIn
@ -289,8 +297,10 @@ type(tSolutionState) function FEM_mechanical_solution( &
params%timeinc = timeinc params%timeinc = timeinc
params%fieldBC = fieldBC params%fieldBC = fieldBC
call SNESSolve(mechanical_snes,PETSC_NULL_VEC,solution,ierr); CHKERRQ(ierr) ! solve mechanical_snes based on solution guess (result in solution) call SNESSolve(mechanical_snes,PETSC_NULL_VEC,solution,err_PETSc) ! solve mechanical_snes based on solution guess (result in solution)
call SNESGetConvergedReason(mechanical_snes,reason,ierr); CHKERRQ(ierr) ! solution converged? CHKERRQ(err_PETSc)
call SNESGetConvergedReason(mechanical_snes,reason,err_PETSc) ! solution converged?
CHKERRQ(err_PETSc)
terminallyIll = .false. terminallyIll = .false.
if (reason < 1) then ! 0: still iterating (will not occur), negative -> convergence error if (reason < 1) then ! 0: still iterating (will not occur), negative -> convergence error
@ -298,8 +308,8 @@ type(tSolutionState) function FEM_mechanical_solution( &
FEM_mechanical_solution%iterationsNeeded = num%itmax FEM_mechanical_solution%iterationsNeeded = num%itmax
else ! >= 1 proper convergence (or terminally ill) else ! >= 1 proper convergence (or terminally ill)
FEM_mechanical_solution%converged = .true. FEM_mechanical_solution%converged = .true.
call SNESGetIterationNumber(mechanical_snes,FEM_mechanical_solution%iterationsNeeded,ierr) call SNESGetIterationNumber(mechanical_snes,FEM_mechanical_solution%iterationsNeeded,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
endif endif
print'(/,1x,a)', '===========================================================================' print'(/,1x,a)', '==========================================================================='
@ -311,11 +321,12 @@ end function FEM_mechanical_solution
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief forms the FEM residual vector !> @brief forms the FEM residual vector
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr) subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,err_PETSc)
DM :: dm_local DM :: dm_local
PetscObject,intent(in) :: dummy PetscObject,intent(in) :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
PetscDS :: prob PetscDS :: prob
Vec :: x_local, f_local, xx_local Vec :: x_local, f_local, xx_local
@ -339,22 +350,23 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
allocate(pinvcellJ(dimPlex**2)) allocate(pinvcellJ(dimPlex**2))
allocate(x_scal(cellDof)) allocate(x_scal(cellDof))
call DMGetLocalSection(dm_local,section,ierr); CHKERRQ(ierr) call DMGetLocalSection(dm_local,section,err_PETSc); CHKERRQ(err_PETSc)
call DMGetDS(dm_local,prob,ierr); CHKERRQ(ierr) call DMGetDS(dm_local,prob,err_PETSc); CHKERRQ(err_PETSc)
call PetscDSGetTabulation(prob,0,basisField,basisFieldDer,ierr) call PetscDSGetTabulation(prob,0,basisField,basisFieldDer,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,err_PETSc)
call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call VecWAXPY(x_local,1.0,xx_local,solution_local,ierr); CHKERRQ(ierr) call DMGetLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
call VecWAXPY(x_local,1.0,xx_local,solution_local,err_PETSc); CHKERRQ(err_PETSc)
do field = 1, dimPlex; do face = 1, mesh_Nboundaries do field = 1, dimPlex; do face = 1, mesh_Nboundaries
if (params%fieldBC%componentBC(field)%Mask(face)) then if (params%fieldBC%componentBC(field)%Mask(face)) then
call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,ierr) call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,err_PETSc)
if (bcSize > 0) then if (bcSize > 0) then
call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,ierr) call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call utilities_projectBCValues(x_local,section,0,field-1,bcPoints, & call utilities_projectBCValues(x_local,section,0,field-1,bcPoints, &
0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc) 0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc)
call ISDestroy(bcPoints,ierr); CHKERRQ(ierr) call ISDestroy(bcPoints,err_PETSc); CHKERRQ(err_PETSc)
endif endif
endif endif
enddo; enddo enddo; enddo
@ -363,12 +375,12 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
! evaluate field derivatives ! evaluate field derivatives
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
call PetscSectionGetNumFields(section,numFields,ierr) call PetscSectionGetNumFields(section,numFields,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,ierr) !< get Dofs belonging to element call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,err_PETSc) !< get Dofs belonging to element
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex]) IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex])
do qPt = 0, nQuadrature-1 do qPt = 0, nQuadrature-1
m = cell*nQuadrature + qPt+1 m = cell*nQuadrature + qPt+1
@ -392,23 +404,24 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
enddo enddo
endif endif
call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,ierr) call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! evaluate constitutive response ! evaluate constitutive response
call utilities_constitutiveResponse(params%timeinc,P_av,ForwardData) call utilities_constitutiveResponse(params%timeinc,P_av,ForwardData)
call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,ierr) call MPI_Allreduce(MPI_IN_PLACE,terminallyIll,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
ForwardData = .false. ForwardData = .false.
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! integrating residual ! integrating residual
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,ierr) !< get Dofs belonging to element call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,err_PETSc) !< get Dofs belonging to element
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex]) IcellJMat = reshape(pInvcellJ,shape=[dimPlex,dimPlex])
f_scal = 0.0 f_scal = 0.0
do qPt = 0, nQuadrature-1 do qPt = 0, nQuadrature-1
@ -429,12 +442,12 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,ierr)
enddo enddo
f_scal = f_scal*abs(detJ) f_scal = f_scal*abs(detJ)
pf_scal => f_scal pf_scal => f_scal
call DMPlexVecSetClosure(dm_local,section,f_local,cell,pf_scal,ADD_VALUES,ierr) call DMPlexVecSetClosure(dm_local,section,f_local,cell,pf_scal,ADD_VALUES,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,ierr) call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
end subroutine FEM_mechanical_formResidual end subroutine FEM_mechanical_formResidual
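
(Editorial aside, not part of the patch.) The err_MPI handling above follows the point of this commit: the compiler's default INTEGER, PETSc's PetscInt, and the INTEGER expected by the MPI Fortran bindings are not guaranteed to share a kind, so the status variable and the literals passed to MPI_Allreduce now carry MPI_INTEGER_KIND explicitly. Below is a minimal, self-contained sketch of that hazard and the convention; MPI_INTEGER_KIND_demo and PetscInt_demo are hypothetical stand-ins for the library-provided kind constants, not the real ones.

program kind_mismatch_demo
  use, intrinsic :: ISO_Fortran_env, only: INT32, INT64
  implicit none
  ! hypothetical stand-ins: an MPI library built against 4-byte default INTEGER
  ! and a PETSc build configured with --with-64-bit-indices
  integer, parameter :: MPI_INTEGER_KIND_demo = INT32
  integer, parameter :: PetscInt_demo         = INT64

  integer                        :: n_default = 1
  integer(MPI_INTEGER_KIND_demo) :: err_MPI
  integer(PetscInt_demo)         :: n_petsc

  err_MPI = 0_MPI_INTEGER_KIND_demo                ! literal carries the MPI kind explicitly
  n_petsc = int(n_default,PetscInt_demo)           ! explicit conversion instead of relying on assignment across kinds
  if (err_MPI /= 0_MPI_INTEGER_KIND_demo) error stop 'MPI error'
  print '(a,i0,a,i0)', 'default INTEGER kind: ', kind(n_default), ', PETSc-style kind: ', kind(n_petsc)
end program kind_mismatch_demo
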
@ -442,13 +455,13 @@ end subroutine FEM_mechanical_formResidual
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief forms the FEM stiffness matrix !> @brief forms the FEM stiffness matrix
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr) subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,err_PETSc)
DM :: dm_local DM :: dm_local
Mat :: Jac_pre, Jac Mat :: Jac_pre, Jac
PetscObject, intent(in) :: dummy PetscObject, intent(in) :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscDS :: prob PetscDS :: prob
Vec :: x_local, xx_local Vec :: x_local, xx_local
@ -478,34 +491,43 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
allocate(pcellJ(dimPlex**2)) allocate(pcellJ(dimPlex**2))
allocate(pinvcellJ(dimPlex**2)) allocate(pinvcellJ(dimPlex**2))
call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,ierr); CHKERRQ(ierr) call MatSetOption(Jac,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE,err_PETSc)
call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call MatZeroEntries(Jac,ierr); CHKERRQ(ierr) call MatSetOption(Jac,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE,err_PETSc)
call DMGetDS(dm_local,prob,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscDSGetTabulation(prob,0,basisField,basisFieldDer,ierr) call MatZeroEntries(Jac,err_PETSc)
call DMGetLocalSection(dm_local,section,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGetGlobalSection(dm_local,gSection,ierr); CHKERRQ(ierr) call DMGetDS(dm_local,prob,err_PETSc)
CHKERRQ(err_PETSc)
call PetscDSGetTabulation(prob,0_pPETSCINT,basisField,basisFieldDer,err_PETSc)
call DMGetLocalSection(dm_local,section,err_PETSc)
CHKERRQ(err_PETSc)
call DMGetGlobalSection(dm_local,gSection,err_PETSc)
CHKERRQ(err_PETSc)
call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMGetLocalVector(dm_local,x_local,err_PETSc)
call VecWAXPY(x_local,1.0_pReal,xx_local,solution_local,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call VecWAXPY(x_local,1.0_pReal,xx_local,solution_local,err_PETSc)
CHKERRQ(err_PETSc)
do field = 1, dimPlex; do face = 1, mesh_Nboundaries do field = 1, dimPlex; do face = 1, mesh_Nboundaries
if (params%fieldBC%componentBC(field)%Mask(face)) then if (params%fieldBC%componentBC(field)%Mask(face)) then
call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,ierr) call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,err_PETSc)
if (bcSize > 0) then if (bcSize > 0) then
call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,ierr) call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call utilities_projectBCValues(x_local,section,0,field-1,bcPoints, & call utilities_projectBCValues(x_local,section,0,field-1,bcPoints, &
0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc) 0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc)
call ISDestroy(bcPoints,ierr); CHKERRQ(ierr) call ISDestroy(bcPoints,err_PETSc); CHKERRQ(err_PETSc)
endif endif
endif endif
enddo; enddo enddo; enddo
call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(dm_local,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)
do cell = cellStart, cellEnd-1 !< loop over all elements do cell = cellStart, cellEnd-1 !< loop over all elements
call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,ierr) !< get Dofs belonging to element call DMPlexVecGetClosure(dm_local,section,x_local,cell,x_scal,err_PETSc) !< get Dofs belonging to element
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,ierr) call DMPlexComputeCellGeometryAffineFEM(dm_local,cell,pV0,pCellJ,pInvcellJ,detJ,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
K_eA = 0.0 K_eA = 0.0
K_eB = 0.0 K_eB = 0.0
MatB = 0.0 MatB = 0.0
@ -558,27 +580,27 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,ierr)
! https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/782230 (bug) ! https://software.intel.com/en-us/forums/intel-fortran-compiler/topic/782230 (bug)
allocate(pK_e(cellDOF**2),source = reshape(K_e,[cellDOF**2])) allocate(pK_e(cellDOF**2),source = reshape(K_e,[cellDOF**2]))
#endif #endif
call DMPlexMatSetClosure(dm_local,section,gSection,Jac,cell,pK_e,ADD_VALUES,ierr) call DMPlexMatSetClosure(dm_local,section,gSection,Jac,cell,pK_e,ADD_VALUES,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,ierr) call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
enddo enddo
call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyBegin(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr) call MatAssemblyEnd(Jac_pre,MAT_FINAL_ASSEMBLY,err_PETSc); CHKERRQ(err_PETSc)
call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! apply boundary conditions ! apply boundary conditions
#if (PETSC_VERSION_MINOR < 14) #if (PETSC_VERSION_MINOR < 14)
call DMPlexCreateRigidBody(dm_local,matnull,ierr); CHKERRQ(ierr) call DMPlexCreateRigidBody(dm_local,matnull,err_PETSc); CHKERRQ(err_PETSc)
#else #else
call DMPlexCreateRigidBody(dm_local,0,matnull,ierr); CHKERRQ(ierr) call DMPlexCreateRigidBody(dm_local,0,matnull,err_PETSc); CHKERRQ(err_PETSc)
#endif #endif
call MatSetNullSpace(Jac,matnull,ierr); CHKERRQ(ierr) call MatSetNullSpace(Jac,matnull,err_PETSc); CHKERRQ(err_PETSc)
call MatSetNearNullSpace(Jac,matnull,ierr); CHKERRQ(ierr) call MatSetNearNullSpace(Jac,matnull,err_PETSc); CHKERRQ(err_PETSc)
call MatNullSpaceDestroy(matnull,ierr); CHKERRQ(ierr) call MatNullSpaceDestroy(matnull,err_PETSc); CHKERRQ(err_PETSc)
end subroutine FEM_mechanical_formJacobian end subroutine FEM_mechanical_formJacobian
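
(Editorial aside, not part of the patch.) Every PETSc call above now reports into a dedicated PetscErrorCode variable, err_PETSc, which is passed straight to CHKERRQ so a failure unwinds with a PETSc stack trace instead of being checked against a default-kind integer. A minimal sketch of that convention, assuming a PETSc installation with Fortran bindings; the program and subroutine names are made up for illustration, and the file must be preprocessed (e.g. a .F90 extension) for the #include to work.

program petsc_errorcode_demo
#include <petsc/finclude/petscsys.h>
  use petscsys
  implicit none
  PetscErrorCode :: err_PETSc                      ! kind comes from PETSc, not default INTEGER

  call PetscInitialize(PETSC_NULL_CHARACTER,err_PETSc)
  if (err_PETSc /= 0) error stop 'PETSc initialization failed'   ! CHKERRQ is meant for subprograms, not the main program
  call report()
  call PetscFinalize(err_PETSc)
  if (err_PETSc /= 0) error stop 'PETSc finalization failed'

contains

  subroutine report()
    PetscErrorCode :: err_PETSc

    call PetscPrintf(PETSC_COMM_WORLD,'PETSc error codes carry their own kind'//new_line('a'),err_PETSc)
    CHKERRQ(err_PETSc)                             ! on failure, records the trace and returns the error code
  end subroutine report

end program petsc_errorcode_demo
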
@ -601,43 +623,43 @@ subroutine FEM_mechanical_forward(guess,timeinc,timeinc_old,fieldBC)
Vec :: x_local Vec :: x_local
PetscSection :: section PetscSection :: section
IS :: bcPoints IS :: bcPoints
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! forward last inc ! forward last inc
if (guess .and. .not. cutBack) then if (guess .and. .not. cutBack) then
ForwardData = .True. ForwardData = .True.
homogenization_F0 = homogenization_F homogenization_F0 = homogenization_F
call SNESGetDM(mechanical_snes,dm_local,ierr); CHKERRQ(ierr) !< retrieve mesh info from mechanical_snes into dm_local call SNESGetDM(mechanical_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc) !< retrieve mesh info from mechanical_snes into dm_local
call DMGetSection(dm_local,section,ierr); CHKERRQ(ierr) call DMGetSection(dm_local,section,err_PETSc); CHKERRQ(err_PETSc)
call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMGetLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
call VecSet(x_local,0.0_pReal,ierr); CHKERRQ(ierr) call VecSet(x_local,0.0_pReal,err_PETSc); CHKERRQ(err_PETSc)
call DMGlobalToLocalBegin(dm_local,solution,INSERT_VALUES,x_local,ierr) !< retrieve my partition of global solution vector call DMGlobalToLocalBegin(dm_local,solution,INSERT_VALUES,x_local,err_PETSc) !< retrieve my partition of global solution vector
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call DMGlobalToLocalEnd(dm_local,solution,INSERT_VALUES,x_local,ierr) call DMGlobalToLocalEnd(dm_local,solution,INSERT_VALUES,x_local,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call VecAXPY(solution_local,1.0,x_local,ierr); CHKERRQ(ierr) call VecAXPY(solution_local,1.0,x_local,err_PETSc); CHKERRQ(err_PETSc)
do field = 1, dimPlex; do face = 1, mesh_Nboundaries do field = 1, dimPlex; do face = 1, mesh_Nboundaries
if (fieldBC%componentBC(field)%Mask(face)) then if (fieldBC%componentBC(field)%Mask(face)) then
call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,ierr) call DMGetStratumSize(dm_local,'Face Sets',mesh_boundaries(face),bcSize,err_PETSc)
if (bcSize > 0) then if (bcSize > 0) then
call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,ierr) call DMGetStratumIS(dm_local,'Face Sets',mesh_boundaries(face),bcPoints,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
call utilities_projectBCValues(solution_local,section,0,field-1,bcPoints, & call utilities_projectBCValues(solution_local,section,0,field-1,bcPoints, &
0.0_pReal,fieldBC%componentBC(field)%Value(face),timeinc_old) 0.0_pReal,fieldBC%componentBC(field)%Value(face),timeinc_old)
call ISDestroy(bcPoints,ierr); CHKERRQ(ierr) call ISDestroy(bcPoints,err_PETSc); CHKERRQ(err_PETSc)
endif endif
endif endif
enddo; enddo enddo; enddo
call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! update rate and forward last inc ! update rate and forward last inc
call VecCopy(solution,solution_rate,ierr); CHKERRQ(ierr) call VecCopy(solution,solution_rate,err_PETSc); CHKERRQ(err_PETSc)
call VecScale(solution_rate,1.0/timeinc_old,ierr); CHKERRQ(ierr) call VecScale(solution_rate,1.0/timeinc_old,err_PETSc); CHKERRQ(err_PETSc)
endif endif
call VecCopy(solution_rate,solution,ierr); CHKERRQ(ierr) call VecCopy(solution_rate,solution,err_PETSc); CHKERRQ(err_PETSc)
call VecScale(solution,timeinc,ierr); CHKERRQ(ierr) call VecScale(solution,timeinc,err_PETSc); CHKERRQ(err_PETSc)
end subroutine FEM_mechanical_forward end subroutine FEM_mechanical_forward
@ -645,20 +667,20 @@ end subroutine FEM_mechanical_forward
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
!> @brief reporting !> @brief reporting
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
subroutine FEM_mechanical_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,ierr) subroutine FEM_mechanical_converged(snes_local,PETScIter,xnorm,snorm,fnorm,reason,dummy,err_PETSc)
SNES :: snes_local SNES :: snes_local
PetscInt :: PETScIter PetscInt :: PETScIter
PetscReal :: xnorm,snorm,fnorm,divTol PetscReal :: xnorm,snorm,fnorm,divTol
SNESConvergedReason :: reason SNESConvergedReason :: reason
PetscObject :: dummy PetscObject :: dummy
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
!-------------------------------------------------------------------------------------------------- !--------------------------------------------------------------------------------------------------
! report ! report
divTol = max(maxval(abs(P_av(1:dimPlex,1:dimPlex)))*num%eps_struct_rtol,num%eps_struct_atol) divTol = max(maxval(abs(P_av(1:dimPlex,1:dimPlex)))*num%eps_struct_rtol,num%eps_struct_atol)
call SNESConvergedDefault(snes_local,PETScIter,xnorm,snorm,fnorm/divTol,reason,dummy,ierr) call SNESConvergedDefault(snes_local,PETScIter,xnorm,snorm,fnorm/divTol,reason,dummy,err_PETSc)
CHKERRQ(ierr) CHKERRQ(err_PETSc)
if (terminallyIll) reason = SNES_DIVERGED_FUNCTION_DOMAIN if (terminallyIll) reason = SNES_DIVERGED_FUNCTION_DOMAIN
print'(/,1x,a,a,i0,a,i0,f0.3)', trim(incInfo), & print'(/,1x,a,a,i0,a,i0,f0.3)', trim(incInfo), &
' @ Iteration ',PETScIter,' mechanical residual norm = ', & ' @ Iteration ',PETScIter,' mechanical residual norm = ', &
@ -690,7 +712,7 @@ subroutine FEM_mechanical_updateCoords()
DM :: dm_local DM :: dm_local
Vec :: x_local Vec :: x_local
PetscErrorCode :: ierr PetscErrorCode :: err_PETSc
PetscInt :: pStart, pEnd, p, s, e, q, & PetscInt :: pStart, pEnd, p, s, e, q, &
cellStart, cellEnd, c, n cellStart, cellEnd, c, n
PetscSection :: section PetscSection :: section
@ -698,34 +720,37 @@ subroutine FEM_mechanical_updateCoords()
PetscReal, dimension(:), pointer :: basisField, basisFieldDer PetscReal, dimension(:), pointer :: basisField, basisFieldDer
PetscScalar, dimension(:), pointer :: x_scal PetscScalar, dimension(:), pointer :: x_scal
call SNESGetDM(mechanical_snes,dm_local,ierr); CHKERRQ(ierr) call SNESGetDM(mechanical_snes,dm_local,err_PETSc); CHKERRQ(err_PETSc)
call DMGetDS(dm_local,mechQuad,ierr); CHKERRQ(ierr) call DMGetDS(dm_local,mechQuad,err_PETSc); CHKERRQ(err_PETSc)
call DMGetLocalSection(dm_local,section,ierr); CHKERRQ(ierr) call DMGetLocalSection(dm_local,section,err_PETSc); CHKERRQ(err_PETSc)
call DMGetLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMGetLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
call DMGetDimension(dm_local,dimPlex,ierr); CHKERRQ(ierr) call DMGetDimension(dm_local,dimPlex,err_PETSc); CHKERRQ(err_PETSc)
! write cell vertex displacements ! write cell vertex displacements
call DMPlexGetDepthStratum(dm_local,0,pStart,pEnd,ierr); CHKERRQ(ierr) call DMPlexGetDepthStratum(dm_local,0,pStart,pEnd,err_PETSc); CHKERRQ(err_PETSc)
allocate(nodeCoords(3,pStart:pEnd-1),source=0.0_pReal) allocate(nodeCoords(3,pStart:pEnd-1),source=0.0_pReal)
call VecGetArrayF90(x_local,nodeCoords_linear,ierr); CHKERRQ(ierr) call VecGetArrayF90(x_local,nodeCoords_linear,err_PETSc); CHKERRQ(err_PETSc)
do p=pStart, pEnd-1 do p=pStart, pEnd-1
call DMPlexGetPointLocal(dm_local, p, s, e, ierr); CHKERRQ(ierr) call DMPlexGetPointLocal(dm_local, p, s, e, err_PETSc); CHKERRQ(err_PETSc)
nodeCoords(1:dimPlex,p)=nodeCoords_linear(s+1:e) nodeCoords(1:dimPlex,p)=nodeCoords_linear(s+1:e)
enddo enddo
call discretization_setNodeCoords(nodeCoords) call discretization_setNodeCoords(nodeCoords)
call VecRestoreArrayF90(x_local,nodeCoords_linear,ierr); CHKERRQ(ierr) call VecRestoreArrayF90(x_local,nodeCoords_linear,err_PETSc); CHKERRQ(err_PETSc)
! write ip displacements ! write ip displacements
call DMPlexGetHeightStratum(dm_local,0,cellStart,cellEnd,ierr); CHKERRQ(ierr) call DMPlexGetHeightStratum(dm_local,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
call PetscDSGetTabulation(mechQuad,0,basisField,basisFieldDer,ierr); CHKERRQ(ierr) CHKERRQ(err_PETSc)
call PetscDSGetTabulation(mechQuad,0_pPETSCINT,basisField,basisFieldDer,err_PETSc)
CHKERRQ(err_PETSc)
allocate(ipCoords(3,nQuadrature,mesh_NcpElems),source=0.0_pReal) allocate(ipCoords(3,nQuadrature,mesh_NcpElems),source=0.0_pReal)
do c=cellStart,cellEnd-1 do c=cellStart,cellEnd-1
qOffset=0 qOffset=0
call DMPlexVecGetClosure(dm_local,section,x_local,c,x_scal,ierr); CHKERRQ(ierr) !< get nodal coordinates of each element call DMPlexVecGetClosure(dm_local,section,x_local,c,x_scal,err_PETSc) !< get nodal coordinates of each element
CHKERRQ(err_PETSc)
do qPt=0,nQuadrature-1 do qPt=0,nQuadrature-1
qOffset= qPt * (size(basisField)/nQuadrature) qOffset= qPt * (size(basisField)/nQuadrature)
do comp=0,dimPlex-1 !< loop over components do comp=0,dimPlex-1 !< loop over components
nOffset=0 nOffset=0
q = comp q = comp
do n=0,nBasis-1 do n=0,nBasis-1
@ -737,10 +762,11 @@ subroutine FEM_mechanical_updateCoords()
enddo enddo
enddo enddo
enddo enddo
call DMPlexVecRestoreClosure(dm_local,section,x_local,c,x_scal,ierr); CHKERRQ(ierr) call DMPlexVecRestoreClosure(dm_local,section,x_local,c,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
end do end do
call discretization_setIPcoords(reshape(ipCoords,[3,mesh_NcpElems*nQuadrature])) call discretization_setIPcoords(reshape(ipCoords,[3,mesh_NcpElems*nQuadrature]))
call DMRestoreLocalVector(dm_local,x_local,ierr); CHKERRQ(ierr) call DMRestoreLocalVector(dm_local,x_local,err_PETSc); CHKERRQ(err_PETSc)
end subroutine FEM_mechanical_updateCoords end subroutine FEM_mechanical_updateCoords


@ -499,7 +499,7 @@ subroutine results_mapping_phase(ID,entry,label)
integer(pI64), dimension(size(entry,1),size(entry,2)) :: & integer(pI64), dimension(size(entry,1),size(entry,2)) :: &
entryGlobal entryGlobal
integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process
integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process
integer(HSIZE_T), dimension(2) :: & integer(HSIZE_T), dimension(2) :: &
myShape, & !< shape of the dataset (this process) myShape, & !< shape of the dataset (this process)