commit 341119d706
Merge branch 'solver-simplification' into 'development'

simplified thermal and damage solvers

See merge request damask/DAMASK!778
@@ -1836,15 +1836,13 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
   integer(HID_T), intent(in) :: loc_id !< file or group handle
   character(len=*), intent(in) :: datasetName !< name of the dataset in the file
   logical, intent(in) :: parallel
-  integer(HSIZE_T), intent(in), dimension(:) :: &
-    localShape
-  integer(HSIZE_T), intent(out), dimension(size(localShape,1)):: &
+  integer(HSIZE_T), intent(in), dimension(:) :: localShape
+  integer(HSIZE_T), intent(out), dimension(size(localShape)) :: &
     myStart, &
     globalShape !< shape of the dataset (all processes)
   integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id, aplist_id

-  integer(MPI_INTEGER_KIND), dimension(worldsize) :: &
-    readSize !< contribution of all processes
+  integer(MPI_INTEGER_KIND), dimension(worldsize) :: readSize !< contribution of all processes
   integer :: hdferr
   integer(MPI_INTEGER_KIND) :: err_MPI

@@ -1860,7 +1858,8 @@ subroutine initialize_read(dset_id, filespace_id, memspace_id, plist_id, aplist_
   if (parallel) then
     call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
     call HDF5_chkerr(hdferr)
-    call MPI_Allreduce(MPI_IN_PLACE,readSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get total output size over each process
+    call MPI_Allgather(int(localShape(ubound(localShape,1)),MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,&
+                       readSize,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
     if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   end if
 #endif
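The hunk above replaces the zero-initialize-then-MPI_Allreduce(SUM) idiom with a single MPI_Allgather: each rank contributes the extent of its slowest dimension and every rank receives the full per-rank size table. A minimal standalone sketch of that pattern, using plain mpi_f08 and hypothetical variable names rather than DAMASK's own kinds and wrappers:

    program gather_sizes
      use mpi_f08
      implicit none

      integer :: rank, nranks, local_n
      integer, allocatable :: read_size(:)

      call MPI_Init()
      call MPI_Comm_rank(MPI_COMM_WORLD, rank)
      call MPI_Comm_size(MPI_COMM_WORLD, nranks)

      local_n = rank + 1                 ! stand-in for localShape(last dim) on this rank
      allocate(read_size(nranks))

      ! one collective replaces "read_size = 0; read_size(rank+1) = local_n; Allreduce(SUM)"
      call MPI_Allgather(local_n, 1, MPI_INTEGER, read_size, 1, MPI_INTEGER, MPI_COMM_WORLD)

      if (rank == 0) print '(a,*(1x,i0))', 'per-rank sizes:', read_size
      call MPI_Finalize()
    end program gather_sizes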
@@ -1930,15 +1929,14 @@ end subroutine finalize_read
 !--------------------------------------------------------------------------------------------------
 subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
                             myStart, totalShape, &
-                            loc_id,myShape,datasetName,datatype,parallel)
+                            loc_id,localShape,datasetName,datatype,parallel)

   integer(HID_T), intent(in) :: loc_id !< file or group handle
   character(len=*), intent(in) :: datasetName !< name of the dataset in the file
   logical, intent(in) :: parallel
   integer(HID_T), intent(in) :: datatype
-  integer(HSIZE_T), intent(in), dimension(:) :: &
-    myShape
-  integer(HSIZE_T), intent(out), dimension(size(myShape,1)):: &
+  integer(HSIZE_T), intent(in), dimension(:) :: localShape
+  integer(HSIZE_T), intent(out), dimension(size(localShape)) :: &
     myStart, &
     totalShape !< shape of the dataset (all processes)
   integer(HID_T), intent(out) :: dset_id, filespace_id, memspace_id, plist_id

@@ -1964,16 +1962,17 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
 !--------------------------------------------------------------------------------------------------
 ! determine the global data layout among all processes
   writeSize = 0_MPI_INTEGER_KIND
-  writeSize(worldrank+1) = int(myShape(ubound(myShape,1)),MPI_INTEGER_KIND)
+  writeSize(worldrank+1) = int(localShape(ubound(localShape,1)),MPI_INTEGER_KIND)
 #ifdef PETSC
   if (parallel) then
-    call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get total output size over each process
+    call MPI_Allgather(int(localShape(ubound(localShape,1)),MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,&
+                       writeSize,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
     if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   end if
 #endif
   myStart = int(0,HSIZE_T)
   myStart(ubound(myStart)) = int(sum(writeSize(1:worldrank)),HSIZE_T)
-  totalShape = [myShape(1:ubound(myShape,1)-1),int(sum(writeSize),HSIZE_T)]
+  totalShape = [localShape(1:ubound(localShape,1)-1),int(sum(writeSize),HSIZE_T)]

 !--------------------------------------------------------------------------------------------------
 ! chunk dataset, enable compression for larger datasets
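With the per-rank sizes gathered into writeSize, the hyperslab layout follows from prefix sums: a rank's offset along the slowest dimension is the sum of the extents of all lower ranks, and the global extent is the sum over all ranks. A small self-contained illustration of that arithmetic with made-up numbers (hypothetical names, independent of the HDF5 calls):

    program layout_demo
      implicit none

      integer, parameter :: worldsize = 4
      integer :: write_size(worldsize) = [5, 3, 7, 2]   ! gathered last-dimension extents
      integer :: local_shape(3) = [8, 8, 3]             ! this rank's local dataset shape
      integer :: my_start(3), total_shape(3), worldrank

      worldrank = 1                                      ! 0-based MPI rank of this process
      my_start = 0
      my_start(3) = sum(write_size(1:worldrank))         ! offset along the slowest dimension
      total_shape = [local_shape(1:2), sum(write_size)]  ! global shape as seen by the file

      print '(a,3(1x,i0))', 'myStart:   ', my_start
      print '(a,3(1x,i0))', 'totalShape:', total_shape
    end program layout_demo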
@@ -2001,7 +2000,7 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &

 !--------------------------------------------------------------------------------------------------
 ! create dataspace in memory (local shape) and in file (global shape)
-  call H5Screate_simple_f(size(myShape), myShape, memspace_id, hdferr, myShape)
+  call H5Screate_simple_f(size(localShape), localShape, memspace_id, hdferr, localShape)
   call HDF5_chkerr(hdferr)
   call H5Screate_simple_f(size(totalShape), totalShape, filespace_id, hdferr, totalShape)
   call HDF5_chkerr(hdferr)

@@ -2010,7 +2009,7 @@ subroutine initialize_write(dset_id, filespace_id, memspace_id, plist_id, &
 ! create dataset in the file and select a hyperslab from it (the portion of the current process)
   call H5Dcreate_f(loc_id, trim(datasetName), datatype, filespace_id, dset_id, hdferr, dcpl)
   call HDF5_chkerr(hdferr)
-  call H5Sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, myShape, hdferr)
+  call H5Sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myStart, localShape, hdferr)
   call HDF5_chkerr(hdferr)

   call H5Pclose_f(dcpl , hdferr)
@@ -372,7 +372,7 @@ program DAMASK_grid
   end if
   Delta_t = Delta_t * real(subStepFactor,pREAL)**real(-cutBackLevel,pREAL) ! depending on cut back level, decrease time step

   skipping: if (totalIncsCounter <= CLI_restartInc) then ! not yet at restart inc?
     t = t + Delta_t ! just advance time, skip already performed calculation
     guess = .true. ! QUESTION:why forced guessing instead of inheriting loadcase preference
   else skipping
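Each cutback level scales the time increment by 1/subStepFactor, so Delta_t shrinks geometrically as cutBackLevel grows. A small numeric illustration of that formula with hypothetical values:

    program cutback_demo
      use, intrinsic :: iso_fortran_env, only: real64
      implicit none

      integer, parameter :: subStepFactor = 2
      real(real64) :: Delta_t
      integer :: cutBackLevel

      Delta_t = 1.0_real64
      do cutBackLevel = 0, 3
        print '(a,i0,a,f8.5)', 'cutBackLevel = ', cutBackLevel, '  Delta_t = ', &
              Delta_t * real(subStepFactor,real64)**real(-cutBackLevel,real64)
      end do
    end program cutback_demo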
@@ -68,7 +68,7 @@ subroutine discretization_grid_init(restart)
     j
   integer(MPI_INTEGER_KIND) :: err_MPI
   integer(C_INTPTR_T) :: &
-    devNull, z, z_offset
+    devNull, cells3_, cells3Offset_
   integer, dimension(worldsize) :: &
     displs, sendcounts
   character(len=:), allocatable :: &

@@ -113,12 +113,12 @@ subroutine discretization_grid_init(restart)
   call fftw_mpi_init()
   devNull = fftw_mpi_local_size_3d(int(cells(3),C_INTPTR_T),int(cells(2),C_INTPTR_T),int(cells(1)/2+1,C_INTPTR_T), &
                                    PETSC_COMM_WORLD, &
-                                   z, & ! domain cells size along z
-                                   z_offset) ! domain cells offset along z
-  if (z==0_C_INTPTR_T) call IO_error(894, ext_msg='Cannot distribute MPI processes')
+                                   cells3_, & ! domain cells size along z
+                                   cells3Offset_) ! domain cells offset along z
+  if (cells3_==0_C_INTPTR_T) call IO_error(894, ext_msg='Cannot distribute MPI processes')

-  cells3 = int(z)
-  cells3Offset = int(z_offset)
+  cells3 = int(cells3_)
+  cells3Offset = int(cells3Offset_)
   size3 = geomSize(3)*real(cells3,pREAL) /real(cells(3),pREAL)
   size3Offset = geomSize(3)*real(cells3Offset,pREAL)/real(cells(3),pREAL)
   myGrid = [cells(1:2),cells3]
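fftw_mpi_local_size_3d splits the slowest (z) dimension across ranks and reports each rank's local extent (cells3_) and offset (cells3Offset_); a rank that ends up with zero cells triggers IO_error(894). The sketch below only mimics a balanced block distribution to show how local size and offset relate to the global cell count; it does not call FFTW, the names are hypothetical, and FFTW's actual split may differ for non-divisible counts:

    program slab_demo
      implicit none

      integer, parameter :: cells_z = 14, worldsize = 4
      integer :: rank, cells3, cells3_offset

      do rank = 0, worldsize-1
        ! block distribution: the first mod(cells_z,worldsize) ranks get one extra layer
        cells3 = cells_z/worldsize + merge(1, 0, rank < mod(cells_z, worldsize))
        cells3_offset = rank*(cells_z/worldsize) + min(rank, mod(cells_z, worldsize))
        print '(a,i0,a,i0,a,i0)', 'rank ', rank, ': cells3 = ', cells3, ', offset = ', cells3_offset
      end do
    end program slab_demo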
@@ -48,9 +48,8 @@ module grid_damage_spectral
 !--------------------------------------------------------------------------------------------------
 ! PETSc data
   SNES :: SNES_damage
-  Vec :: solution_vec
+  Vec :: phi_PETSc
   real(pREAL), dimension(:,:,:), allocatable :: &
-    phi, & !< field of current damage
     phi_lastInc, & !< field of previous damage
     phi_stagInc !< field of staggered damage

@@ -75,10 +74,10 @@ subroutine grid_damage_spectral_init(num_grid)

   type(tDict), pointer, intent(in) :: num_grid

-  PetscInt, dimension(0:worldsize-1) :: localK
+  integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global
   integer :: i, j, k, ce
-  DM :: damage_grid
-  real(pREAL), dimension(:,:,:), pointer :: phi_PETSc
+  DM :: DM_damage
+  real(pREAL), dimension(:,:,:), pointer :: phi ! 0-indexed
   Vec :: uBound, lBound
   integer(MPI_INTEGER_KIND) :: err_MPI
   PetscErrorCode :: err_PETSc
@@ -93,6 +92,7 @@ subroutine grid_damage_spectral_init(num_grid)
     petsc_options


+
   print'(/,1x,a)', '<<<+- grid_spectral_damage init -+>>>'

   print'(/,1x,a)', 'P. Shanthraj et al., Handbook of Mechanics of Materials, 2019'
@@ -124,40 +124,33 @@ subroutine grid_damage_spectral_init(num_grid)
   call PetscOptionsInsertString(PETSC_NULL_OPTIONS,petsc_options,err_PETSc)
   CHKERRQ(err_PETSc)

-!--------------------------------------------------------------------------------------------------
-! init fields
-  phi = discretization_grid_getInitialCondition('phi')
-  phi_lastInc = phi
-  phi_stagInc = phi
-
 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
   call SNESCreate(PETSC_COMM_WORLD,SNES_damage,err_PETSc)
   CHKERRQ(err_PETSc)
   call SNESSetOptionsPrefix(SNES_damage,'damage_',err_PETSc)
   CHKERRQ(err_PETSc)
-  localK = 0_pPetscInt
-  localK(worldrank) = int(cells3,pPetscInt)
-  call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+  call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,&
+                     cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   call DMDACreate3D(PETSC_COMM_WORLD, &
                     DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
                     DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
-                    int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells
-                    1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
-                    1_pPetscInt, 0_pPetscInt, & ! #dof (phi, scalar), ghost boundary width (domain overlap)
-                    [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells
-                    damage_grid,err_PETSc) ! handle, error
+                    int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells
+                    1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), &
+                    1_pPETSCINT, 0_pPETSCINT, & ! #dof (phi, scalar), ghost boundary width (domain overlap)
+                    [int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],int(cells3_global,pPETSCINT), & ! local cells
+                    DM_damage,err_PETSc) ! handle, error
   CHKERRQ(err_PETSc)
-  call DMsetFromOptions(damage_grid,err_PETSc)
+  call DMsetFromOptions(DM_damage,err_PETSc)
   CHKERRQ(err_PETSc)
-  call DMsetUp(damage_grid,err_PETSc)
+  call DMsetUp(DM_damage,err_PETSc)
   CHKERRQ(err_PETSc)
-  call DMCreateGlobalVector(damage_grid,solution_vec,err_PETSc) ! global solution vector (cells x 1, i.e. every def grad tensor)
+  call DMCreateGlobalVector(DM_damage,phi_PETSc,err_PETSc) ! global solution vector (cells x 1, i.e. every def grad tensor)
   CHKERRQ(err_PETSc)
-  call DMDASNESSetFunctionLocal(damage_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
+  call DMDASNESSetFunctionLocal(DM_damage,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
   CHKERRQ(err_PETSc)
-  call SNESSetDM(SNES_damage,damage_grid,err_PETSc)
+  call SNESSetDM(SNES_damage,DM_damage,err_PETSc)
   CHKERRQ(err_PETSc)
   call SNESSetFromOptions(SNES_damage,err_PETSc) ! pull it all together with additional CLI arguments
   CHKERRQ(err_PETSc)
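The same Allgather idiom replaces the localK/Allreduce bookkeeping here: every rank's cells3 is gathered into cells3_global, which is then passed (converted to pPETSCINT) as the per-rank z-extents of the DMDA, while x and y stay undecomposed. A condensed sketch of the relevant lines, following the new code above; it is not compilable on its own and assumes the module's cells, cells3, worldsize, and the PETSc/MPI setup:

      ! gather every rank's local z-extent; entry r+1 belongs to MPI rank r
      call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,&
                         cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
      if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

      ! the DMDA receives the full per-rank distribution along z as its ownership array
      call DMDACreate3D(PETSC_COMM_WORLD, &
                        DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &
                        DMDA_STENCIL_BOX, &
                        int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), &
                        1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), &
                        1_pPETSCINT, 0_pPETSCINT, &
                        [int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), &
                        DM_damage,err_PETSc)
      CHKERRQ(err_PETSc)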
@@ -165,9 +158,9 @@ subroutine grid_damage_spectral_init(num_grid)
   CHKERRQ(err_PETSc)
   if (trim(snes_type) == 'vinewtonrsls' .or. &
       trim(snes_type) == 'vinewtonssls') then
-    call DMGetGlobalVector(damage_grid,lBound,err_PETSc)
+    call DMGetGlobalVector(DM_damage,lBound,err_PETSc)
     CHKERRQ(err_PETSc)
-    call DMGetGlobalVector(damage_grid,uBound,err_PETSc)
+    call DMGetGlobalVector(DM_damage,uBound,err_PETSc)
     CHKERRQ(err_PETSc)
     call VecSet(lBound,0.0_pREAL,err_PETSc)
     CHKERRQ(err_PETSc)
@@ -175,12 +168,15 @@ subroutine grid_damage_spectral_init(num_grid)
     CHKERRQ(err_PETSc)
     call SNESVISetVariableBounds(SNES_damage,lBound,uBound,err_PETSc) ! variable bounds for variational inequalities
     CHKERRQ(err_PETSc)
-    call DMRestoreGlobalVector(damage_grid,lBound,err_PETSc)
+    call DMRestoreGlobalVector(DM_damage,lBound,err_PETSc)
     CHKERRQ(err_PETSc)
-    call DMRestoreGlobalVector(damage_grid,uBound,err_PETSc)
+    call DMRestoreGlobalVector(DM_damage,uBound,err_PETSc)
     CHKERRQ(err_PETSc)
   end if

+  call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc) ! returns 0-indexed phi
+  CHKERRQ(err_PETSc)
+
   restartRead: if (CLI_restartInc > 0) then
     print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc

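Instead of keeping a separate allocatable phi and copying it into the PETSc vector, the solver now works directly on the vector through a DMDA array pointer; as the source comment notes, the pointer is 0-indexed, which is why the loops below run from 0 to cells-1 and why phi_lastInc is filled from phi(0:,0:,0:). The access pattern, condensed from the new code above (a sketch, not self-contained):

      call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)     ! pointer into the solution vector; 0-indexed per the source comment
      CHKERRQ(err_PETSc)

      ! ... read or modify phi in place; the data lives in the PETSc vector itself ...

      call DMDAVecRestoreArrayF90(DM_damage,phi_PETSc,phi,err_PETSc) ! hand the pointer back to PETSc
      CHKERRQ(err_PETSc)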
@@ -191,18 +187,20 @@ subroutine grid_damage_spectral_init(num_grid)
     phi = reshape(tempN,[cells(1),cells(2),cells3])
     call HDF5_read(tempN,groupHandle,'phi_lastInc',.false.)
     phi_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
+    phi_stagInc = phi_lastInc
+  else
+    phi = discretization_grid_getInitialCondition('phi')
+    phi_lastInc = phi(0:,0:,0:)
+    phi_stagInc = phi_lastInc
   end if restartRead

   ce = 0
-  do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
+  do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0, cells(1)-1
     ce = ce + 1
     call homogenization_set_phi(phi(i,j,k),ce)
   end do; end do; end do

-  call DMDAVecGetArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
-  CHKERRQ(err_PETSc)
-  phi_PETSc = phi
-  call DMDAVecRestoreArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
+  call DMDAVecRestoreArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)
   CHKERRQ(err_PETSc)

   call updateReference()
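The triple loop visits the local cells in memory order and pairs each (i,j,k) with a running 1-based cell index ce for homogenization_set_phi; switching the loop bounds to 0-based only changes how the array is indexed, not which cells are visited. A tiny self-contained illustration of that mapping with hypothetical sizes:

    program cell_index_demo
      implicit none

      integer, parameter :: nx = 2, ny = 2, nz3 = 2      ! stand-ins for cells(1), cells(2), cells3
      real :: phi(0:nx-1, 0:ny-1, 0:nz3-1)
      integer :: i, j, k, ce

      call random_number(phi)

      ce = 0
      do k = 0, nz3-1; do j = 0, ny-1; do i = 0, nx-1
        ce = ce + 1                                      ! 1-based linear cell id, fastest index first
        print '(a,i0,a,3(1x,i0),a,f6.3)', 'ce=', ce, '  (i,j,k)=', i, j, k, '  phi=', phi(i,j,k)
      end do; end do; end do
    end program cell_index_demo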
@@ -217,37 +215,43 @@ function grid_damage_spectral_solution(Delta_t) result(solution)

   real(pREAL), intent(in) :: &
     Delta_t !< increment in time for current solution

   integer :: i, j, k, ce
   type(tSolutionState) :: solution
   PetscInt :: devNull
   PetscReal :: phi_min, phi_max, stagNorm
+  DM :: DM_damage
+  real(pREAL), dimension(:,:,:), pointer :: phi ! 0-indexed
   integer(MPI_INTEGER_KIND) :: err_MPI
   PetscErrorCode :: err_PETSc
   SNESConvergedReason :: reason


   solution%converged = .false.

 !--------------------------------------------------------------------------------------------------
 ! set module wide availabe data
   params%Delta_t = Delta_t

-  call SNESSolve(SNES_damage,PETSC_NULL_VEC,solution_vec,err_PETSc)
+  call SNESSolve(SNES_damage,PETSC_NULL_VEC,phi_PETSc,err_PETSc)
   CHKERRQ(err_PETSc)
   call SNESGetConvergedReason(SNES_damage,reason,err_PETSc)
   CHKERRQ(err_PETSc)

-  if (reason < 1) then
-    solution%converged = .false.
-    solution%iterationsNeeded = num%itmax
-  else
-    solution%converged = .true.
-    solution%iterationsNeeded = totalIter
-  end if
+  solution%converged = reason > 0
+  solution%iterationsNeeded = merge(totalIter,num%itmax,solution%converged)
+
+  call SNESGetDM(SNES_damage,DM_damage,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc) ! returns 0-indexed phi
+  CHKERRQ(err_PETSc)

+  phi_min = minval(phi)
+  phi_max = maxval(phi)
   stagNorm = maxval(abs(phi - phi_stagInc))
   call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-  solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi))
+  solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*phi_max)
   call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
   phi_stagInc = phi
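The if/else block collapses into two assignments: a SNES converged reason is positive on success, and merge picks totalIter or num%itmax accordingly. A minimal self-contained illustration of the merge intrinsic used this way, with hypothetical values:

    program merge_demo
      implicit none

      integer, parameter :: itmax = 25
      integer :: reason, totalIter, iterationsNeeded
      logical :: converged

      reason    = 3          ! pretend SNESGetConvergedReason returned a positive (converged) code
      totalIter = 7

      converged = reason > 0
      iterationsNeeded = merge(totalIter, itmax, converged)   ! totalIter if converged, itmax otherwise

      print '(a,l1,a,i0)', 'converged = ', converged, ', iterations = ', iterationsNeeded
    end program merge_demo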
@@ -255,15 +259,14 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
 !--------------------------------------------------------------------------------------------------
 ! updating damage state
   ce = 0
-  do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
+  do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0,cells(1)-1
     ce = ce + 1
     call homogenization_set_phi(phi(i,j,k),ce)
   end do; end do; end do

-  call VecMin(solution_vec,devNull,phi_min,err_PETSc)
-  CHKERRQ(err_PETSc)
-  call VecMax(solution_vec,devNull,phi_max,err_PETSc)
+  call DMDAVecRestoreArrayF90(DM_damage,phi_PETSc,phi,err_PETSc)
   CHKERRQ(err_PETSc)

   if (solution%converged) &
     print'(/,1x,a)', '... nonlocal damage converged .....................................'
   print'(/,1x,a,f8.6,2x,f8.6,2x,e11.4)', 'Minimum|Maximum|Delta Damage = ', phi_min, phi_max, stagNorm
@@ -274,35 +277,31 @@ end function grid_damage_spectral_solution


 !--------------------------------------------------------------------------------------------------
-!> @brief spectral damage forwarding routine
+!> @brief Set DAMASK data to current solver status.
 !--------------------------------------------------------------------------------------------------
 subroutine grid_damage_spectral_forward(cutBack)

   logical, intent(in) :: cutBack

   integer :: i, j, k, ce
-  DM :: dm_local
-  real(pREAL), dimension(:,:,:), pointer :: phi_PETSc
+  DM :: DM_damage
+  real(pREAL), dimension(:,:,:), pointer :: phi ! 0-indexed
   PetscErrorCode :: err_PETSc


+  call SNESGetDM(SNES_damage,DM_damage,err_PETSc)
+  CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(DM_damage,phi_PETSc,phi,err_PETSc) ! returns 0-indexed T
+  CHKERRQ(err_PETSc)
+
   if (cutBack) then
-    phi = phi_lastInc
-    phi_stagInc = phi_lastInc
-!--------------------------------------------------------------------------------------------------
-! reverting damage field state
-    call SNESGetDM(SNES_damage,dm_local,err_PETSc)
-    CHKERRQ(err_PETSc)
-    call DMDAVecGetArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc) !< get the data out of PETSc to work with
-    CHKERRQ(err_PETSc)
-    phi_PETSc = phi
-    call DMDAVecRestoreArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc)
-    CHKERRQ(err_PETSc)
     ce = 0
     do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
       ce = ce + 1
-      call homogenization_set_phi(phi(i,j,k),ce)
+      call homogenization_set_phi(phi_lastInc(i,j,k),ce)
     end do; end do; end do
+    phi = phi_lastInc
+    phi_stagInc = phi_lastInc
   else
     phi_lastInc = phi
     call updateReference()
@@ -317,13 +316,14 @@ end subroutine grid_damage_spectral_forward
 subroutine grid_damage_spectral_restartWrite()

   PetscErrorCode :: err_PETSc
-  DM :: dm_local
+  DM :: DM_damage
   integer(HID_T) :: fileHandle, groupHandle
-  PetscScalar, dimension(:,:,:), pointer :: phi
+  real(pREAL), dimension(:,:,:), pointer :: phi ! 0-indexed

-  call SNESGetDM(SNES_damage,dm_local,err_PETSc);
+
+  call SNESGetDM(SNES_damage,DM_damage,err_PETSc)
   CHKERRQ(err_PETSc)
-  call DMDAVecGetArrayReadF90(dm_local,solution_vec,phi,err_PETSc);
+  call DMDAVecGetArrayReadF90(DM_damage,phi_PETSc,phi,err_PETSc) ! returns 0-indexed T
   CHKERRQ(err_PETSc)

   print'(1x,a)', 'saving damage solver data required for restart'; flush(IO_STDOUT)
@@ -335,7 +335,7 @@ subroutine grid_damage_spectral_restartWrite()
   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)

-  call DMDAVecRestoreArrayReadF90(dm_local,solution_vec,phi,err_PETSc);
+  call DMDAVecRestoreArrayReadF90(DM_damage,phi_PETSc,phi,err_PETSc);
   CHKERRQ(err_PETSc)

 end subroutine grid_damage_spectral_restartWrite
@@ -359,24 +359,25 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc)
   real(pREAL), dimension(3,cells(1),cells(2),cells3) :: vectorField

+
-  phi = x_scal
+  associate(phi => x_scal)
   vectorField = utilities_ScalarGradient(phi)
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
     ce = ce + 1
     vectorField(1:3,i,j,k) = matmul(homogenization_K_phi(ce) - K_ref, vectorField(1:3,i,j,k))
   end do; end do; end do
   r = utilities_VectorDivergence(vectorField)
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
     ce = ce + 1
     r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_phi(phi(i,j,k),ce)) &
              + homogenization_mu_phi(ce)*(phi_lastInc(i,j,k) - phi(i,j,k)) &
              + mu_ref*phi(i,j,k)
   end do; end do; end do

   r = max(min(utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t),phi_lastInc),num%phi_min) &
     - phi
+  end associate
   err_PETSc = 0

 end subroutine formResidual

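The residual no longer copies x_scal into a module array; associate simply gives the incoming buffer the local name phi for the duration of the routine, so the field is read in place instead of being duplicated. A small self-contained example of the construct (generic illustration, not the solver's residual):

    program associate_demo
      implicit none

      real :: x_scal(2,2,2)
      call random_number(x_scal)

      ! "phi" is an alias for x_scal inside the block: no temporary array, no copy-back needed
      associate(phi => x_scal)
        print '(a,f6.3)', 'max phi before: ', maxval(phi)
        phi = min(phi, 0.5)                 ! writes through to x_scal
      end associate

      print '(a,f6.3)', 'max phi after:  ', maxval(x_scal)
    end program associate_demo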
@ -53,9 +53,9 @@ module grid_mechanical_FEM
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! PETSc data
|
! PETSc data
|
||||||
DM :: mechanical_grid
|
DM :: DM_mech
|
||||||
SNES :: SNES_mechanical
|
SNES :: SNES_mech
|
||||||
Vec :: solution_current, solution_lastInc, solution_rate
|
Vec :: u_PETSc, u_lastInc_PETSc, uDot_PETSc
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! common pointwise data
|
! common pointwise data
|
||||||
|
@ -118,7 +118,7 @@ subroutine grid_mechanical_FEM_init(num_grid)
|
||||||
integer(MPI_INTEGER_KIND) :: err_MPI
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
PetscScalar, pointer, dimension(:,:,:,:) :: &
|
PetscScalar, pointer, dimension(:,:,:,:) :: &
|
||||||
u,u_lastInc
|
u,u_lastInc
|
||||||
PetscInt, dimension(0:worldsize-1) :: localK
|
integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global
|
||||||
integer(HID_T) :: fileHandle, groupHandle
|
integer(HID_T) :: fileHandle, groupHandle
|
||||||
type(tDict), pointer :: &
|
type(tDict), pointer :: &
|
||||||
num_grid_mech
|
num_grid_mech
|
||||||
|
@ -166,59 +166,58 @@ subroutine grid_mechanical_FEM_init(num_grid)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! initialize solver specific parts of PETSc
|
! initialize solver specific parts of PETSc
|
||||||
call SNESCreate(PETSC_COMM_WORLD,SNES_mechanical,err_PETSc)
|
call SNESCreate(PETSC_COMM_WORLD,SNES_mech,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetOptionsPrefix(SNES_mechanical,'mechanical_',err_PETSc)
|
call SNESSetOptionsPrefix(SNES_mech,'mechanical_',err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
localK = 0_pPetscInt
|
call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,&
|
||||||
localK(worldrank) = int(cells3,pPetscInt)
|
cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
|
|
||||||
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call DMDACreate3d(PETSC_COMM_WORLD, &
|
call DMDACreate3d(PETSC_COMM_WORLD, &
|
||||||
DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, &
|
DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, DM_BOUNDARY_PERIODIC, &
|
||||||
DMDA_STENCIL_BOX, &
|
DMDA_STENCIL_BOX, &
|
||||||
int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells
|
int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells
|
||||||
1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
|
1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), &
|
||||||
3_pPetscInt, 1_pPetscInt, & ! #dof (u, vector), ghost boundary width (domain overlap)
|
3_pPETSCINT, 1_pPETSCINT, & ! #dof (u, vector), ghost boundary width (domain overlap)
|
||||||
[int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells
|
[int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells
|
||||||
mechanical_grid,err_PETSc)
|
DM_mech,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMsetFromOptions(mechanical_grid,err_PETSc)
|
call DMsetFromOptions(DM_mech,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMsetUp(mechanical_grid,err_PETSc)
|
call DMsetUp(DM_mech,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMDASetUniformCoordinates(mechanical_grid,0.0_pREAL,geomSize(1),0.0_pREAL,geomSize(2),0.0_pREAL,geomSize(3),err_PETSc)
|
call DMDASetUniformCoordinates(DM_mech,0.0_pREAL,geomSize(1),0.0_pREAL,geomSize(2),0.0_pREAL,geomSize(3),err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMCreateGlobalVector(mechanical_grid,solution_current,err_PETSc)
|
call DMCreateGlobalVector(DM_mech,u_PETSc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMCreateGlobalVector(mechanical_grid,solution_lastInc,err_PETSc)
|
call DMCreateGlobalVector(DM_mech,u_lastInc_PETSc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMCreateGlobalVector(mechanical_grid,solution_rate ,err_PETSc)
|
call DMCreateGlobalVector(DM_mech,uDot_PETSc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMSNESSetFunctionLocal(mechanical_grid,formResidual,PETSC_NULL_SNES,err_PETSc)
|
call DMSNESSetFunctionLocal(DM_mech,formResidual,PETSC_NULL_SNES,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMSNESSetJacobianLocal(mechanical_grid,formJacobian,PETSC_NULL_SNES,err_PETSc)
|
call DMSNESSetJacobianLocal(DM_mech,formJacobian,PETSC_NULL_SNES,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetConvergenceTest(SNES_mechanical,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "_converged"
|
call SNESSetConvergenceTest(SNES_mech,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "_converged"
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetMaxLinearSolveFailures(SNES_mechanical, huge(1_pPetscInt), err_PETSc) ! ignore linear solve failures
|
call SNESSetMaxLinearSolveFailures(SNES_mech, huge(1_pPETSCINT), err_PETSc) ! ignore linear solve failures
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetDM(SNES_mechanical,mechanical_grid,err_PETSc)
|
call SNESSetDM(SNES_mech,DM_mech,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetFromOptions(SNES_mechanical,err_PETSc) ! pull it all together with additional cli arguments
|
call SNESSetFromOptions(SNES_mech,err_PETSc) ! pull it all together with additional cli arguments
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! init fields
|
! init fields
|
||||||
call VecSet(solution_current,0.0_pREAL,err_PETSc)
|
call VecSet(u_PETSc,0.0_pREAL,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call VecSet(solution_lastInc,0.0_pREAL,err_PETSc)
|
call VecSet(u_lastInc_PETSc,0.0_pREAL,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call VecSet(solution_rate ,0.0_pREAL,err_PETSc)
|
call VecSet(uDot_PETSc ,0.0_pREAL,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
|
call DMDAVecGetArrayF90(DM_mech,u_PETSc,u,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
|
call DMDAVecGetArrayF90(DM_mech,u_lastInc_PETSc,u_lastInc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
delta = geomSize/real(cells,pREAL) ! grid spacing
|
delta = geomSize/real(cells,pREAL) ! grid spacing
|
||||||
|
@ -275,9 +274,9 @@ subroutine grid_mechanical_FEM_init(num_grid)
|
||||||
call utilities_constitutiveResponse(P_current,P_av,C_volAvg,devNull, & ! stress field, stress avg, global average of stiffness and (min+max)/2
|
call utilities_constitutiveResponse(P_current,P_av,C_volAvg,devNull, & ! stress field, stress avg, global average of stiffness and (min+max)/2
|
||||||
F, & ! target F
|
F, & ! target F
|
||||||
0.0_pREAL) ! time increment
|
0.0_pREAL) ! time increment
|
||||||
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
|
call DMDAVecRestoreArrayF90(DM_mech,u_PETSc,u,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
|
call DMDAVecRestoreArrayF90(DM_mech,u_lastInc_PETSc,u_lastInc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
restartRead2: if (CLI_restartInc > 0) then
|
restartRead2: if (CLI_restartInc > 0) then
|
||||||
|
@ -319,9 +318,9 @@ function grid_mechanical_FEM_solution(incInfoIn) result(solution)
|
||||||
! update stiffness (and gamma operator)
|
! update stiffness (and gamma operator)
|
||||||
S = utilities_maskedCompliance(params%rotation_BC,params%stress_mask,C_volAvg)
|
S = utilities_maskedCompliance(params%rotation_BC,params%stress_mask,C_volAvg)
|
||||||
|
|
||||||
call SNESsolve(SNES_mechanical,PETSC_NULL_VEC,solution_current,err_PETSc)
|
call SNESsolve(SNES_mech,PETSC_NULL_VEC,u_PETSc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESGetConvergedReason(SNES_mechanical,reason,err_PETSc)
|
call SNESGetConvergedReason(SNES_mech,reason,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
solution%converged = reason > 0
|
solution%converged = reason > 0
|
||||||
|
@ -378,15 +377,15 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
|
||||||
end if
|
end if
|
||||||
|
|
||||||
if (guess) then
|
if (guess) then
|
||||||
call VecWAXPY(solution_rate,-1.0_pREAL,solution_lastInc,solution_current,err_PETSc)
|
call VecWAXPY(uDot_PETSc,-1.0_pREAL,u_lastInc_PETSc,u_PETSc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call VecScale(solution_rate,1.0_pREAL/Delta_t_old,err_PETSc)
|
call VecScale(uDot_PETSc,1.0_pREAL/Delta_t_old,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
else
|
else
|
||||||
call VecSet(solution_rate,0.0_pREAL,err_PETSc)
|
call VecSet(uDot_PETSc,0.0_pREAL,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
end if
|
end if
|
||||||
call VecCopy(solution_current,solution_lastInc,err_PETSc)
|
call VecCopy(u_PETSc,u_lastInc_PETSc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
F_lastInc = F
|
F_lastInc = F
|
||||||
|
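The renamed vectors make the forwarding scheme easier to read: on a guessed step the rate is uDot = (u - u_lastInc)/Delta_t_old (VecWAXPY followed by VecScale), otherwise it is zeroed, and the predictor applied afterwards is u = u + Delta_t*uDot (VecAXPY). The same arithmetic on plain arrays, as a self-contained sketch with hypothetical sizes and values:

    program forward_demo
      use, intrinsic :: iso_fortran_env, only: real64
      implicit none

      real(real64), dimension(4) :: u, u_lastInc, uDot
      real(real64) :: Delta_t, Delta_t_old
      logical :: guess

      u           = [1.0_real64, 2.0_real64, 3.0_real64, 4.0_real64]
      u_lastInc   = [0.5_real64, 1.5_real64, 2.5_real64, 3.5_real64]
      Delta_t_old = 0.1_real64
      Delta_t     = 0.2_real64
      guess       = .true.

      if (guess) then
        uDot = (u - u_lastInc)/Delta_t_old   ! VecWAXPY(uDot,-1,u_lastInc,u) then VecScale(uDot,1/Delta_t_old)
      else
        uDot = 0.0_real64                    ! VecSet(uDot,0)
      end if
      u_lastInc = u                          ! VecCopy(u,u_lastInc)

      u = u + Delta_t*uDot                   ! VecAXPY(u,Delta_t,uDot)
      print '(a,4f7.2)', 'forwarded u:', u
    end program forward_demo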
@ -402,7 +401,7 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
|
||||||
if (stress_BC%myType=='dot_P') P_aim = P_aim &
|
if (stress_BC%myType=='dot_P') P_aim = P_aim &
|
||||||
+ merge(.0_pREAL,stress_BC%values,stress_BC%mask)*Delta_t
|
+ merge(.0_pREAL,stress_BC%values,stress_BC%mask)*Delta_t
|
||||||
|
|
||||||
call VecAXPY(solution_current,Delta_t,solution_rate,err_PETSc)
|
call VecAXPY(u_PETSc,Delta_t,uDot_PETSc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
@ -434,9 +433,9 @@ subroutine grid_mechanical_FEM_restartWrite()
|
||||||
PetscScalar, dimension(:,:,:,:), pointer :: u,u_lastInc
|
PetscScalar, dimension(:,:,:,:), pointer :: u,u_lastInc
|
||||||
|
|
||||||
|
|
||||||
call DMDAVecGetArrayReadF90(mechanical_grid,solution_current,u,err_PETSc)
|
call DMDAVecGetArrayReadF90(DM_mech,u_PETSc,u,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMDAVecGetArrayReadF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
|
call DMDAVecGetArrayReadF90(DM_mech,u_lastInc_PETSc,u_lastInc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
print'(1x,a)', 'saving solver data required for restart'; flush(IO_STDOUT)
|
print'(1x,a)', 'saving solver data required for restart'; flush(IO_STDOUT)
|
||||||
|
@ -463,9 +462,9 @@ subroutine grid_mechanical_FEM_restartWrite()
|
||||||
call HDF5_closeFile(fileHandle)
|
call HDF5_closeFile(fileHandle)
|
||||||
end if
|
end if
|
||||||
|
|
||||||
call DMDAVecRestoreArrayReadF90(mechanical_grid,solution_current,u,err_PETSc)
|
call DMDAVecRestoreArrayReadF90(DM_mech,u_PETSc,u,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMDAVecRestoreArrayReadF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
|
call DMDAVecRestoreArrayReadF90(DM_mech,u_lastInc_PETSc,u_lastInc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
end subroutine grid_mechanical_FEM_restartWrite
|
end subroutine grid_mechanical_FEM_restartWrite
|
||||||
|
@ -535,9 +534,9 @@ subroutine formResidual(da_local,x_local, &
|
||||||
integer(MPI_INTEGER_KIND) :: err_MPI
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
real(pREAL), dimension(3,3,3,3) :: devNull
|
real(pREAL), dimension(3,3,3,3) :: devNull
|
||||||
|
|
||||||
call SNESGetNumberFunctionEvals(SNES_mechanical,nfuncs,err_PETSc)
|
call SNESGetNumberFunctionEvals(SNES_mech,nfuncs,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESGetIterationNumber(SNES_mechanical,PETScIter,err_PETSc)
|
call SNESGetIterationNumber(SNES_mech,PETScIter,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -51,9 +51,9 @@ module grid_mechanical_spectral_basic
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! PETSc data
|
! PETSc data
|
||||||
DM :: da
|
DM :: DM_mech
|
||||||
SNES :: SNES_mechanical
|
SNES :: SNES_mech
|
||||||
Vec :: solution_vec
|
Vec :: F_PETSc
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! common pointwise data
|
! common pointwise data
|
||||||
|
@ -112,7 +112,7 @@ subroutine grid_mechanical_spectral_basic_init(num_grid)
|
||||||
integer(MPI_INTEGER_KIND) :: err_MPI
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
real(pREAL), pointer, dimension(:,:,:,:) :: &
|
real(pREAL), pointer, dimension(:,:,:,:) :: &
|
||||||
F ! pointer to solution data
|
F ! pointer to solution data
|
||||||
PetscInt, dimension(0:worldsize-1) :: localK
|
integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global
|
||||||
real(pREAL), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
|
real(pREAL), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
|
||||||
integer(HID_T) :: fileHandle, groupHandle
|
integer(HID_T) :: fileHandle, groupHandle
|
||||||
type(tDict), pointer :: &
|
type(tDict), pointer :: &
|
||||||
|
@ -168,41 +168,40 @@ subroutine grid_mechanical_spectral_basic_init(num_grid)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! initialize solver specific parts of PETSc
|
! initialize solver specific parts of PETSc
|
||||||
call SNESCreate(PETSC_COMM_WORLD,SNES_mechanical,err_PETSc)
|
call SNESCreate(PETSC_COMM_WORLD,SNES_mech,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetOptionsPrefix(SNES_mechanical,'mechanical_',err_PETSc)
|
call SNESSetOptionsPrefix(SNES_mech,'mechanical_',err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
localK = 0_pPetscInt
|
call MPI_Allgather(int(cells3,MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,&
|
||||||
localK(worldrank) = int(cells3,pPetscInt)
|
cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
|
||||||
call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
|
|
||||||
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
call DMDACreate3d(PETSC_COMM_WORLD, &
|
call DMDACreate3d(PETSC_COMM_WORLD, &
|
||||||
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
|
DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
|
||||||
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
|
DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
|
||||||
int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells
|
int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells
|
||||||
1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
|
1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), &
|
||||||
9_pPetscInt, 0_pPetscInt, & ! #dof (F, tensor), ghost boundary width (domain overlap)
|
9_pPETSCINT, 0_pPETSCINT, & ! #dof (F, tensor), ghost boundary width (domain overlap)
|
||||||
[int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells
|
[int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells
|
||||||
da,err_PETSc) ! handle, error
|
DM_mech,err_PETSc) ! handle, error
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMsetFromOptions(da,err_PETSc)
|
call DMsetFromOptions(DM_mech,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMsetUp(da,err_PETSc)
|
call DMsetUp(DM_mech,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMcreateGlobalVector(da,solution_vec,err_PETSc) ! global solution vector (cells x 9, i.e. every def grad tensor)
|
call DMcreateGlobalVector(DM_mech,F_PETSc,err_PETSc) ! global solution vector (cells x 9, i.e. every def grad tensor)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
|
call DMDASNESsetFunctionLocal(DM_mech,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESsetConvergenceTest(SNES_mechanical,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged"
|
call SNESsetConvergenceTest(SNES_mech,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged"
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetDM(SNES_mechanical,da,err_PETSc)
|
call SNESSetDM(SNES_mech,DM_mech,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESsetFromOptions(SNES_mechanical,err_PETSc) ! pull it all together with additional CLI arguments
|
call SNESsetFromOptions(SNES_mech,err_PETSc) ! pull it all together with additional CLI arguments
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! init fields
|
! init fields
|
||||||
call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc) ! places pointer on PETSc data
|
call DMDAVecGetArrayF90(DM_mech,F_PETSc,F,err_PETSc) ! places pointer on PETSc data
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
restartRead: if (CLI_restartInc > 0) then
|
restartRead: if (CLI_restartInc > 0) then
|
||||||
|
@ -238,7 +237,7 @@ subroutine grid_mechanical_spectral_basic_init(num_grid)
|
||||||
call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2
|
call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2
|
||||||
reshape(F,shape(F_lastInc)), & ! target F
|
reshape(F,shape(F_lastInc)), & ! target F
|
||||||
0.0_pREAL) ! time increment
|
0.0_pREAL) ! time increment
|
||||||
call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc) ! deassociate pointer
|
call DMDAVecRestoreArrayF90(DM_mech,F_PETSc,F,err_PETSc) ! deassociate pointer
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
restartRead2: if (CLI_restartInc > 0) then
|
restartRead2: if (CLI_restartInc > 0) then
|
||||||
|
@ -287,9 +286,9 @@ function grid_mechanical_spectral_basic_solution(incInfoIn) result(solution)
|
||||||
S = utilities_maskedCompliance(params%rotation_BC,params%stress_mask,C_volAvg)
|
S = utilities_maskedCompliance(params%rotation_BC,params%stress_mask,C_volAvg)
|
||||||
if (num%update_gamma) call utilities_updateGamma(C_minMaxAvg)
|
if (num%update_gamma) call utilities_updateGamma(C_minMaxAvg)
|
||||||
|
|
||||||
call SNESsolve(SNES_mechanical,PETSC_NULL_VEC,solution_vec,err_PETSc)
|
call SNESsolve(SNES_mech,PETSC_NULL_VEC,F_PETSc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESGetConvergedReason(SNES_mechanical,reason,err_PETSc)
|
call SNESGetConvergedReason(SNES_mech,reason,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
solution%converged = reason > 0
|
solution%converged = reason > 0
|
||||||
|
@ -324,7 +323,7 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
|
||||||
real(pREAL), pointer, dimension(:,:,:,:) :: F
|
real(pREAL), pointer, dimension(:,:,:,:) :: F
|
||||||
|
|
||||||
|
|
||||||
call DMDAVecGetArrayF90(da,solution_vec,F,err_PETSc)
|
call DMDAVecGetArrayF90(DM_mech,F_PETSc,F,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
if (cutBack) then
|
if (cutBack) then
|
||||||
|
@ -368,7 +367,7 @@ subroutine grid_mechanical_spectral_basic_forward(cutBack,guess,Delta_t,Delta_t_
|
||||||
|
|
||||||
F = reshape(utilities_forwardField(Delta_t,F_lastInc,Fdot, & ! estimate of F at end of time+Delta_t that matches rotated F_aim on average
|
F = reshape(utilities_forwardField(Delta_t,F_lastInc,Fdot, & ! estimate of F at end of time+Delta_t that matches rotated F_aim on average
|
||||||
rotation_BC%rotate(F_aim,active=.true.)),[9,cells(1),cells(2),cells3])
|
rotation_BC%rotate(F_aim,active=.true.)),[9,cells(1),cells(2),cells3])
|
||||||
call DMDAVecRestoreArrayF90(da,solution_vec,F,err_PETSc)
|
call DMDAVecRestoreArrayF90(DM_mech,F_PETSc,F,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
@ -388,10 +387,10 @@ subroutine grid_mechanical_spectral_basic_updateCoords()
|
||||||
PetscErrorCode :: err_PETSc
|
PetscErrorCode :: err_PETSc
|
||||||
real(pREAL), dimension(:,:,:,:), pointer :: F
|
real(pREAL), dimension(:,:,:,:), pointer :: F
|
||||||
|
|
||||||
call DMDAVecGetArrayReadF90(da,solution_vec,F,err_PETSc)
|
call DMDAVecGetArrayReadF90(DM_mech,F_PETSc,F,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call utilities_updateCoords(reshape(F,[3,3,size(F,2),size(F,3),size(F,4)]))
|
call utilities_updateCoords(reshape(F,[3,3,size(F,2),size(F,3),size(F,4)]))
|
||||||
call DMDAVecRestoreArrayReadF90(da,solution_vec,F,err_PETSc)
|
call DMDAVecRestoreArrayReadF90(DM_mech,F_PETSc,F,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
end subroutine grid_mechanical_spectral_basic_updateCoords
|
end subroutine grid_mechanical_spectral_basic_updateCoords
|
||||||
|
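The deformation-gradient vector stores 9 components per cell, so before it is handed to utilities_updateCoords it is reshaped back to 3x3 tensors over the local grid. A minimal self-contained illustration of that reshape with hypothetical sizes:

    program reshape_demo
      implicit none

      integer, parameter :: nx = 2, ny = 2, nz3 = 1       ! stand-ins for cells(1), cells(2), cells3
      real :: F_flat(9, nx, ny, nz3)                      ! 9 components per cell, as in the DMDA vector
      real :: F(3, 3, nx, ny, nz3)
      integer :: c

      F_flat = 0.0
      do c = 1, 9
        F_flat(c, :, :, :) = real(c)                      ! fill each component with its own index
      end do

      F = reshape(F_flat, [3, 3, nx, ny, nz3])            ! same data, viewed as 3x3 tensors per cell

      print '(a,3f5.1)', 'row 1 of F at cell (1,1,1):', F(1,1:3,1,1,1)
    end program reshape_demo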
@ -406,7 +405,7 @@ subroutine grid_mechanical_spectral_basic_restartWrite()
|
||||||
integer(HID_T) :: fileHandle, groupHandle
|
integer(HID_T) :: fileHandle, groupHandle
|
||||||
real(pREAL), dimension(:,:,:,:), pointer :: F
|
real(pREAL), dimension(:,:,:,:), pointer :: F
|
||||||
|
|
||||||
call DMDAVecGetArrayReadF90(da,solution_vec,F,err_PETSc)
|
call DMDAVecGetArrayReadF90(DM_mech,F_PETSc,F,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
if (num%update_gamma) C_minMaxAvgRestart = C_minMaxAvg
|
if (num%update_gamma) C_minMaxAvgRestart = C_minMaxAvg
|
||||||
|
@ -434,7 +433,7 @@ subroutine grid_mechanical_spectral_basic_restartWrite()
|
||||||
call HDF5_closeFile(fileHandle)
|
call HDF5_closeFile(fileHandle)
|
||||||
end if
|
end if
|
||||||
|
|
||||||
call DMDAVecRestoreArrayReadF90(da,solution_vec,F,err_PETSc)
|
call DMDAVecRestoreArrayReadF90(DM_mech,F_PETSc,F,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
end subroutine grid_mechanical_spectral_basic_restartWrite
|
end subroutine grid_mechanical_spectral_basic_restartWrite
|
||||||
|
@ -505,9 +504,9 @@ subroutine formResidual(residual_subdomain, F, &
|
||||||
integer(MPI_INTEGER_KIND) :: err_MPI
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
|
|
||||||
|
|
||||||
call SNESGetNumberFunctionEvals(SNES_mechanical,nfuncs,err_PETSc)
|
call SNESGetNumberFunctionEvals(SNES_mech,nfuncs,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESGetIterationNumber(SNES_mechanical,PETScIter,err_PETSc)
|
call SNESGetIterationNumber(SNES_mech,PETScIter,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
|
if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
|
||||||
|
|
|
@@ -57,9 +57,9 @@ module grid_mechanical_spectral_polarization

 !--------------------------------------------------------------------------------------------------
 ! PETSc data
-DM :: da
-SNES :: SNES_mechanical
-Vec :: solution_vec
+DM :: DM_mech
+SNES :: SNES_mech
+Vec :: FandF_tau_PETSc

 !--------------------------------------------------------------------------------------------------
 ! common pointwise data
@@ -126,7 +126,7 @@ subroutine grid_mechanical_spectral_polarization_init(num_grid)
 FandF_tau, & ! overall pointer to solution data
 F, & ! specific (sub)pointer
 F_tau ! specific (sub)pointer
-PetscInt, dimension(0:worldsize-1) :: localK
+integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global
 real(pREAL), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
 integer(HID_T) :: fileHandle, groupHandle
 type(tDict), pointer :: &
@@ -189,41 +189,40 @@ subroutine grid_mechanical_spectral_polarization_init(num_grid)

 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
-call SNESCreate(PETSC_COMM_WORLD,SNES_mechanical,err_PETSc)
+call SNESCreate(PETSC_COMM_WORLD,SNES_mech,err_PETSc)
 CHKERRQ(err_PETSc)
-call SNESSetOptionsPrefix(SNES_mechanical,'mechanical_',err_PETSc)
+call SNESSetOptionsPrefix(SNES_mech,'mechanical_',err_PETSc)
 CHKERRQ(err_PETSc)
-localK = 0_pPetscInt
-localK(worldrank) = int(cells3,pPetscInt)
-call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+call MPI_Allgather(int(cells3,pPetscInt),1_MPI_INTEGER_KIND,MPI_INTEGER,&
+cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
 if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call DMDACreate3d(PETSC_COMM_WORLD, &
 DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
 DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
-int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells
-1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
-18_pPetscInt, 0_pPetscInt, & ! #dof (2xtensor), ghost boundary width (domain overlap)
-[int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells
-da,err_PETSc) ! handle, error
+int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells
+1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), &
+18_pPETSCINT, 0_pPETSCINT, & ! #dof (2xtensor), ghost boundary width (domain overlap)
+[int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells
+DM_mech,err_PETSc) ! handle, error
 CHKERRQ(err_PETSc)
-call DMsetFromOptions(da,err_PETSc)
+call DMsetFromOptions(DM_mech,err_PETSc)
 CHKERRQ(err_PETSc)
-call DMsetUp(da,err_PETSc)
+call DMsetUp(DM_mech,err_PETSc)
 CHKERRQ(err_PETSc)
-call DMcreateGlobalVector(da,solution_vec,err_PETSc) ! global solution vector (cells x 18, i.e. every def grad tensor)
+call DMcreateGlobalVector(DM_mech,FandF_tau_PETSc,err_PETSc) ! global solution vector (cells x 18, i.e. every def grad tensor)
 CHKERRQ(err_PETSc)
-call DMDASNESsetFunctionLocal(da,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
+call DMDASNESsetFunctionLocal(DM_mech,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
 CHKERRQ(err_PETSc)
-call SNESsetConvergenceTest(SNES_mechanical,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged"
+call SNESsetConvergenceTest(SNES_mech,converged,PETSC_NULL_SNES,PETSC_NULL_FUNCTION,err_PETSc) ! specify custom convergence check function "converged"
 CHKERRQ(err_PETSc)
-call SNESSetDM(SNES_mechanical,da,err_PETSc)
+call SNESSetDM(SNES_mech,DM_mech,err_PETSc)
 CHKERRQ(err_PETSc)
-call SNESsetFromOptions(SNES_mechanical,err_PETSc) ! pull it all together with additional CLI arguments
+call SNESsetFromOptions(SNES_mech,err_PETSc) ! pull it all together with additional CLI arguments
 CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
 ! init fields
-call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc) ! places pointer on PETSc data
+call DMDAVecGetArrayF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) ! places pointer on PETSc data
 CHKERRQ(err_PETSc)
 F => FandF_tau(0: 8,:,:,:)
 F_tau => FandF_tau(9:17,:,:,:)
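Aside on the localK replacement above: instead of zeroing an array, writing the local value into this rank's slot, and summing with MPI_Allreduce, every rank now contributes its z-layer count once and MPI_Allgather returns the complete per-rank list that DMDACreate3d expects for its local-cells argument. A minimal, self-contained sketch of that communication pattern, using plain mpi_f08 with default integers rather than DAMASK's pPETSCINT/MPI_INTEGER_KIND kinds (program and variable names, and the pretend decomposition, are invented for the example):

program gather_local_sizes
  use mpi_f08
  implicit none

  integer :: cells3                                ! local z-extent on this rank (stand-in value)
  integer :: worldrank, worldsize, err
  integer, allocatable, dimension(:) :: cells3_global

  call MPI_Init(err)
  call MPI_Comm_rank(MPI_COMM_WORLD,worldrank,err)
  call MPI_Comm_size(MPI_COMM_WORLD,worldsize,err)

  allocate(cells3_global(0:worldsize-1))
  cells3 = 4 + worldrank                           ! pretend decomposition, just for illustration

  ! one collective replaces "zero the array, fill my slot, Allreduce with MPI_SUM"
  call MPI_Allgather(cells3,1,MPI_INTEGER, &
                     cells3_global,1,MPI_INTEGER,MPI_COMM_WORLD,err)

  if (worldrank == 0) print '(a,99(1x,i0))', 'z-layers per rank:', cells3_global
  call MPI_Finalize(err)
end program gather_local_sizes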
@@ -267,7 +266,7 @@ subroutine grid_mechanical_spectral_polarization_init(num_grid)
 call utilities_constitutiveResponse(P,P_av,C_volAvg,C_minMaxAvg, & ! stress field, stress avg, global average of stiffness and (min+max)/2
 reshape(F,shape(F_lastInc)), & ! target F
 0.0_pREAL) ! time increment
-call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc) ! deassociate pointer
+call DMDAVecRestoreArrayF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc) ! deassociate pointer
 CHKERRQ(err_PETSc)

 restartRead2: if (CLI_restartInc > 0) then
@@ -322,9 +321,9 @@ function grid_mechanical_spectral_polarization_solution(incInfoIn) result(soluti
 S_scale = math_invSym3333(C_minMaxAvg)
 end if

-call SNESSolve(SNES_mechanical,PETSC_NULL_VEC,solution_vec,err_PETSc)
+call SNESSolve(SNES_mech,PETSC_NULL_VEC,FandF_tau_PETSc,err_PETSc)
 CHKERRQ(err_PETSc)
-call SNESGetConvergedReason(SNES_mechanical,reason,err_PETSc)
+call SNESGetConvergedReason(SNES_mech,reason,err_PETSc)
 CHKERRQ(err_PETSc)

 solution%converged = reason > 0
@@ -361,7 +360,7 @@ subroutine grid_mechanical_spectral_polarization_forward(cutBack,guess,Delta_t,D
 real(pREAL), dimension(3,3) :: F_lambda33


-call DMDAVecGetArrayF90(da,solution_vec,FandF_tau,err_PETSc)
+call DMDAVecGetArrayF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc)
 CHKERRQ(err_PETSc)
 F => FandF_tau(0: 8,:,:,:)
 F_tau => FandF_tau(9:17,:,:,:)
@@ -425,7 +424,7 @@ subroutine grid_mechanical_spectral_polarization_forward(cutBack,guess,Delta_t,D
 end do; end do; end do
 end if

-call DMDAVecRestoreArrayF90(da,solution_vec,FandF_tau,err_PETSc)
+call DMDAVecRestoreArrayF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc)
 CHKERRQ(err_PETSc)

 !--------------------------------------------------------------------------------------------------
@@ -445,10 +444,10 @@ subroutine grid_mechanical_spectral_polarization_updateCoords()
 PetscErrorCode :: err_PETSc
 real(pREAL), dimension(:,:,:,:), pointer :: FandF_tau

-call DMDAVecGetArrayReadF90(da,solution_vec,FandF_tau,err_PETSc)
+call DMDAVecGetArrayReadF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc)
 CHKERRQ(err_PETSc)
 call utilities_updateCoords(reshape(FandF_tau(0:8,:,:,:),[3,3,size(FandF_tau,2),size(FandF_tau,3),size(FandF_tau,4)]))
-call DMDAVecRestoreArrayReadF90(da,solution_vec,FandF_tau,err_PETSc)
+call DMDAVecRestoreArrayReadF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc)
 CHKERRQ(err_PETSc)

 end subroutine grid_mechanical_spectral_polarization_updateCoords
@@ -463,7 +462,7 @@ subroutine grid_mechanical_spectral_polarization_restartWrite()
 integer(HID_T) :: fileHandle, groupHandle
 real(pREAL), dimension(:,:,:,:), pointer :: FandF_tau, F, F_tau

-call DMDAVecGetArrayReadF90(da,solution_vec,FandF_tau,err_PETSc)
+call DMDAVecGetArrayReadF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc)
 CHKERRQ(err_PETSc)
 F => FandF_tau(0: 8,:,:,:)
 F_tau => FandF_tau(9:17,:,:,:)
@@ -495,7 +494,7 @@ subroutine grid_mechanical_spectral_polarization_restartWrite()
 call HDF5_closeFile(fileHandle)
 end if

-call DMDAVecRestoreArrayReadF90(da,solution_vec,FandF_tau,err_PETSc)
+call DMDAVecRestoreArrayReadF90(DM_mech,FandF_tau_PETSc,FandF_tau,err_PETSc)
 CHKERRQ(err_PETSc)

 end subroutine grid_mechanical_spectral_polarization_restartWrite
@@ -583,9 +582,9 @@ subroutine formResidual(residual_subdomain, FandF_tau, &
 call MPI_Allreduce(MPI_IN_PLACE,F_av,9_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
 if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'

-call SNESGetNumberFunctionEvals(SNES_mechanical,nfuncs,err_PETSc)
+call SNESGetNumberFunctionEvals(SNES_mech,nfuncs,err_PETSc)
 CHKERRQ(err_PETSc)
-call SNESGetIterationNumber(SNES_mechanical,PETScIter,err_PETSc)
+call SNESGetIterationNumber(SNES_mech,PETScIter,err_PETSc)
 CHKERRQ(err_PETSc)

 if (nfuncs == 0 .and. PETScIter == 0) totalIter = -1 ! new increment
@@ -47,9 +47,8 @@ module grid_thermal_spectral
 !--------------------------------------------------------------------------------------------------
 ! PETSc data
 SNES :: SNES_thermal
-Vec :: solution_vec
+Vec :: T_PETSc
 real(pREAL), dimension(:,:,:), allocatable :: &
-T, & !< field of current temperature
 T_lastInc, & !< field of previous temperature
 T_stagInc, & !< field of staggered temperature
 dotT_lastInc
@@ -74,10 +73,10 @@ subroutine grid_thermal_spectral_init(num_grid)

 type(tDict), pointer, intent(in) :: num_grid

-PetscInt, dimension(0:worldsize-1) :: localK
+integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: cells3_global
 integer :: i, j, k, ce
-DM :: thermal_grid
-real(pREAL), dimension(:,:,:), pointer :: T_PETSc
+DM :: DM_thermal
+real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed
 integer(MPI_INTEGER_KIND) :: err_MPI
 PetscErrorCode :: err_PETSc
 integer(HID_T) :: fileHandle, groupHandle
@@ -89,6 +88,7 @@ subroutine grid_thermal_spectral_init(num_grid)
 petsc_options


+
 print'(/,1x,a)', '<<<+- grid_thermal_spectral init -+>>>'

 print'(/,1x,a)', 'P. Shanthraj et al., Handbook of Mechanics of Materials, 2019'
@@ -117,45 +117,39 @@ subroutine grid_thermal_spectral_init(num_grid)
 call PetscOptionsInsertString(PETSC_NULL_OPTIONS,petsc_options,err_PETSc)
 CHKERRQ(err_PETSc)

-!--------------------------------------------------------------------------------------------------
-! init fields
-T = discretization_grid_getInitialCondition('T')
-T_lastInc = T
-T_stagInc = T
-dotT_lastInc = 0.0_pREAL * T

 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
 call SNESCreate(PETSC_COMM_WORLD,SNES_thermal,err_PETSc)
 CHKERRQ(err_PETSc)
 call SNESSetOptionsPrefix(SNES_thermal,'thermal_',err_PETSc)
 CHKERRQ(err_PETSc)
-localK = 0_pPetscInt
-localK(worldrank) = int(cells3,pPetscInt)
-call MPI_Allreduce(MPI_IN_PLACE,localK,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI)
+call MPI_Allgather(int(cells3,pPETSCINT),1_MPI_INTEGER_KIND,MPI_INTEGER,&
+cells3_global,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
 if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 call DMDACreate3D(PETSC_COMM_WORLD, &
 DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, & ! cut off stencil at boundary
 DMDA_STENCIL_BOX, & ! Moore (26) neighborhood around central point
-int(cells(1),pPetscInt),int(cells(2),pPetscInt),int(cells(3),pPetscInt), & ! global cells
-1_pPetscInt, 1_pPetscInt, int(worldsize,pPetscInt), &
-1_pPetscInt, 0_pPetscInt, & ! #dof (T, scalar), ghost boundary width (domain overlap)
-[int(cells(1),pPetscInt)],[int(cells(2),pPetscInt)],localK, & ! local cells
-thermal_grid,err_PETSc) ! handle, error
+int(cells(1),pPETSCINT),int(cells(2),pPETSCINT),int(cells(3),pPETSCINT), & ! global cells
+1_pPETSCINT, 1_pPETSCINT, int(worldsize,pPETSCINT), &
+1_pPETSCINT, 0_pPETSCINT, & ! #dof (T, scalar), ghost boundary width (domain overlap)
+[int(cells(1),pPETSCINT)],[int(cells(2),pPETSCINT)],int(cells3_global,pPETSCINT), & ! local cells
+DM_thermal,err_PETSc) ! handle, error
 CHKERRQ(err_PETSc)
-call DMsetFromOptions(thermal_grid,err_PETSc)
+call DMsetFromOptions(DM_thermal,err_PETSc)
 CHKERRQ(err_PETSc)
-call DMsetUp(thermal_grid,err_PETSc)
+call DMsetUp(DM_thermal,err_PETSc)
 CHKERRQ(err_PETSc)
-call DMCreateGlobalVector(thermal_grid,solution_vec,err_PETSc) ! global solution vector (cells x 1, i.e. every def grad tensor)
+call DMCreateGlobalVector(DM_thermal,T_PETSc,err_PETSc) ! global solution vector (cells x 1, i.e. every def grad tensor)
 CHKERRQ(err_PETSc)
-call DMDASNESSetFunctionLocal(thermal_grid,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
+call DMDASNESSetFunctionLocal(DM_thermal,INSERT_VALUES,formResidual,PETSC_NULL_SNES,err_PETSc) ! residual vector of same shape as solution vector
 CHKERRQ(err_PETSc)
-call SNESSetDM(SNES_thermal,thermal_grid,err_PETSc)
+call SNESSetDM(SNES_thermal,DM_thermal,err_PETSc)
 CHKERRQ(err_PETSc)
 call SNESSetFromOptions(SNES_thermal,err_PETSc) ! pull it all together with additional CLI arguments
 CHKERRQ(err_PETSc)

+call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T
+CHKERRQ(err_PETSc)

 restartRead: if (CLI_restartInc > 0) then
 print'(/,1x,a,1x,i0)', 'loading restart data of increment', CLI_restartInc
@@ -167,20 +161,23 @@ subroutine grid_thermal_spectral_init(num_grid)
 T = reshape(tempN,[cells(1),cells(2),cells3])
 call HDF5_read(tempN,groupHandle,'T_lastInc',.false.)
 T_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
+T_stagInc = T_lastInc
 call HDF5_read(tempN,groupHandle,'dotT_lastInc',.false.)
 dotT_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
+else
+T = discretization_grid_getInitialCondition('T')
+T_lastInc = T(0:,0:,0:)
+T_stagInc = T_lastInc
+dotT_lastInc = 0.0_pREAL * T_lastInc
 end if restartRead

 ce = 0
-do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
+do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0, cells(1)-1
 ce = ce + 1
 call homogenization_thermal_setField(T(i,j,k),0.0_pREAL,ce)
 end do; end do; end do

-call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
-CHKERRQ(err_PETSc)
-T_PETSc = T
-call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
+call DMDAVecRestoreArrayF90(DM_thermal,T_PETSc,T,err_PETSc)
 CHKERRQ(err_PETSc)

 call updateReference()
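The "! returns 0-indexed T" comments above are what drive the new loop bounds: DMDAVecGetArrayF90 associates the pointer with the DMDA's global corner indices, so the local view starts at index 0, while module arrays such as T_lastInc keep their default lower bound of 1. A plain-Fortran sketch of that situation (no PETSc involved; the 0-based view is built here with pointer bounds remapping as a stand-in for what the PETSc call returns, and all names and values are invented for the example) shows why the loops now run 0..cells3-1 and why mixed accesses need a +1 shift:

program zero_based_pointer
  implicit none
  integer, parameter :: nx = 2, ny = 2, nz = 2
  real, dimension(nx,ny,nz), target  :: buffer       ! stands in for the PETSc-owned storage
  real, dimension(nx,ny,nz)          :: T_lastInc    ! ordinary 1-based array, like the module field
  real, dimension(:,:,:),    pointer :: T            ! 0-indexed view, like the pointer PETSc hands back
  integer :: i, j, k

  buffer    = 300.0
  T_lastInc = 290.0
  T(0:,0:,0:) => buffer                              ! bounds remapping: T(0,0,0) aliases buffer(1,1,1)

  do k = 0, nz-1; do j = 0, ny-1; do i = 0, nx-1
    print '(3(i0,1x),2f8.2)', i, j, k, T(i,j,k), T_lastInc(i+1,j+1,k+1)   ! 0-based into T, +1 into the 1-based array
  end do; end do; end do
end program zero_based_pointer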
@@ -195,37 +192,43 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)

 real(pREAL), intent(in) :: &
 Delta_t !< increment in time for current solution

 integer :: i, j, k, ce
 type(tSolutionState) :: solution
 PetscInt :: devNull
 PetscReal :: T_min, T_max, stagNorm
+DM :: DM_thermal
+real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed
 integer(MPI_INTEGER_KIND) :: err_MPI
 PetscErrorCode :: err_PETSc
 SNESConvergedReason :: reason


 solution%converged = .false.

 !--------------------------------------------------------------------------------------------------
 ! set module wide availabe data
 params%Delta_t = Delta_t

-call SNESSolve(SNES_thermal,PETSC_NULL_VEC,solution_vec,err_PETSc)
+call SNESSolve(SNES_thermal,PETSC_NULL_VEC,T_PETSc,err_PETSc)
 CHKERRQ(err_PETSc)
 call SNESGetConvergedReason(SNES_thermal,reason,err_PETSc)
 CHKERRQ(err_PETSc)

-if (reason < 1) then
-solution%converged = .false.
-solution%iterationsNeeded = num%itmax
-else
-solution%converged = .true.
-solution%iterationsNeeded = totalIter
-end if
+solution%converged = reason > 0
+solution%iterationsNeeded = merge(totalIter,num%itmax,solution%converged)
+
+call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T
+CHKERRQ(err_PETSc)

+T_min = minval(T)
+T_max = maxval(T)
 stagNorm = maxval(abs(T - T_stagInc))
 call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
 if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T))
+solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*T_max)
 call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
 if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
 T_stagInc = T
@@ -233,15 +236,14 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
 !--------------------------------------------------------------------------------------------------
 ! updating thermal state
 ce = 0
-do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
+do k = 0, cells3-1; do j = 0, cells(2)-1; do i = 0, cells(1)-1
 ce = ce + 1
-call homogenization_thermal_setField(T(i,j,k),(T(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
+call homogenization_thermal_setField(T(i,j,k),(T(i,j,k)-T_lastInc(i+1,j+1,k+1))/params%Delta_t,ce)
 end do; end do; end do

-call VecMin(solution_vec,devNull,T_min,err_PETSc)
-CHKERRQ(err_PETSc)
-call VecMax(solution_vec,devNull,T_max,err_PETSc)
+call DMDAVecRestoreArrayF90(DM_thermal,T_PETSc,T,err_PETSc)
 CHKERRQ(err_PETSc)

 if (solution%converged) &
 print'(/,1x,a)', '... thermal conduction converged ..................................'
 print'(/,1x,a,f8.4,2x,f8.4,2x,f8.4)', 'Minimum|Maximum|Delta Temperature / K = ', T_min, T_max, stagNorm
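One small idiom in the solution hunk above deserves a note: the four-line if/else that used to set converged and iterationsNeeded collapses into the merge(tsource,fsource,mask) intrinsic, while the min/max temperatures are now reduced directly from the borrowed array instead of going through VecMin/VecMax. A tiny stand-alone sketch of the merge idiom (the numeric values are placeholders, not DAMASK defaults):

program merge_example
  implicit none
  integer, parameter :: itmax = 40                  ! placeholder for num%itmax
  integer, parameter :: totalIter = 7               ! placeholder for the solver's iteration counter
  integer :: reason                                 ! stands in for the SNESConvergedReason value
  logical :: converged
  integer :: iterationsNeeded

  reason = 3                                        ! in PETSc, any positive reason means "converged"
  converged = reason > 0
  iterationsNeeded = merge(totalIter,itmax,converged)   ! totalIter if converged, itmax otherwise
  print '(a,l2,a,i0)', 'converged:', converged, '   iterations: ', iterationsNeeded
end program merge_example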
@@ -252,42 +254,40 @@ end function grid_thermal_spectral_solution


 !--------------------------------------------------------------------------------------------------
-!> @brief forwarding routine
+!> @brief Set DAMASK data to current solver status.
 !--------------------------------------------------------------------------------------------------
 subroutine grid_thermal_spectral_forward(cutBack)

 logical, intent(in) :: cutBack

 integer :: i, j, k, ce
-DM :: dm_local
-real(pREAL), dimension(:,:,:), pointer :: T_PETSc
+DM :: DM_thermal
+real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed
 PetscErrorCode :: err_PETSc


-if (cutBack) then
-T = T_lastInc
-T_stagInc = T_lastInc
+call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc)
+CHKERRQ(err_PETSc)
+call DMDAVecGetArrayF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T
+CHKERRQ(err_PETSc)

-!--------------------------------------------------------------------------------------------------
-! reverting thermal field state
-call SNESGetDM(SNES_thermal,dm_local,err_PETSc)
-CHKERRQ(err_PETSc)
-call DMDAVecGetArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc) !< get the data out of PETSc to work with
-CHKERRQ(err_PETSc)
-T_PETSc = T
-call DMDAVecRestoreArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc)
-CHKERRQ(err_PETSc)
+if (cutBack) then
 ce = 0
 do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
 ce = ce + 1
-call homogenization_thermal_setField(T(i,j,k),dotT_lastInc(i,j,k),ce)
+call homogenization_thermal_setField(T_lastInc(i,j,k),dotT_lastInc(i,j,k),ce)
 end do; end do; end do
+T = T_lastInc
+T_stagInc = T_lastInc
 else
 dotT_lastInc = (T - T_lastInc)/params%Delta_t
 T_lastInc = T
 call updateReference()
 end if

+call DMDAVecRestoreArrayF90(DM_thermal,T_PETSc,T,err_PETSc)
+CHKERRQ(err_PETSc)

 end subroutine grid_thermal_spectral_forward


@@ -297,13 +297,14 @@ end subroutine grid_thermal_spectral_forward
 subroutine grid_thermal_spectral_restartWrite()

 PetscErrorCode :: err_PETSc
-DM :: dm_local
+DM :: DM_thermal
 integer(HID_T) :: fileHandle, groupHandle
-real(pREAL), dimension(:,:,:), pointer :: T
+real(pREAL), dimension(:,:,:), pointer :: T ! 0-indexed

-call SNESGetDM(SNES_thermal,dm_local,err_PETSc);
+call SNESGetDM(SNES_thermal,DM_thermal,err_PETSc)
 CHKERRQ(err_PETSc)
-call DMDAVecGetArrayReadF90(dm_local,solution_vec,T,err_PETSc);
+call DMDAVecGetArrayReadF90(DM_thermal,T_PETSc,T,err_PETSc) ! returns 0-indexed T
 CHKERRQ(err_PETSc)

 print'(1x,a)', 'saving thermal solver data required for restart'; flush(IO_STDOUT)
@@ -316,13 +317,12 @@ subroutine grid_thermal_spectral_restartWrite()
 call HDF5_closeGroup(groupHandle)
 call HDF5_closeFile(fileHandle)

-call DMDAVecRestoreArrayReadF90(dm_local,solution_vec,T,err_PETSc);
+call DMDAVecRestoreArrayReadF90(DM_thermal,T_PETSc,T,err_PETSc);
 CHKERRQ(err_PETSc)

 end subroutine grid_thermal_spectral_restartWrite



 !--------------------------------------------------------------------------------------------------
 !> @brief Construct the residual vector.
 !--------------------------------------------------------------------------------------------------
@@ -333,7 +333,7 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc)
 real(pREAL), dimension(cells(1),cells(2),cells3), intent(in) :: &
 x_scal
 real(pREAL), dimension(cells(1),cells(2),cells3), intent(out) :: &
 r !< residual
 PetscObject :: dummy
 PetscErrorCode, intent(out) :: err_PETSc

@@ -341,24 +341,25 @@ subroutine formResidual(residual_subdomain,x_scal,r,dummy,err_PETSc)
 real(pREAL), dimension(3,cells(1),cells(2),cells3) :: vectorField


-T = x_scal
+associate(T => x_scal)
 vectorField = utilities_ScalarGradient(T)
 ce = 0
 do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
 ce = ce + 1
 vectorField(1:3,i,j,k) = matmul(homogenization_K_T(ce) - K_ref, vectorField(1:3,i,j,k))
 end do; end do; end do
 r = utilities_VectorDivergence(vectorField)
 ce = 0
 do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
 ce = ce + 1
 r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_T(ce)) &
 + homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T(i,j,k)) &
 + mu_ref*T(i,j,k)
 end do; end do; end do

 r = T &
 - utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t)
+end associate
 err_PETSc = 0

 end subroutine formResidual
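The formResidual hunk above swaps the module-level temperature copy (T = x_scal) for an associate block: T becomes a local name for the dummy argument, with no allocation and no copy, which is why the module array T could be dropped entirely. A minimal stand-alone illustration of the construct (names and values are invented for the example):

program associate_example
  implicit none
  real, dimension(2,2) :: x_scal

  x_scal = reshape([293.0, 294.0, 295.0, 296.0],[2,2])

  associate(T => x_scal)                ! T is an alias for x_scal, not a copy
    print '(a,f7.2)', 'max T = ', maxval(T)
  end associate
end program associate_example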
@@ -495,10 +495,9 @@ subroutine result_mapping_phase(ID,entry,label)
 integer, dimension(:,:), intent(in) :: entry !< phase entry at (co,ce)
 character(len=*), dimension(:), intent(in) :: label !< label of each phase section

-integer(pI64), dimension(size(entry,1),size(entry,2)) :: &
-entryGlobal
+integer(pI64), dimension(size(entry,1),size(entry,2)) :: entryGlobal
 integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process
-integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process
+integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: writeSize !< amount of data written per process
 integer(HSIZE_T), dimension(2) :: &
 myShape, & !< shape of the dataset (this process)
 myOffset, &
@@ -521,21 +520,19 @@ subroutine result_mapping_phase(ID,entry,label)
 integer(MPI_INTEGER_KIND) :: err_MPI


-writeSize = 0
-writeSize(worldrank) = size(entry(1,:)) ! total number of entries of this process

 call H5Pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 call HDF5_chkerr(hdferr)

 #ifndef PETSC
-entryGlobal = int(entry -1,pI64) ! 0-based
+entryGlobal = int(entry-1,pI64) ! 0-based
+writeSize(0) = size(entry,dim=2,kind=MPI_INTEGER_KIND) ! total number of entries of this process
 #else
 !--------------------------------------------------------------------------------------------------
 ! MPI settings and communication
 call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
 call HDF5_chkerr(hdferr)
-call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get output at each process
+call MPI_Allgather(size(entry,dim=2,kind=MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,&
+writeSize,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
 call parallelization_chkerr(err_MPI)

 entryOffset = 0_pI64
@@ -554,9 +551,9 @@ subroutine result_mapping_phase(ID,entry,label)
 end do
 #endif

-myShape = int([size(ID,1),writeSize(worldrank)], HSIZE_T)
-myOffset = int([0,sum(writeSize(0:worldrank-1))], HSIZE_T)
-totalShape = int([size(ID,1),sum(writeSize)], HSIZE_T)
+myShape = int([size(ID,1,MPI_INTEGER_KIND),writeSize(worldrank)], HSIZE_T)
+myOffset = int([0_MPI_INTEGER_KIND,sum(writeSize(0:worldrank-1))], HSIZE_T)
+totalShape = int([size(ID,1,MPI_INTEGER_KIND),sum(writeSize)], HSIZE_T)

 !---------------------------------------------------------------------------------------------------
 ! compound type: label(ID) + entry
@@ -651,10 +648,9 @@ subroutine result_mapping_homogenization(ID,entry,label)
 integer, dimension(:), intent(in) :: entry !< homogenization entry at (ce)
 character(len=*), dimension(:), intent(in) :: label !< label of each homogenization section

-integer(pI64), dimension(size(entry,1)) :: &
-entryGlobal
+integer(pI64), dimension(size(entry,1)) :: entryGlobal
 integer(pI64), dimension(size(label),0:worldsize-1) :: entryOffset !< offset in entry counting per process
-integer, dimension(0:worldsize-1) :: writeSize !< amount of data written per process
+integer(MPI_INTEGER_KIND), dimension(0:worldsize-1) :: writeSize !< amount of data written per process
 integer(HSIZE_T), dimension(1) :: &
 myShape, & !< shape of the dataset (this process)
 myOffset, &
@@ -677,31 +673,29 @@ subroutine result_mapping_homogenization(ID,entry,label)
 integer(MPI_INTEGER_KIND) :: err_MPI


-writeSize = 0
-writeSize(worldrank) = size(entry) ! total number of entries of this process

 call H5Pcreate_f(H5P_DATASET_XFER_F, plist_id, hdferr)
 call HDF5_chkerr(hdferr)

 #ifndef PETSC
-entryGlobal = int(entry -1,pI64) ! 0-based
+entryGlobal = int(entry-1,pI64) ! 0-based
+writeSize(0) = size(entry,kind=MPI_INTEGER_KIND) ! total number of entries of this process
 #else
 !--------------------------------------------------------------------------------------------------
 ! MPI settings and communication
 call H5Pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, hdferr)
 call HDF5_chkerr(hdferr)
-call MPI_Allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INTEGER,MPI_SUM,MPI_COMM_WORLD,err_MPI) ! get output at each process
+call MPI_Allgather(size(entry,kind=MPI_INTEGER_KIND),1_MPI_INTEGER_KIND,MPI_INTEGER,&
+writeSize,1_MPI_INTEGER_KIND,MPI_INTEGER,MPI_COMM_WORLD,err_MPI)
 call parallelization_chkerr(err_MPI)

 entryOffset = 0_pI64
-do ce = 1, size(ID,1)
+do ce = 1, size(ID)
 entryOffset(ID(ce),worldrank) = entryOffset(ID(ce),worldrank) +1_pI64
 end do
 call MPI_Allreduce(MPI_IN_PLACE,entryOffset,size(entryOffset),MPI_INTEGER8,MPI_SUM,MPI_COMM_WORLD,err_MPI)! get offset at each process
 call parallelization_chkerr(err_MPI)
 entryOffset(:,worldrank) = sum(entryOffset(:,0:worldrank-1),2)
-do ce = 1, size(ID,1)
+do ce = 1, size(ID)
 entryGlobal(ce) = int(entry(ce),pI64) -1_pI64 + entryOffset(ID(ce),worldrank)
 end do
 #endif
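In both mapping routines the gathered writeSize array then feeds the hyperslab bookkeeping shown in the phase hunk above: each process writes sum(writeSize(0:worldrank-1)) entries after its predecessors, and the dataset as a whole holds sum(writeSize) entries. A serial stand-in sketch of that arithmetic (writeSize is hard-coded here as if MPI_Allgather had already filled it, and plain integers are used instead of HSIZE_T):

program hyperslab_offsets
  implicit none
  integer, parameter :: worldsize = 4
  integer, dimension(0:worldsize-1) :: writeSize = [5, 3, 4, 6]   ! as if gathered with MPI_Allgather
  integer :: worldrank, myOffset, totalSize

  do worldrank = 0, worldsize-1
    myOffset  = sum(writeSize(0:worldrank-1))     ! zero-sized section for rank 0, i.e. offset 0
    totalSize = sum(writeSize)
    print '(a,i0,a,i0,a,i0)', 'rank ', worldrank, ': offset ', myOffset, ', total ', totalSize
  end do
end program hyperslab_offsets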