Merge branch '220-array-dimensions-in-restart-file-hdf5-chunking-fails-for-large-simulations' into 'development'

flatten solver data layout

Closes #220

See merge request damask/DAMASK!671
commit 26ff1b8956
PRIVATE (submodule):

@@ -1 +1 @@
-Subproject commit 81f5f24d076a623e6052c234825c591267915285
+Subproject commit ecfe3b3f057f4f81b3b1a12399bf238bc2546de7
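For context (inferred from the branch name, issue #220, and the diff below): the restart file used to store every solver field with its full grid shape, e.g. the deformation gradient F as a rank-5 dataset of shape 3 x 3 x cells(1) x cells(2) x cells3, and with that high-rank layout HDF5 chunking failed for large simulations. This commit flattens the spatial dimensions into a single index of size product(cells(1:2))*cells3, so F is now written as [3,3,N], the displacement u as [3,N], and scalar fields such as T as [1,N]. The accompanying renames (phi_current -> phi, T_current -> T, u_current -> u) are cosmetic. The stand-alone sketch below (illustrative only, not part of the commit; the grid dimensions are made up) checks that the reshape round trip used by the new restart code is lossless, since Fortran's reshape follows column-major memory order:

program flatten_roundtrip
  implicit none
  integer, parameter :: pReal = selected_real_kind(15)      ! double precision, as in DAMASK
  integer, dimension(3), parameter :: cells = [4,5,6]       ! made-up grid size
  integer, parameter :: cells3 = cells(3)                   ! z-slab of one rank (no MPI split here)
  real(pReal), dimension(3,3,cells(1),cells(2),cells3) :: F, F_back
  real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n

  call random_number(F)
  temp33n = reshape(F,[3,3,product(cells(1:2))*cells3])     ! flat layout written to the restart file
  F_back = reshape(temp33n,[3,3,cells(1),cells(2),cells3])  ! grid layout restored after reading
  if (any(F_back /= F)) error stop 'round trip failed'
  print *, 'reshape round trip is exact'
end program flatten_roundtrip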
grid_damage_spectral.f90:

@@ -46,7 +46,7 @@ module grid_damage_spectral
   SNES :: SNES_damage
   Vec :: solution_vec
   real(pReal), dimension(:,:,:), allocatable :: &
-    phi_current, & !< field of current damage
+    phi, & !< field of current damage
     phi_lastInc, & !< field of previous damage
     phi_stagInc !< field of staggered damage
 
@@ -112,9 +112,9 @@ subroutine grid_damage_spectral_init()
 
 !--------------------------------------------------------------------------------------------------
 ! init fields
-  phi_current = discretization_grid_getInitialCondition('phi')
-  phi_lastInc = phi_current
-  phi_stagInc = phi_current
+  phi = discretization_grid_getInitialCondition('phi')
+  phi_lastInc = phi
+  phi_stagInc = phi
 
 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
@@ -170,12 +170,12 @@ subroutine grid_damage_spectral_init()
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
     ce = ce + 1
-    call homogenization_set_phi(phi_current(i,j,k),ce)
+    call homogenization_set_phi(phi(i,j,k),ce)
   end do; end do; end do
 
   call DMDAVecGetArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
   CHKERRQ(err_PETSc)
-  phi_PETSc = phi_current
+  phi_PETSc = phi
   call DMDAVecRestoreArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
   CHKERRQ(err_PETSc)
 
@@ -218,20 +218,20 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
     solution%converged = .true.
     solution%iterationsNeeded = totalIter
   end if
-  stagNorm = maxval(abs(phi_current - phi_stagInc))
+  stagNorm = maxval(abs(phi - phi_stagInc))
   call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-  solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi_current))
+  solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi))
   call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-  phi_stagInc = phi_current
+  phi_stagInc = phi
 
 !--------------------------------------------------------------------------------------------------
 ! updating damage state
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
     ce = ce + 1
-    call homogenization_set_phi(phi_current(i,j,k),ce)
+    call homogenization_set_phi(phi(i,j,k),ce)
   end do; end do; end do
 
   call VecMin(solution_vec,devNull,phi_min,err_PETSc)
@@ -261,7 +261,7 @@ subroutine grid_damage_spectral_forward(cutBack)
 
 
   if (cutBack) then
-    phi_current = phi_lastInc
+    phi = phi_lastInc
     phi_stagInc = phi_lastInc
 !--------------------------------------------------------------------------------------------------
 ! reverting damage field state
@@ -269,16 +269,16 @@ subroutine grid_damage_spectral_forward(cutBack)
     CHKERRQ(err_PETSc)
     call DMDAVecGetArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc) !< get the data out of PETSc to work with
     CHKERRQ(err_PETSc)
-    phi_PETSc = phi_current
+    phi_PETSc = phi
     call DMDAVecRestoreArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc)
     CHKERRQ(err_PETSc)
     ce = 0
     do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
       ce = ce + 1
-      call homogenization_set_phi(phi_current(i,j,k),ce)
+      call homogenization_set_phi(phi(i,j,k),ce)
     end do; end do; end do
   else
-    phi_lastInc = phi_current
+    phi_lastInc = phi
     call updateReference
   end if
 
@@ -297,7 +297,7 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
       x_scal
   PetscScalar, dimension( &
     X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
-      r
+      r !< residual
   PetscObject :: dummy
   PetscErrorCode, intent(out) :: err_PETSc
 
@@ -305,10 +305,8 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
   real(pReal), dimension(3,cells(1),cells(2),cells3) :: vectorField
 
 
-  phi_current = x_scal
-!--------------------------------------------------------------------------------------------------
-! evaluate polarization field
-  vectorField = utilities_ScalarGradient(phi_current)
+  phi = x_scal
+  vectorField = utilities_ScalarGradient(phi)
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
     ce = ce + 1
@@ -318,15 +316,13 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
     ce = ce + 1
-    r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_phi(phi_current(i,j,k),ce)) &
-             + homogenization_mu_phi(ce)*(phi_lastInc(i,j,k) - phi_current(i,j,k)) &
-             + mu_ref*phi_current(i,j,k)
+    r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_phi(phi(i,j,k),ce)) &
+             + homogenization_mu_phi(ce)*(phi_lastInc(i,j,k) - phi(i,j,k)) &
+             + mu_ref*phi(i,j,k)
   end do; end do; end do
 
-!--------------------------------------------------------------------------------------------------
-! constructing residual
   r = max(min(utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t),phi_lastInc),num%phi_min) &
-    - phi_current
+    - phi
   err_PETSc = 0
 
 end subroutine formResidual
grid_mechanical_FEM.f90:

@@ -111,10 +111,12 @@ subroutine grid_mechanical_FEM_init
      1.0_pReal,-1.0_pReal,-1.0_pReal,-1.0_pReal, &
      1.0_pReal, 1.0_pReal, 1.0_pReal, 1.0_pReal], [4,8])
   real(pReal), dimension(3,3,3,3) :: devNull
+  real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
+  real(pReal), dimension(3,product(cells(1:2))*cells3) :: temp3n
   PetscErrorCode :: err_PETSc
   integer(MPI_INTEGER_KIND) :: err_MPI
   PetscScalar, pointer, dimension(:,:,:,:) :: &
-    u_current,u_lastInc
+    u,u_lastInc
   PetscInt, dimension(0:worldsize-1) :: localK
   integer(HID_T) :: fileHandle, groupHandle
   type(tDict), pointer :: &
@@ -220,7 +222,7 @@ subroutine grid_mechanical_FEM_init
   CHKERRQ(err_PETSc)
   call VecSet(solution_rate ,0.0_pReal,err_PETSc)
   CHKERRQ(err_PETSc)
-  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
   CHKERRQ(err_PETSc)
   call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
   CHKERRQ(err_PETSc)
@@ -260,10 +262,14 @@ subroutine grid_mechanical_FEM_init
     call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
     call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
     if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-    call HDF5_read(F,groupHandle,'F')
-    call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
-    call HDF5_read(u_current,groupHandle,'u')
-    call HDF5_read(u_lastInc,groupHandle,'u_lastInc')
+    call HDF5_read(temp33n,groupHandle,'F')
+    F = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
+    call HDF5_read(temp33n,groupHandle,'F_lastInc')
+    F_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
+    call HDF5_read(temp3n,groupHandle,'u')
+    u = reshape(temp3n,[3,cells(1),cells(2),cells3])
+    call HDF5_read(temp3n,groupHandle,'u_lastInc')
+    u_lastInc = reshape(temp3n,[3,cells(1),cells(2),cells3])
 
   elseif (CLI_restartInc == 0) then restartRead
     F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3) ! initialize to identity
@@ -275,7 +281,7 @@ subroutine grid_mechanical_FEM_init
   call utilities_constitutiveResponse(P_current,P_av,C_volAvg,devNull, & ! stress field, stress avg, global average of stiffness and (min+max)/2
                                       F, & ! target F
                                       0.0_pReal) ! time increment
-  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
   CHKERRQ(err_PETSc)
   call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
   CHKERRQ(err_PETSc)
@@ -354,10 +360,10 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
     rotation_BC
   PetscErrorCode :: err_PETSc
   PetscScalar, pointer, dimension(:,:,:,:) :: &
-    u_current,u_lastInc
+    u,u_lastInc
 
 
-  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
   CHKERRQ(err_PETSc)
   call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
   CHKERRQ(err_PETSc)
@@ -410,7 +416,7 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
 
   call VecAXPY(solution_current,Delta_t,solution_rate,err_PETSc)
   CHKERRQ(err_PETSc)
-  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
   CHKERRQ(err_PETSc)
   call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
   CHKERRQ(err_PETSc)
@@ -441,10 +447,10 @@ subroutine grid_mechanical_FEM_restartWrite
 
   PetscErrorCode :: err_PETSc
   integer(HID_T) :: fileHandle, groupHandle
-  PetscScalar, dimension(:,:,:,:), pointer :: u_current,u_lastInc
+  PetscScalar, dimension(:,:,:,:), pointer :: u,u_lastInc
 
 
-  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
   CHKERRQ(err_PETSc)
   call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
   CHKERRQ(err_PETSc)
@@ -453,10 +459,10 @@ subroutine grid_mechanical_FEM_restartWrite
 
   fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
   groupHandle = HDF5_addGroup(fileHandle,'solver')
-  call HDF5_write(F,groupHandle,'F')
-  call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
-  call HDF5_write(u_current,groupHandle,'u')
-  call HDF5_write(u_lastInc,groupHandle,'u_lastInc')
+  call HDF5_write(reshape(F,[3,3,product(cells(1:2))*cells3]),groupHandle,'F')
+  call HDF5_write(reshape(F_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_lastInc')
+  call HDF5_write(reshape(u,[3,product(cells(1:2))*cells3]),groupHandle,'u')
+  call HDF5_write(reshape(u_lastInc,[3,product(cells(1:2))*cells3]),groupHandle,'u_lastInc')
   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)
 
@@ -473,7 +479,7 @@ subroutine grid_mechanical_FEM_restartWrite
     call HDF5_closeFile(fileHandle)
   endif
 
-  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
+  call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
   CHKERRQ(err_PETSc)
   call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
   CHKERRQ(err_PETSc)
grid_mechanical_spectral_basic.f90:

@@ -104,7 +104,7 @@ contains
 !--------------------------------------------------------------------------------------------------
 !> @brief allocates all necessary fields and fills them with data, potentially from restart info
 !--------------------------------------------------------------------------------------------------
-subroutine grid_mechanical_spectral_basic_init
+subroutine grid_mechanical_spectral_basic_init()
 
   real(pReal), dimension(3,3,cells(1),cells(2),cells3) :: P
   PetscErrorCode :: err_PETSc
@@ -112,6 +112,7 @@ subroutine grid_mechanical_spectral_basic_init
   PetscScalar, pointer, dimension(:,:,:,:) :: &
     F ! pointer to solution data
   PetscInt, dimension(0:worldsize-1) :: localK
+  real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
   integer(HID_T) :: fileHandle, groupHandle
 #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   type(MPI_File) :: fileUnit
@@ -229,8 +230,10 @@ subroutine grid_mechanical_spectral_basic_init
     call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
     call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
     if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-    call HDF5_read(F,groupHandle,'F')
-    call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
+    call HDF5_read(temp33n,groupHandle,'F')
+    F = reshape(temp33n,[9,cells(1),cells(2),cells3])
+    call HDF5_read(temp33n,groupHandle,'F_lastInc')
+    F_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
 
   elseif (CLI_restartInc == 0) then restartRead
     F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3) ! initialize to identity
@@ -421,8 +424,8 @@ subroutine grid_mechanical_spectral_basic_restartWrite
 
   fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
   groupHandle = HDF5_addGroup(fileHandle,'solver')
-  call HDF5_write(F,groupHandle,'F')
-  call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
+  call HDF5_write(reshape(F,[3,3,product(cells(1:2))*cells3]),groupHandle,'F')
+  call HDF5_write(reshape(F_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_lastInc')
   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)
 
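A detail worth noting in the two spectral mechanical solvers: F (and F_tau) are rank-4 pointers into the PETSc solution vector with a leading dimension of 9, the flattened 3x3 tensor. The restart code therefore reshapes the on-disk [3,3,N] data to [9,cells(1),cells(2),cells3] on read, while the [3,3,...] arrays reshape to [3,3,N] on write; both describe the same column-major memory, so no transposition occurs. A tiny illustrative check (not from the commit):

program tensor_layout
  implicit none
  integer :: i
  real, dimension(3,3,2) :: a33                      ! two 3x3 tensors
  real, dimension(9,2) :: a9                         ! same data, tensor components flattened

  a33 = reshape([(real(i),i=1,18)],[3,3,2])          ! entries 1..18 in memory order
  a9 = reshape(a33,[9,2])                            ! rank change only, memory order unchanged
  print *, all(a9(:,1) == reshape(a33(:,:,1),[9]))   ! prints T: the layouts coincide
end program tensor_layout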
grid_mechanical_spectral_polarisation.f90:

@@ -115,7 +115,7 @@ contains
 !--------------------------------------------------------------------------------------------------
 !> @brief allocates all necessary fields and fills them with data, potentially from restart info
 !--------------------------------------------------------------------------------------------------
-subroutine grid_mechanical_spectral_polarisation_init
+subroutine grid_mechanical_spectral_polarisation_init()
 
   real(pReal), dimension(3,3,cells(1),cells(2),cells3) :: P
   PetscErrorCode :: err_PETSc
@@ -125,6 +125,7 @@ subroutine grid_mechanical_spectral_polarisation_init
     F, & ! specific (sub)pointer
     F_tau ! specific (sub)pointer
   PetscInt, dimension(0:worldsize-1) :: localK
+  real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
   integer(HID_T) :: fileHandle, groupHandle
 #if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
   type(MPI_File) :: fileUnit
@@ -250,10 +251,14 @@ subroutine grid_mechanical_spectral_polarisation_init
     call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
     call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
     if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-    call HDF5_read(F,groupHandle,'F')
-    call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
-    call HDF5_read(F_tau,groupHandle,'F_tau')
-    call HDF5_read(F_tau_lastInc,groupHandle,'F_tau_lastInc')
+    call HDF5_read(temp33n,groupHandle,'F')
+    F = reshape(temp33n,[9,cells(1),cells(2),cells3])
+    call HDF5_read(temp33n,groupHandle,'F_lastInc')
+    F_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
+    call HDF5_read(temp33n,groupHandle,'F_tau')
+    F_tau = reshape(temp33n,[9,cells(1),cells(2),cells3])
+    call HDF5_read(temp33n,groupHandle,'F_tau_lastInc')
+    F_tau_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
 
   elseif (CLI_restartInc == 0) then restartRead
     F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3) ! initialize to identity
@@ -476,10 +481,10 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite
 
   fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
   groupHandle = HDF5_addGroup(fileHandle,'solver')
-  call HDF5_write(F,groupHandle,'F')
-  call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
-  call HDF5_write(F_tau,groupHandle,'F_tau')
-  call HDF5_write(F_tau_lastInc,groupHandle,'F_tau_lastInc')
+  call HDF5_write(reshape(F,[3,3,product(cells(1:2))*cells3]),groupHandle,'F')
+  call HDF5_write(reshape(F_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_lastInc')
+  call HDF5_write(reshape(F_tau,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_tau')
+  call HDF5_write(reshape(F_tau_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_tau_lastInc')
   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)
 
grid_thermal_spectral.f90:

@@ -48,7 +48,7 @@ module grid_thermal_spectral
   SNES :: SNES_thermal
   Vec :: solution_vec
   real(pReal), dimension(:,:,:), allocatable :: &
-    T_current, & !< field of current temperature
+    T, & !< field of current temperature
     T_lastInc, & !< field of previous temperature
     T_stagInc, & !< field of staggered temperature
     dotT_lastInc
@@ -78,6 +78,7 @@ subroutine grid_thermal_spectral_init()
   integer(MPI_INTEGER_KIND) :: err_MPI
   PetscErrorCode :: err_PETSc
   integer(HID_T) :: fileHandle, groupHandle
+  real(pReal), dimension(1,product(cells(1:2))*cells3) :: tempN
   type(tDict), pointer :: &
     num_grid
 
@@ -107,10 +108,10 @@ subroutine grid_thermal_spectral_init()
 
 !--------------------------------------------------------------------------------------------------
 ! init fields
-  T_current = discretization_grid_getInitialCondition('T')
-  T_lastInc = T_current
-  T_stagInc = T_current
-  dotT_lastInc = 0.0_pReal * T_current
+  T = discretization_grid_getInitialCondition('T')
+  T_lastInc = T
+  T_stagInc = T
+  dotT_lastInc = 0.0_pReal * T
 
 !--------------------------------------------------------------------------------------------------
 ! initialize solver specific parts of PETSc
@@ -151,20 +152,23 @@ subroutine grid_thermal_spectral_init()
     fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
     groupHandle = HDF5_openGroup(fileHandle,'solver')
 
-    call HDF5_read(T_current,groupHandle,'T',.false.)
-    call HDF5_read(T_lastInc,groupHandle,'T_lastInc',.false.)
-    call HDF5_read(dotT_lastInc,groupHandle,'dotT_lastInc',.false.)
+    call HDF5_read(tempN,groupHandle,'T',.false.)
+    T = reshape(tempN,[cells(1),cells(2),cells3])
+    call HDF5_read(tempN,groupHandle,'T_lastInc',.false.)
+    T_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
+    call HDF5_read(tempN,groupHandle,'dotT_lastInc',.false.)
+    dotT_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
   end if restartRead
 
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
     ce = ce + 1
-    call homogenization_thermal_setField(T_current(i,j,k),0.0_pReal,ce)
+    call homogenization_thermal_setField(T(i,j,k),0.0_pReal,ce)
   end do; end do; end do
 
   call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
   CHKERRQ(err_PETSc)
-  T_PETSc = T_current
+  T_PETSc = T
   call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
   CHKERRQ(err_PETSc)
 
@@ -207,20 +211,20 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
     solution%converged = .true.
     solution%iterationsNeeded = totalIter
   end if
-  stagNorm = maxval(abs(T_current - T_stagInc))
+  stagNorm = maxval(abs(T - T_stagInc))
   call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-  solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current))
+  solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T))
   call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
   if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
-  T_stagInc = T_current
+  T_stagInc = T
 
 !--------------------------------------------------------------------------------------------------
 ! updating thermal state
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
     ce = ce + 1
-    call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
+    call homogenization_thermal_setField(T(i,j,k),(T(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
   end do; end do; end do
 
   call VecMin(solution_vec,devNull,T_min,err_PETSc)
@@ -250,7 +254,7 @@ subroutine grid_thermal_spectral_forward(cutBack)
 
 
   if (cutBack) then
-    T_current = T_lastInc
+    T = T_lastInc
     T_stagInc = T_lastInc
 
 !--------------------------------------------------------------------------------------------------
@@ -259,17 +263,17 @@ subroutine grid_thermal_spectral_forward(cutBack)
     CHKERRQ(err_PETSc)
     call DMDAVecGetArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc) !< get the data out of PETSc to work with
     CHKERRQ(err_PETSc)
-    T_PETSc = T_current
+    T_PETSc = T
    call DMDAVecRestoreArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc)
     CHKERRQ(err_PETSc)
     ce = 0
     do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
       ce = ce + 1
-      call homogenization_thermal_setField(T_current(i,j,k),dotT_lastInc(i,j,k),ce)
+      call homogenization_thermal_setField(T(i,j,k),dotT_lastInc(i,j,k),ce)
     end do; end do; end do
   else
-    dotT_lastInc = (T_current - T_lastInc)/params%Delta_t
-    T_lastInc = T_current
+    dotT_lastInc = (T - T_lastInc)/params%Delta_t
+    T_lastInc = T
    call updateReference
   end if
 
@@ -295,9 +299,9 @@ subroutine grid_thermal_spectral_restartWrite
 
   fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a')
   groupHandle = HDF5_openGroup(fileHandle,'solver')
-  call HDF5_write(T,groupHandle,'T')
-  call HDF5_write(T_lastInc,groupHandle,'T_lastInc')
-  call HDF5_write(dotT_lastInc,groupHandle,'dotT_lastInc')
+  call HDF5_write(reshape(T,[1,product(cells(1:2))*cells3]),groupHandle,'T')
+  call HDF5_write(reshape(T_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'T_lastInc')
+  call HDF5_write(reshape(dotT_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'dotT_lastInc')
   call HDF5_closeGroup(groupHandle)
   call HDF5_closeFile(fileHandle)
 
@@ -309,7 +313,7 @@ end subroutine grid_thermal_spectral_restartWrite
 
 
 !--------------------------------------------------------------------------------------------------
-!> @brief Construct the residual vector.
+!> @brief forms the spectral thermal residual vector
 !--------------------------------------------------------------------------------------------------
 subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
 
@@ -320,7 +324,7 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
       x_scal
   PetscScalar, dimension( &
     X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
-      r
+      r !< residual
   PetscObject :: dummy
   PetscErrorCode, intent(out) :: err_PETSc
 
@@ -328,10 +332,8 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
   real(pReal), dimension(3,cells(1),cells(2),cells3) :: vectorField
 
 
-  T_current = x_scal
-!--------------------------------------------------------------------------------------------------
-! evaluate polarization field
-  vectorField = utilities_ScalarGradient(T_current)
+  T = x_scal
+  vectorField = utilities_ScalarGradient(T)
   ce = 0
   do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
     ce = ce + 1
@@ -342,13 +344,11 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
   do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
     ce = ce + 1
     r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_T(ce)) &
-             + homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T_current(i,j,k)) &
-             + mu_ref*T_current(i,j,k)
+             + homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T(i,j,k)) &
+             + mu_ref*T(i,j,k)
   end do; end do; end do
 
-!--------------------------------------------------------------------------------------------------
-! constructing residual
-  r = T_current &
+  r = T &
     - utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t)
   err_PETSc = 0
 
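Note that the thermal fields are scalars, yet they are stored as rank-2 [1,N] datasets rather than plain [N] vectors; presumably this keeps a uniform "components x cells" convention across all restart datasets, matching [3,3,N] for the tensor fields and [3,N] for the displacements.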