Merge branch 'thermal-restart' into development

Martin Diehl 2022-02-03 07:53:05 +01:00
commit 5d55392425
9 changed files with 123 additions and 28 deletions

@@ -1 +1 @@
-Subproject commit 419ba7b6f6989a14e0c91c1f1f6621daf8215b71
+Subproject commit ebb7f0ce78d11275020af0ba60f929f95b446932

View File

@@ -5,7 +5,7 @@ import os
import copy
import datetime
import warnings
-import xml.etree.ElementTree as ET
+import xml.etree.ElementTree as ET # noqa
import xml.dom.minidom
from pathlib import Path
from functools import partial

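Note: the '# noqa' markers added in this branch are flake8-style linter suppressions for the marked line. A minimal sketch of the presumable motivation, assuming flake8 as the linter: names that are only referenced inside an eval() string, as in the tests of the next file, look unused to a static checker.

    import numpy as np

    # Without the marker, a static linter may flag 'size' as assigned but never
    # used, because its only reference is hidden inside the eval() string.
    size = np.random.random(3)  # noqa
    coord0 = eval('size * 2')
    print(coord0)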
View File

@@ -8,19 +8,19 @@ from damask import seeds
class TestGridFilters:
def test_coordinates0_point(self):
-size = np.random.random(3)
+size = np.random.random(3) # noqa
cells = np.random.randint(8,32,(3))
coord = grid_filters.coordinates0_point(cells,size)
assert np.allclose(coord[0,0,0],size/cells*.5) and coord.shape == tuple(cells) + (3,)
def test_coordinates0_node(self):
-size = np.random.random(3)
+size = np.random.random(3) # noqa
cells = np.random.randint(8,32,(3))
coord = grid_filters.coordinates0_node(cells,size)
assert np.allclose(coord[-1,-1,-1],size) and coord.shape == tuple(cells+1) + (3,)
def test_coord0(self):
-size = np.random.random(3)
+size = np.random.random(3) # noqa
cells = np.random.randint(8,32,(3))
c = grid_filters.coordinates0_point(cells+1,size+size/cells)
n = grid_filters.coordinates0_node(cells,size) + size/cells*.5
@@ -28,16 +28,16 @@ class TestGridFilters:
@pytest.mark.parametrize('mode',['point','node'])
def test_grid_DNA(self,mode):
"""Ensure that cellsSizeOrigin_coordinates0_xx is the inverse of coordinates0_xx."""
"""Ensure that cellsSizeOrigin_coordinates0_xx is the inverse of coordinates0_xx.""" # noqa
cells = np.random.randint(8,32,(3))
size = np.random.random(3)
origin = np.random.random(3)
-coord0 = eval(f'grid_filters.coordinates0_{mode}(cells,size,origin)') # noqa
+coord0 = eval(f'grid_filters.coordinates0_{mode}(cells,size,origin)') # noqa
_cells,_size,_origin = eval(f'grid_filters.cellsSizeOrigin_coordinates0_{mode}(coord0.reshape(-1,3,order="F"))')
assert np.allclose(cells,_cells) and np.allclose(size,_size) and np.allclose(origin,_origin)
def test_displacement_fluct_equivalence(self):
"""Ensure that fluctuations are periodic."""
"""Ensure that fluctuations are periodic.""" # noqa
size = np.random.random(3)
cells = np.random.randint(8,32,(3))
F = np.random.random(tuple(cells)+(3,3))
@@ -45,14 +45,14 @@ class TestGridFilters:
grid_filters.point_to_node(grid_filters.displacement_fluct_point(size,F)))
def test_interpolation_to_node(self):
-size = np.random.random(3)
+size = np.random.random(3) # noqa
cells = np.random.randint(8,32,(3))
F = np.random.random(tuple(cells)+(3,3))
assert np.allclose(grid_filters.coordinates_node(size,F) [1:-1,1:-1,1:-1],
grid_filters.point_to_node(grid_filters.coordinates_point(size,F))[1:-1,1:-1,1:-1])
def test_interpolation_to_cell(self):
-cells = np.random.randint(1,30,(3))
+cells = np.random.randint(1,30,(3)) # noqa
coordinates_node_x = np.linspace(0,np.pi*2,num=cells[0]+1)
node_field_x = np.cos(coordinates_node_x)
@@ -66,7 +66,7 @@ class TestGridFilters:
@pytest.mark.parametrize('mode',['point','node'])
def test_coordinates0_origin(self,mode):
-origin= np.random.random(3)
+origin= np.random.random(3) # noqa
size = np.random.random(3) # noqa
cells = np.random.randint(8,32,(3))
shifted = eval(f'grid_filters.coordinates0_{mode}(cells,size,origin)')
@@ -79,7 +79,7 @@ class TestGridFilters:
@pytest.mark.parametrize('function',[grid_filters.displacement_avg_point,
grid_filters.displacement_avg_node])
def test_displacement_avg_vanishes(self,function):
"""Ensure that random fluctuations in F do not result in average displacement."""
"""Ensure that random fluctuations in F do not result in average displacement.""" # noqa
size = np.random.random(3)
cells = np.random.randint(8,32,(3))
F = np.random.random(tuple(cells)+(3,3))
@@ -89,7 +89,7 @@ class TestGridFilters:
@pytest.mark.parametrize('function',[grid_filters.displacement_fluct_point,
grid_filters.displacement_fluct_node])
def test_displacement_fluct_vanishes(self,function):
"""Ensure that constant F does not result in fluctuating displacement."""
"""Ensure that constant F does not result in fluctuating displacement.""" # noqa
size = np.random.random(3)
cells = np.random.randint(8,32,(3))
F = np.broadcast_to(np.random.random((3,3)), tuple(cells)+(3,3))
@@ -142,13 +142,13 @@ class TestGridFilters:
function(unordered,mode)
def test_regrid_identity(self):
-size = np.random.random(3)
+size = np.random.random(3) # noqa
cells = np.random.randint(8,32,(3))
F = np.broadcast_to(np.eye(3), tuple(cells)+(3,3))
assert all(grid_filters.regrid(size,F,cells) == np.arange(cells.prod()))
def test_regrid_double_cells(self):
-size = np.random.random(3)
+size = np.random.random(3) # noqa
cells = np.random.randint(8,32,(3))
g = Grid.from_Voronoi_tessellation(cells,size,seeds.from_random(size,10))
F = np.broadcast_to(np.eye(3), tuple(cells)+(3,3))

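The docstrings above state round-trip and invariance properties of damask.grid_filters. A minimal sketch of the 'DNA' (inverse) property, assuming the damask Python package is installed; the concrete numbers are illustrative only:

    import numpy as np
    from damask import grid_filters

    cells  = np.array([4, 8, 6])
    size   = np.array([1.0, 2.0, 0.5])
    origin = np.array([0.1, 0.0, -0.2])

    # coordinates0_point generates cell-center coordinates; its counterpart
    # cellsSizeOrigin_coordinates0_point recovers the generating parameters.
    coord0 = grid_filters.coordinates0_point(cells, size, origin)
    c, s, o = grid_filters.cellsSizeOrigin_coordinates0_point(coord0.reshape(-1, 3, order='F'))
    assert np.allclose(c, cells) and np.allclose(s, size) and np.allclose(o, origin)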
View File

@@ -466,7 +466,14 @@ program DAMASK_grid
call MPI_Allreduce(interface_SIGUSR2,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (mod(inc,loadCases(l)%f_restart) == 0 .or. signal) then
-call mechanical_restartWrite
+do field = 1, nActiveFields
+  select case (ID(field))
+    case(FIELD_MECH_ID)
+      call mechanical_restartWrite
+    case(FIELD_THERMAL_ID)
+      call grid_thermal_spectral_restartWrite
+  end select
+end do
call CPFEM_restartWrite
endif
if (signal) call interface_setSIGUSR2(.false.)

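The new loop replaces the unconditional mechanical-only call: each active field now writes its own restart data. A hedged paraphrase (not DAMASK API) of the surrounding trigger condition: restart data are written whenever the increment number is a multiple of the restart frequency, or when a SIGUSR2 signal was caught on any MPI rank (the MPI_Allreduce with MPI_LOR makes the flag global).

    # Toy predicate mirroring 'mod(inc,loadCases(l)%f_restart) == 0 .or. signal'
    def restart_due(inc: int, f_restart: int, signal_caught: bool) -> bool:
        return inc % f_restart == 0 or signal_caught

    assert restart_due(10, 5, False)
    assert restart_due(7, 5, True)
    assert not restart_due(7, 5, False)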
View File

@@ -16,6 +16,9 @@ module grid_thermal_spectral
use prec
use parallelization
use IO
+use DAMASK_interface
+use HDF5_utilities
+use HDF5
use spectral_utilities
use discretization_grid
use homogenization
@@ -54,13 +57,13 @@ module grid_thermal_spectral
public :: &
grid_thermal_spectral_init, &
grid_thermal_spectral_solution, &
+grid_thermal_spectral_restartWrite, &
grid_thermal_spectral_forward
contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all neccessary fields and fills them with data
-! ToDo: Restart not implemented
!--------------------------------------------------------------------------------------------------
subroutine grid_thermal_spectral_init(T_0)
@@ -72,6 +75,7 @@ subroutine grid_thermal_spectral_init(T_0)
PetscScalar, dimension(:,:,:), pointer :: T_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
+integer(HID_T) :: fileHandle, groupHandle
class(tNode), pointer :: &
num_grid
@@ -105,12 +109,6 @@ subroutine grid_thermal_spectral_init(T_0)
allocate(T_lastInc(grid(1),grid(2),grid3), source=T_0)
allocate(T_stagInc(grid(1),grid(2),grid3), source=T_0)
-ce = 0
-do k = 1, grid3; do j = 1, grid(2); do i = 1,grid(1)
-  ce = ce + 1
-  call homogenization_thermal_setField(T_0,0.0_pReal,ce)
-end do; end do; end do
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
call SNESCreate(PETSC_COMM_WORLD,SNES_thermal,err_PETSc)
@@ -142,13 +140,31 @@
CHKERRQ(err_PETSc)
call SNESSetFromOptions(SNES_thermal,err_PETSc) ! pull it all together with additional CLI arguments
CHKERRQ(err_PETSc)
+restartRead: if (interface_restartInc > 0) then
+  print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'
+  fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
+  groupHandle = HDF5_openGroup(fileHandle,'solver')
+  call HDF5_read(T_current,groupHandle,'T',.false.)
+  call HDF5_read(T_lastInc,groupHandle,'T_lastInc',.false.)
+end if restartRead
+
+ce = 0
+do k = 1, grid3; do j = 1, grid(2); do i = 1, grid(1)
+  ce = ce + 1
+  call homogenization_thermal_setField(T_current(i,j,k),0.0_pReal,ce)
+end do; end do; end do
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
T_PETSc = T_current
call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
-call updateReference()
+call updateReference
end subroutine grid_thermal_spectral_init
@@ -253,6 +269,37 @@ subroutine grid_thermal_spectral_forward(cutBack)
end subroutine grid_thermal_spectral_forward
+!--------------------------------------------------------------------------------------------------
+!> @brief Write current solver and constitutive data for restart to file
+!--------------------------------------------------------------------------------------------------
+subroutine grid_thermal_spectral_restartWrite
+
+  PetscErrorCode :: err_PETSc
+  DM :: dm_local
+  integer(HID_T) :: fileHandle, groupHandle
+  PetscScalar, dimension(:,:,:), pointer :: T
+
+  call SNESGetDM(SNES_thermal,dm_local,err_PETSc); CHKERRQ(err_PETSc)
+  call DMDAVecGetArrayF90(dm_local,solution_vec,T,err_PETSc); CHKERRQ(err_PETSc)
+
+  print'(1x,a)', 'writing thermal solver data required for restart to file'; flush(IO_STDOUT)
+
+  fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a')
+  groupHandle = HDF5_openGroup(fileHandle,'solver')
+  call HDF5_write(T,groupHandle,'T')
+  call HDF5_write(T_lastInc,groupHandle,'T_lastInc')
+  call HDF5_closeGroup(groupHandle)
+  call HDF5_closeFile(fileHandle)
+
+  call DMDAVecRestoreArrayF90(dm_local,solution_vec,T,err_PETSc); CHKERRQ(err_PETSc)
+
+end subroutine grid_thermal_spectral_restartWrite
!--------------------------------------------------------------------------------------------------
!> @brief forms the spectral thermal residual vector
!--------------------------------------------------------------------------------------------------

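The pair of hunks above gives the thermal grid solver the same HDF5 restart cycle the mechanical solver already had: grid_thermal_spectral_restartWrite appends the temperature fields to the job's restart file, and the restartRead block in _init reads them back and seeds the homogenization state from T_current instead of T_0. A minimal serial sketch of the resulting file layout, assuming h5py is available (DAMASK itself writes through its parallel HDF5_utilities wrappers; shapes and the job name are illustrative):

    import numpy as np
    import h5py

    T         = np.full((8, 8, 8), 293.15)   # stand-in for T_current on the grid
    T_lastInc = T.copy()

    with h5py.File('job_restart.hdf5', 'w') as f:   # the solver opens with 'a'
        solver = f.create_group('solver')
        solver['T']         = T                     # HDF5_write(T,groupHandle,'T')
        solver['T_lastInc'] = T_lastInc             # HDF5_write(T_lastInc,...)

    with h5py.File('job_restart.hdf5', 'r') as f:   # what restartRead expects
        assert np.allclose(f['solver/T'][()], T)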
View File

@@ -414,7 +414,7 @@ subroutine homogenization_restartWrite(fileHandle)
groupHandle(2) = HDF5_addGroup(groupHandle(1),material_name_homogenization(ho))
-call HDF5_write(homogState(ho)%state,groupHandle(2),'omega') ! ToDo: should be done by mech
+call HDF5_write(homogState(ho)%state,groupHandle(2),'omega_mechanical') ! ToDo: should be done by mech
call HDF5_closeGroup(groupHandle(2))
@@ -441,7 +441,7 @@ subroutine homogenization_restartRead(fileHandle)
groupHandle(2) = HDF5_openGroup(groupHandle(1),material_name_homogenization(ho))
-call HDF5_read(homogState(ho)%state0,groupHandle(2),'omega') ! ToDo: should be done by mech
+call HDF5_read(homogState(ho)%state0,groupHandle(2),'omega_mechanical') ! ToDo: should be done by mech
call HDF5_closeGroup(groupHandle(2))

View File

@@ -124,11 +124,20 @@ module phase
integer, intent(in) :: ph
end subroutine mechanical_restartWrite
+module subroutine thermal_restartWrite(groupHandle,ph)
+  integer(HID_T), intent(in) :: groupHandle
+  integer, intent(in) :: ph
+end subroutine thermal_restartWrite
module subroutine mechanical_restartRead(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
end subroutine mechanical_restartRead
+module subroutine thermal_restartRead(groupHandle,ph)
+  integer(HID_T), intent(in) :: groupHandle
+  integer, intent(in) :: ph
+end subroutine thermal_restartRead
module function mechanical_S(ph,en) result(S)
integer, intent(in) :: ph,en
@@ -641,6 +650,7 @@ subroutine phase_restartWrite(fileHandle)
groupHandle(2) = HDF5_addGroup(groupHandle(1),material_name_phase(ph))
call mechanical_restartWrite(groupHandle(2),ph)
+call thermal_restartWrite(groupHandle(2),ph)
call HDF5_closeGroup(groupHandle(2))
@@ -669,6 +679,7 @@ subroutine phase_restartRead(fileHandle)
groupHandle(2) = HDF5_openGroup(groupHandle(1),material_name_phase(ph))
call mechanical_restartRead(groupHandle(2),ph)
+call thermal_restartRead(groupHandle(2),ph)
call HDF5_closeGroup(groupHandle(2))

View File

@@ -1248,7 +1248,7 @@ module subroutine mechanical_restartWrite(groupHandle,ph)
integer, intent(in) :: ph
-call HDF5_write(plasticState(ph)%state,groupHandle,'omega')
+call HDF5_write(plasticState(ph)%state,groupHandle,'omega_plastic')
call HDF5_write(phase_mechanical_Fi(ph)%data,groupHandle,'F_i')
call HDF5_write(phase_mechanical_Li(ph)%data,groupHandle,'L_i')
call HDF5_write(phase_mechanical_Lp(ph)%data,groupHandle,'L_p')
@@ -1265,7 +1265,7 @@ module subroutine mechanical_restartRead(groupHandle,ph)
integer, intent(in) :: ph
-call HDF5_read(plasticState(ph)%state0,groupHandle,'omega')
+call HDF5_read(plasticState(ph)%state0,groupHandle,'omega_plastic')
call HDF5_read(phase_mechanical_Fi0(ph)%data,groupHandle,'F_i')
call HDF5_read(phase_mechanical_Li0(ph)%data,groupHandle,'L_i')
call HDF5_read(phase_mechanical_Lp0(ph)%data,groupHandle,'L_p')

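With thermal restart data now stored next to the mechanical data, the generic dataset name 'omega' is disambiguated: 'omega_mechanical' at the homogenization level, 'omega_plastic' at the phase level, and 'omega_thermal' in the following file. A hedged h5py sketch of the resulting per-phase layout (the group path and phase name are assumptions for illustration, not taken from the diff):

    import numpy as np
    import h5py

    with h5py.File('job_restart.hdf5', 'a') as f:
        ph = f.require_group('phase/Aluminum')    # hypothetical phase name
        ph['omega_plastic'] = np.zeros((12, 4))   # plasticState(ph)%state, was 'omega'
        ph['omega_thermal'] = np.zeros((12, 1))   # thermalState(ph)%p(so)%state
        print(sorted(ph))                         # ['omega_plastic', 'omega_thermal']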
View File

@@ -254,6 +254,36 @@ function integrateThermalState(Delta_t, ph,en) result(broken)
end function integrateThermalState
+module subroutine thermal_restartWrite(groupHandle,ph)
+
+  integer(HID_T), intent(in) :: groupHandle
+  integer, intent(in) :: ph
+
+  integer(HID_T) :: new_group
+  integer :: so
+
+  do so = 1,thermal_Nsources(ph)
+    call HDF5_write(thermalState(ph)%p(so)%state,groupHandle,'omega_thermal')
+  enddo
+
+end subroutine thermal_restartWrite
+
+
+module subroutine thermal_restartRead(groupHandle,ph)
+
+  integer(HID_T), intent(in) :: groupHandle
+  integer, intent(in) :: ph
+
+  integer(HID_T) :: new_group
+  integer :: so
+
+  do so = 1,thermal_Nsources(ph)
+    call HDF5_read(thermalState(ph)%p(so)%state0,groupHandle,'omega_thermal')
+  enddo
+
+end subroutine thermal_restartRead
module subroutine thermal_forward()
integer :: ph, so