Merge branch 'development' into 228-unit-tests-for-spectral-functionality

Philip Eisenlohr 2022-12-05 11:01:48 -05:00
commit 65cb9d5997
30 changed files with 611 additions and 439 deletions

@ -1 +1 @@
Subproject commit 81f5f24d076a623e6052c234825c591267915285
Subproject commit 599d5accf4f5249257972cef997e8078e857c5a1

View File

@ -1 +1 @@
3.0.0-alpha7-160-g92ae86b63
3.0.0-alpha7-177-g30998d667

View File

@ -26,11 +26,8 @@ class Rotation:
- Coordinate frames are right-handed.
- A rotation angle ω is taken to be positive for a counterclockwise rotation
when viewing from the end point of the rotation axis towards the origin.
- Rotations will be interpreted in the passive sense.
- Euler angle triplets are implemented using the Bunge convention,
with angular ranges of [0,2π], [0,π], [0,2π].
- The rotation angle ω is limited to the interval [0,π].
- The real part of a quaternion is positive, Re(q) ≥ 0.
- Rotations will be interpreted in the passive sense, i.e. as rotation of
the coordinate frame.
- P = -1 (as default).
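A minimal sketch of these conventions, assuming only the `damask` package (constructors as used elsewhere in this diff):

```python
import numpy as np
from damask import Rotation

r = Rotation.from_Euler_angles(np.array([0.1, 0.2, 0.3]))  # Bunge Euler angles, radians
q = r.as_quaternion()
assert q[0] >= 0.0                                         # Re(q) >= 0 by convention
assert np.isclose(np.linalg.norm(q), 1.0)                  # unit quaternion
```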
Examples
@ -879,6 +876,10 @@ class Rotation:
reciprocal : bool, optional
Basis vectors are given in reciprocal (instead of real) space. Defaults to False.
Returns
-------
new : damask.Rotation
"""
om = np.array(basis,dtype=float)
if om.shape[-2:] != (3,3): raise ValueError('invalid shape')
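A usage sketch for the check above, assuming the `from_basis` API documented in this hunk:

```python
import numpy as np
from damask import Rotation

r = Rotation.from_basis(np.eye(3))        # orthonormal basis -> identity rotation
assert np.allclose(r.as_matrix(), np.eye(3))

try:
    Rotation.from_basis(np.ones((3, 2)))  # last two axes must be (3,3)
except ValueError as e:
    print(e)                              # 'invalid shape'
```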

View File

@ -116,7 +116,10 @@ class VTK:
"""
s = vtk.vtkStringArray()
s.SetName('comments')
for c in util.tail_repack(comments,self.comments):
comments_ = util.tail_repack(comments,self.comments) if comments[:len(self.comments)] == self.comments else \
[comments] if isinstance(comments,str) else \
comments
for c in comments_:
s.InsertNextValue(c)
self.vtk_data.GetFieldData().AddArray(s)
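For clarity, a hypothetical standalone equivalent of the selection chain above (function name invented; `util.tail_repack` as used in the diff):

```python
from damask import util

def normalize_comments(comments, existing):
    """Wrap a bare string, repair a potentially auto-split tail, else pass through."""
    if isinstance(comments, str):
        return [comments]
    if list(comments[:len(existing)]) == list(existing):
        return util.tail_repack(comments, existing)
    return list(comments)
```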
@ -547,9 +550,11 @@ class VTK:
Notes
-----
See http://compilatrix.com/article/vtk-1 for further ideas.
The first component is shown when visualizing vector datasets
(this includes tensor datasets because they are flattened).
"""
# See http://compilatrix.com/article/vtk-1 for possible improvements.
try:
import wx
_ = wx.App(False) # noqa

View File

@ -574,7 +574,7 @@ def _docstringer(docstring: _Union[str, _Callable],
shift = min([len(line)-len(line.lstrip(' '))-indent for line in content])
extra = '\n'.join([(line[shift:] if shift > 0 else
f'{" "*-shift}{line}') for line in content])
docstring_ = _re.sub(fr'(^([ ]*){key}\s*\n\2{"-"*len(key)}[\n ]*[A-Za-z0-9 ]*: ([^\n]+\n)*)',
docstring_ = _re.sub(fr'(^([ ]*){key}\s*\n\2{"-"*len(key)}[\n ]*[A-Za-z0-9_ ]*: ([^\n]+\n)*)',
fr'\1{extra}\n',
docstring_,flags=_re.MULTILINE)
@ -590,7 +590,7 @@ def _docstringer(docstring: _Union[str, _Callable],
+(return_class.__name__ if not isinstance(return_class,str) else return_class)
)
return _re.sub(r'(^([ ]*)Returns\s*\n\2-------\s*\n[ ]*[A-Za-z0-9 ]*: )(.*)\n',
return _re.sub(r'(^([ ]*)Returns\s*\n\2-------\s*\n[ ]*[A-Za-z0-9_ ]*: )(.*)\n',
fr'\1{return_type_}\n',
docstring_,flags=_re.MULTILINE)
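The added `_` in the character class is what lets underscored names match; a small self-contained check (example docstring invented):

```python
import re

doc = ('Returns\n'
       '-------\n'
       'new_rotation : damask.Rotation\n')
pattern = r'(^([ ]*)Returns\s*\n\2-------\s*\n[ ]*[A-Za-z0-9_ ]*: )(.*)\n'
assert re.search(pattern, doc, flags=re.MULTILINE)                       # matches with '_'
assert not re.search(pattern.replace('_', ''), doc, flags=re.MULTILINE)  # fails without it
```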
@ -793,7 +793,7 @@ def tail_repack(extended: _Union[str, _Sequence[str]],
Parameters
----------
extended : (list of) str
extended : (sequence of) str
Extended string list with potentially auto-split trailing string relative to `existing`.
existing : list of str
Base string list.
@ -811,9 +811,9 @@ def tail_repack(extended: _Union[str, _Sequence[str]],
['a','new','shiny','e','n','t','r','y']
"""
return [extended] if isinstance(extended,str) else existing + \
([''.join(extended[len(existing):])] if _np.prod([len(i) for i in extended[len(existing):]]) == 1 else
list(extended[len(existing):]))
new = extended[len(existing):]
return [extended] if isinstance(extended,str) else \
existing + list([''.join(new)] if _np.prod([len(i) for i in new]) == 1 else new)
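A usage sketch matching the docstring example above (a trailing run of single characters is re-joined into one string):

```python
from damask import util

existing = ['a', 'new', 'shiny']
assert util.tail_repack(['a', 'new', 'shiny', 'e', 'n', 't', 'r', 'y'], existing) == \
       ['a', 'new', 'shiny', 'entry']
assert util.tail_repack('a single string', existing) == ['a single string']
```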
def aslist(arg: _Union[_IntCollection, int, None]) -> _List:

View File

@ -7,6 +7,7 @@ from damask import Table
from damask import _rotation
from damask import grid_filters
from damask import util
from damask import tensor
n = 1000
atol=1.e-4
@ -20,6 +21,16 @@ def ref_path(ref_path_base):
def set_of_rotations(set_of_quaternions):
return [Rotation.from_quaternion(s) for s in set_of_quaternions]
@pytest.fixture
def multidim_rotations(set_of_quaternions):
L = len(set_of_quaternions)
i = 0
while L%(f:=np.random.randint(2,np.sqrt(L).astype(int))) > 0 and i<L:
i += 1
f = i if i == L else f
return Rotation.from_quaternion(set_of_quaternions.reshape((L//f,f,-1)))
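The loop above looks for a random factor `f` of `L` so the flat quaternion set can be reshaped into a multi-dimensional array; a hypothetical standalone version of the same search:

```python
import numpy as np

def random_factor(L):
    """Random divisor of L in [2, sqrt(L)), falling back to L after L failed draws."""
    for _ in range(L):
        f = np.random.randint(2, int(np.sqrt(L)))
        if L % f == 0:
            return f
    return L

f = random_factor(1000)
assert 1000 % f == 0          # (1000//f, f, 4) is then a valid quaternion array shape
```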
####################################################################################################
# Code below available according to the following conditions
@ -691,117 +702,156 @@ class TestRotation:
def test_to_numpy(self):
r = Rotation.from_random(np.random.randint(0,10,4))
assert np.all(r.as_quaternion() == np.array(r))
assert (r.as_quaternion() == np.array(r)).all()
@pytest.mark.parametrize('degrees',[True,False])
def test_Eulers(self,set_of_rotations,degrees):
for rot in set_of_rotations:
m = rot.as_quaternion()
o = Rotation.from_Euler_angles(rot.as_Euler_angles(degrees),degrees).as_quaternion()
ok = np.allclose(m,o,atol=atol)
if np.isclose(rot.as_quaternion()[0],0.0,atol=atol):
ok |= np.allclose(m*-1.,o,atol=atol)
assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}'
def test_bounds(self,multidim_rotations):
m = multidim_rotations
@pytest.mark.parametrize('P',[1,-1])
@pytest.mark.parametrize('normalize',[True,False])
@pytest.mark.parametrize('degrees',[True,False])
def test_axis_angle(self,set_of_rotations,degrees,normalize,P):
c = np.array([P*-1,P*-1,P*-1,1.])
c[:3] *= 0.9 if normalize else 1.0
for rot in set_of_rotations:
m = rot.as_Euler_angles()
o = Rotation.from_axis_angle(rot.as_axis_angle(degrees)*c,degrees,normalize,P).as_Euler_angles()
u = np.array([np.pi*2,np.pi,np.pi*2])
ok = np.allclose(m,o,atol=atol)
ok |= np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol)
if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol):
sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]])
ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol)
assert ok and (np.zeros(3)-1.e-9 <= o).all() \
and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}'
q = m.as_quaternion()
assert np.allclose(1.,np.linalg.norm(q,axis=-1))
def test_matrix(self,set_of_rotations):
for rot in set_of_rotations:
m = rot.as_axis_angle()
o = Rotation.from_axis_angle(rot.as_axis_angle()).as_axis_angle()
ok = np.allclose(m,o,atol=atol)
if np.isclose(m[3],np.pi,atol=atol):
ok |= np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol)
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) \
and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}'
v = m.as_Rodrigues_vector(compact=False)
assert np.allclose(1.,np.linalg.norm(v[...,:3],axis=-1))
def test_parallel(self,set_of_rotations):
a = np.array([[1.0,0.0,0.0],
[0.0,1.0,0.0]])
for rot in set_of_rotations:
assert rot.allclose(Rotation.from_parallel(a,rot.broadcast_to((2,))@a))
v = m.as_axis_angle(degrees=False)
assert np.allclose(1.,np.linalg.norm(v[...,:3],axis=-1))
assert (v[...,3] >= 0.).all() and (v < np.pi+1.e-9).all()
@pytest.mark.parametrize('P',[1,-1])
@pytest.mark.parametrize('normalize',[True,False])
def test_Rodrigues(self,set_of_rotations,normalize,P):
c = np.array([P*-1,P*-1,P*-1,1.])
c[:3] *= 0.9 if normalize else 1.0
for rot in set_of_rotations:
m = rot.as_matrix()
o = Rotation.from_Rodrigues_vector(rot.as_Rodrigues_vector()*c,normalize,P).as_matrix()
ok = np.allclose(m,o,atol=atol)
assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o}'
r = m.as_matrix()
assert np.allclose(1.,np.linalg.det(r))
def test_Rodrigues_compact(self,set_of_rotations):
for rot in set_of_rotations:
c = rot.as_Rodrigues_vector(compact=True)
r = rot.as_Rodrigues_vector(compact=False)
assert np.allclose(r[:3]*r[3], c, equal_nan=True)
e = m.as_Euler_angles(degrees=False)
assert (e >= 0.).all() and (e < np.pi*np.array([2.,1.,2.])+1.e-9).all()
c = m.as_cubochoric()
assert (np.linalg.norm(c,ord=np.inf,axis=-1) < np.pi**(2./3.)*0.5+1.e-9).all()
h = m.as_homochoric()
assert (np.linalg.norm(h,axis=-1) < (3.*np.pi/4.)**(1./3.) + 1.e-9).all()
@pytest.mark.parametrize('P',[1,-1])
def test_homochoric(self,set_of_rotations,P):
cutoff = np.tan(np.pi*.5*(1.-1e-4))
for rot in set_of_rotations:
m = rot.as_Rodrigues_vector()
o = Rotation.from_homochoric(rot.as_homochoric()*P*-1,P).as_Rodrigues_vector()
ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol)
ok |= np.isclose(m[3],0.0,atol=atol)
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}'
@pytest.mark.parametrize('P',[1,-1])
def test_cubochoric(self,set_of_rotations,P):
for rot in set_of_rotations:
m = rot.as_homochoric()
o = Rotation.from_cubochoric(rot.as_cubochoric()*P*-1,P).as_homochoric()
ok = np.allclose(m,o,atol=atol)
assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) + 1.e-9, f'{m},{o},{rot.as_quaternion()}'
@pytest.mark.parametrize('P',[1,-1])
@pytest.mark.parametrize('accept_homomorph',[True,False])
@pytest.mark.parametrize('normalize',[True,False])
def test_quaternion(self,set_of_rotations,P,accept_homomorph,normalize):
c = np.array([1,P*-1,P*-1,P*-1]) * (-1 if accept_homomorph else 1) * (0.9 if normalize else 1.0)
for rot in set_of_rotations:
m = rot.as_cubochoric()
o = Rotation.from_quaternion(rot.as_quaternion()*c,accept_homomorph,normalize,P).as_cubochoric()
ok = np.allclose(m,o,atol=atol)
if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)):
ok |= np.allclose(m*-1.,o,atol=atol)
assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9, f'{m},{o},{rot.as_quaternion()}'
@pytest.mark.parametrize('P',[1,-1])
def test_quaternion(self,multidim_rotations,accept_homomorph,normalize,P):
c = np.array([1,-P,-P,-P]) * (-1 if accept_homomorph else 1) * (0.9 if normalize else 1.0)
m = multidim_rotations
o = Rotation.from_quaternion(m.as_quaternion()*c,
accept_homomorph=accept_homomorph,
normalize=normalize,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
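The fallback `f` accounts for the quaternion double cover: `q` and `-q` encode the same rotation, and for Re(q) ≈ 0 the sign convention cannot disambiguate, so a round trip may return the antipodal representation. In plain numpy terms:

```python
import numpy as np

def rotate(q, v):
    """Rotate v by unit quaternion q = (w, x, y, z)."""
    w, u = q[0], q[1:]
    return v + 2.0*np.cross(u, np.cross(u, v) + w*v)

q = np.array([0., 0., 0., 1.])                    # 180 deg about z: Re(q) = 0
v = np.array([1., 2., 3.])
assert np.allclose(rotate(q, v), rotate(-q, v))   # q and -q act identically
```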
@pytest.mark.parametrize('degrees',[True,False])
def test_Eulers(self,multidim_rotations,degrees):
m = multidim_rotations
o = Rotation.from_Euler_angles(m.as_Euler_angles(degrees),
degrees=degrees)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
@pytest.mark.parametrize('degrees',[True,False])
@pytest.mark.parametrize('normalize',[True,False])
@pytest.mark.parametrize('P',[1,-1])
def test_axis_angle(self,multidim_rotations,degrees,normalize,P):
c = np.array([-P,-P,-P,1.])
c[:3] *= 0.9 if normalize else 1.0
m = multidim_rotations
o = Rotation.from_axis_angle(m.as_axis_angle(degrees)*c,
degrees=degrees,
normalize=normalize,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
def test_matrix(self,multidim_rotations):
m = multidim_rotations
o = Rotation.from_matrix(m.as_matrix())
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
def test_parallel(self,multidim_rotations):
m = multidim_rotations
a = np.broadcast_to(np.array([[1.0,0.0,0.0],
[0.0,1.0,0.0]]),m.shape+(2,3))
assert m.allclose(Rotation.from_parallel(a,m.broadcast_to(m.shape+(2,))@a))
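The scalar analogue of the broadcast round trip above, with the API as used in this hunk:

```python
import numpy as np
from damask import Rotation

r = Rotation.from_random()
a = np.array([[1., 0., 0.],
              [0., 1., 0.]])
assert r.allclose(Rotation.from_parallel(a, r.broadcast_to((2,)) @ a))
```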
@pytest.mark.parametrize('normalize',[True,False])
@pytest.mark.parametrize('P',[1,-1])
def test_Rodrigues(self,multidim_rotations,normalize,P):
c = np.array([-P,-P,-P,1.])
c[:3] *= 0.9 if normalize else 1.0
m = multidim_rotations
o = Rotation.from_Rodrigues_vector(m.as_Rodrigues_vector()*c,
normalize=normalize,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
def test_Rodrigues_compact(self,multidim_rotations):
m = multidim_rotations
c = m.as_Rodrigues_vector(compact=True)
r = m.as_Rodrigues_vector(compact=False)
assert np.allclose(r[...,:3]*r[...,3:], c, equal_nan=True)
@pytest.mark.parametrize('P',[1,-1])
def test_homochoric(self,multidim_rotations,P):
m = multidim_rotations
o = Rotation.from_homochoric(m.as_homochoric()*-P,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
@pytest.mark.parametrize('P',[1,-1])
def test_cubochoric(self,multidim_rotations,P):
m = multidim_rotations
o = Rotation.from_cubochoric(m.as_cubochoric()*-P,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
@pytest.mark.parametrize('reciprocal',[True,False])
def test_basis(self,set_of_rotations,reciprocal):
for rot in set_of_rotations:
om = rot.as_matrix() + 0.1*np.eye(3)
rot = Rotation.from_basis(om,False,reciprocal=reciprocal)
assert np.isclose(np.linalg.det(rot.as_matrix()),1.0)
def test_basis(self,multidim_rotations,reciprocal):
m = multidim_rotations
r = m.as_matrix()
r = np.linalg.inv(tensor.transpose(r)/np.pi) if reciprocal else r
o = Rotation.from_basis(r,
reciprocal=reciprocal)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
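The reciprocal-basis construction relies on inv(Rᵀ) = R for orthonormal R; the arbitrary 1/π scaling checks that `from_basis(..., reciprocal=True)` normalizes the lattice constant away. A quick numpy check of the identity:

```python
import numpy as np

R = np.array([[0., -1., 0.],    # 90 deg rotation about z (orthonormal)
              [1.,  0., 0.],
              [0.,  0., 1.]])
assert np.allclose(np.linalg.inv(R.T), R)                # inv(R^T) == R
assert np.allclose(np.linalg.inv(R.T/np.pi), np.pi*R)    # scaling factors out linearly
```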
@pytest.mark.parametrize('shape',[None,1,(4,4)])
def test_random(self,shape):
r = Rotation.from_random(shape)
if shape is None:
assert r.shape == ()
elif shape == 1:
assert r.shape == (1,)
else:
assert r.shape == shape
assert r.shape == (() if shape is None else (1,) if shape == 1 else shape)
@pytest.mark.parametrize('shape',[None,5,(4,6)])
def test_equal(self,shape):
@ -947,13 +997,13 @@ class TestRotation:
p = np.random.rand(n,3)
o = Rotation._get_pyramid_order(p,direction)
for i,o_i in enumerate(o):
assert np.all(o_i==Rotation._get_pyramid_order(p[i],direction))
assert (o_i==Rotation._get_pyramid_order(p[i],direction)).all()
def test_pyramid_invariant(self):
a = np.random.rand(n,3)
f = Rotation._get_pyramid_order(a,'forward')
b = Rotation._get_pyramid_order(a,'backward')
assert np.all(np.take_along_axis(np.take_along_axis(a,f,-1),b,-1) == a)
assert (np.take_along_axis(np.take_along_axis(a,f,-1),b,-1) == a).all()
@pytest.mark.parametrize('data',[np.random.rand(5,3),

View File

@ -764,7 +764,7 @@ end subroutine dct
!--------------------------------------------------------------------------------------------------
! @brief decide whether next block is list or dict
! @brief Decide whether next block is list or dict.
!--------------------------------------------------------------------------------------------------
recursive subroutine decide(blck,flow,s_blck,s_flow,offset)
@ -811,7 +811,7 @@ recursive subroutine decide(blck,flow,s_blck,s_flow,offset)
end if
end if
end subroutine
end subroutine decide
!--------------------------------------------------------------------------------------------------

View File

@ -482,6 +482,8 @@ program DAMASK_grid
call mechanical_restartWrite
case(FIELD_THERMAL_ID)
call grid_thermal_spectral_restartWrite
case(FIELD_DAMAGE_ID)
call grid_damage_spectral_restartWrite
end select
end do
call materialpoint_restartWrite
@ -526,6 +528,6 @@ subroutine getMaskedTensor(values,mask,tensor)
end do
end do
end subroutine
end subroutine getMaskedTensor
end program DAMASK_grid

View File

@ -222,7 +222,7 @@ subroutine cellsSizeOrigin(c,s,o,header)
temp = getXMLValue(header,'Origin')
o = [(IO_floatValue(temp,IO_stringPos(temp),i),i=1,3)]
end subroutine
end subroutine cellsSizeOrigin
!--------------------------------------------------------------------------------------------------
@ -421,7 +421,7 @@ pure function getXMLValue(line,key)
end if
end if
end function
end function getXMLValue
!--------------------------------------------------------------------------------------------------

View File

@ -16,6 +16,9 @@ module grid_damage_spectral
use prec
use parallelization
use IO
use CLI
use HDF5_utilities
use HDF5
use spectral_utilities
use discretization_grid
use homogenization
@ -46,7 +49,7 @@ module grid_damage_spectral
SNES :: SNES_damage
Vec :: solution_vec
real(pReal), dimension(:,:,:), allocatable :: &
phi_current, & !< field of current damage
phi, & !< field of current damage
phi_lastInc, & !< field of previous damage
phi_stagInc !< field of staggered damage
@ -59,13 +62,13 @@ module grid_damage_spectral
public :: &
grid_damage_spectral_init, &
grid_damage_spectral_solution, &
grid_damage_spectral_restartWrite, &
grid_damage_spectral_forward
contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields and fills them with data
! ToDo: Restart not implemented
!--------------------------------------------------------------------------------------------------
subroutine grid_damage_spectral_init()
@ -76,6 +79,8 @@ subroutine grid_damage_spectral_init()
Vec :: uBound, lBound
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle
real(pReal), dimension(1,product(cells(1:2))*cells3) :: tempN
type(tDict), pointer :: &
num_grid, &
num_generic
@ -112,9 +117,9 @@ subroutine grid_damage_spectral_init()
!--------------------------------------------------------------------------------------------------
! init fields
phi_current = discretization_grid_getInitialCondition('phi')
phi_lastInc = phi_current
phi_stagInc = phi_current
phi = discretization_grid_getInitialCondition('phi')
phi_lastInc = phi
phi_stagInc = phi
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
@ -167,15 +172,27 @@ subroutine grid_damage_spectral_init()
CHKERRQ(err_PETSc)
end if
restartRead: if (CLI_restartInc > 0) then
print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(tempN,groupHandle,'phi',.false.)
phi = reshape(tempN,[cells(1),cells(2),cells3])
call HDF5_read(tempN,groupHandle,'phi_lastInc',.false.)
phi_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
end if restartRead
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
ce = ce + 1
call homogenization_set_phi(phi_current(i,j,k),ce)
call homogenization_set_phi(phi(i,j,k),ce)
end do; end do; end do
call DMDAVecGetArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
phi_PETSc = phi_current
phi_PETSc = phi
call DMDAVecRestoreArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
@ -218,20 +235,20 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
solution%converged = .true.
solution%iterationsNeeded = totalIter
end if
stagNorm = maxval(abs(phi_current - phi_stagInc))
stagNorm = maxval(abs(phi - phi_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi_current))
solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi))
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
phi_stagInc = phi_current
phi_stagInc = phi
!--------------------------------------------------------------------------------------------------
! updating damage state
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
call homogenization_set_phi(phi_current(i,j,k),ce)
call homogenization_set_phi(phi(i,j,k),ce)
end do; end do; end do
call VecMin(solution_vec,devNull,phi_min,err_PETSc)
@ -261,7 +278,7 @@ subroutine grid_damage_spectral_forward(cutBack)
if (cutBack) then
phi_current = phi_lastInc
phi = phi_lastInc
phi_stagInc = phi_lastInc
!--------------------------------------------------------------------------------------------------
! reverting damage field state
@ -269,22 +286,52 @@ subroutine grid_damage_spectral_forward(cutBack)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc) !< get the data out of PETSc to work with
CHKERRQ(err_PETSc)
phi_PETSc = phi_current
phi_PETSc = phi
call DMDAVecRestoreArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
call homogenization_set_phi(phi_current(i,j,k),ce)
call homogenization_set_phi(phi(i,j,k),ce)
end do; end do; end do
else
phi_lastInc = phi_current
phi_lastInc = phi
call updateReference
end if
end subroutine grid_damage_spectral_forward
!--------------------------------------------------------------------------------------------------
!> @brief Write current solver and constitutive data for restart to file.
!--------------------------------------------------------------------------------------------------
subroutine grid_damage_spectral_restartWrite
PetscErrorCode :: err_PETSc
DM :: dm_local
integer(HID_T) :: fileHandle, groupHandle
PetscScalar, dimension(:,:,:), pointer :: phi
call SNESGetDM(SNES_damage,dm_local,err_PETSc);
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,phi,err_PETSc);
CHKERRQ(err_PETSc)
print'(1x,a)', 'writing damage solver data required for restart to file'; flush(IO_STDOUT)
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_write(reshape(phi,[1,product(cells(1:2))*cells3]),groupHandle,'phi')
call HDF5_write(reshape(phi_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'phi_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
call DMDAVecRestoreArrayF90(dm_local,solution_vec,phi,err_PETSc);
CHKERRQ(err_PETSc)
end subroutine grid_damage_spectral_restartWrite
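A sketch of inspecting the restart data written above from Python, assuming `h5py` and a job name of `job` (group and dataset names follow the Fortran code; the on-disk shape is HDF5's row-major view of the Fortran array):

```python
import h5py

with h5py.File('job_restart.hdf5', 'r') as f:
    phi = f['solver/phi'][()]
    phi_lastInc = f['solver/phi_lastInc'][()]
print(phi.shape, phi.min(), phi.max())  # one damage value per grid cell
```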
!--------------------------------------------------------------------------------------------------
!> @brief Construct the residual vector.
!--------------------------------------------------------------------------------------------------
@ -297,7 +344,7 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
x_scal
PetscScalar, dimension( &
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
r
r !< residual
PetscObject :: dummy
PetscErrorCode, intent(out) :: err_PETSc
@ -305,10 +352,8 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
real(pReal), dimension(3,cells(1),cells(2),cells3) :: vectorField
phi_current = x_scal
!--------------------------------------------------------------------------------------------------
! evaluate polarization field
vectorField = utilities_ScalarGradient(phi_current)
phi = x_scal
vectorField = utilities_ScalarGradient(phi)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
@ -318,22 +363,20 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_phi(phi_current(i,j,k),ce)) &
+ homogenization_mu_phi(ce)*(phi_lastInc(i,j,k) - phi_current(i,j,k)) &
+ mu_ref*phi_current(i,j,k)
r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_phi(phi(i,j,k),ce)) &
+ homogenization_mu_phi(ce)*(phi_lastInc(i,j,k) - phi(i,j,k)) &
+ mu_ref*phi(i,j,k)
end do; end do; end do
!--------------------------------------------------------------------------------------------------
! constructing residual
r = max(min(utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t),phi_lastInc),num%phi_min) &
- phi_current
- phi
err_PETSc = 0
end subroutine formResidual
!--------------------------------------------------------------------------------------------------
!> @brief update reference viscosity and conductivity
!> @brief Update reference viscosity and conductivity.
!--------------------------------------------------------------------------------------------------
subroutine updateReference()

View File

@ -111,10 +111,12 @@ subroutine grid_mechanical_FEM_init
1.0_pReal,-1.0_pReal,-1.0_pReal,-1.0_pReal, &
1.0_pReal, 1.0_pReal, 1.0_pReal, 1.0_pReal], [4,8])
real(pReal), dimension(3,3,3,3) :: devNull
real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
real(pReal), dimension(3,product(cells(1:2))*cells3) :: temp3n
PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
PetscScalar, pointer, dimension(:,:,:,:) :: &
u_current,u_lastInc
u,u_lastInc
PetscInt, dimension(0:worldsize-1) :: localK
integer(HID_T) :: fileHandle, groupHandle
type(tDict), pointer :: &
@ -220,7 +222,7 @@ subroutine grid_mechanical_FEM_init
CHKERRQ(err_PETSc)
call VecSet(solution_rate ,0.0_pReal,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -260,10 +262,14 @@ subroutine grid_mechanical_FEM_init
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
call HDF5_read(u_current,groupHandle,'u')
call HDF5_read(u_lastInc,groupHandle,'u_lastInc')
call HDF5_read(temp33n,groupHandle,'F')
F = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_lastInc')
F_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
call HDF5_read(temp3n,groupHandle,'u')
u = reshape(temp3n,[3,cells(1),cells(2),cells3])
call HDF5_read(temp3n,groupHandle,'u_lastInc')
u_lastInc = reshape(temp3n,[3,cells(1),cells(2),cells3])
elseif (CLI_restartInc == 0) then restartRead
F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3) ! initialize to identity
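These are Fortran (column-major) reshapes; in numpy terms, `order='F'`. A minimal round-trip check of the layout (cell counts invented):

```python
import numpy as np

cx, cy, cz = 4, 3, 2                                # hypothetical cells(1), cells(2), cells3
flat = np.random.rand(3, 3, cx*cy*cz)               # shape of temp33n
full = flat.reshape((3, 3, cx, cy, cz), order='F')  # F = reshape(temp33n,[3,3,cx,cy,cz])
assert np.array_equal(full.reshape((3, 3, -1), order='F'), flat)
```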
@ -275,7 +281,7 @@ subroutine grid_mechanical_FEM_init
call utilities_constitutiveResponse(P_current,P_av,C_volAvg,devNull, & ! stress field, stress avg, global average of stiffness and (min+max)/2
F, & ! target F
0.0_pReal) ! time increment
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -354,10 +360,10 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
rotation_BC
PetscErrorCode :: err_PETSc
PetscScalar, pointer, dimension(:,:,:,:) :: &
u_current,u_lastInc
u,u_lastInc
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -410,7 +416,7 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
call VecAXPY(solution_current,Delta_t,solution_rate,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -441,10 +447,10 @@ subroutine grid_mechanical_FEM_restartWrite
PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle
PetscScalar, dimension(:,:,:,:), pointer :: u_current,u_lastInc
PetscScalar, dimension(:,:,:,:), pointer :: u,u_lastInc
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -453,10 +459,10 @@ subroutine grid_mechanical_FEM_restartWrite
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
groupHandle = HDF5_addGroup(fileHandle,'solver')
call HDF5_write(F,groupHandle,'F')
call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
call HDF5_write(u_current,groupHandle,'u')
call HDF5_write(u_lastInc,groupHandle,'u_lastInc')
call HDF5_write(reshape(F,[3,3,product(cells(1:2))*cells3]),groupHandle,'F')
call HDF5_write(reshape(F_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_lastInc')
call HDF5_write(reshape(u,[3,product(cells(1:2))*cells3]),groupHandle,'u')
call HDF5_write(reshape(u_lastInc,[3,product(cells(1:2))*cells3]),groupHandle,'u_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
@ -473,7 +479,7 @@ subroutine grid_mechanical_FEM_restartWrite
call HDF5_closeFile(fileHandle)
end if
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)

View File

@ -104,7 +104,7 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields and fills them with data, potentially from restart info
!--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_basic_init
subroutine grid_mechanical_spectral_basic_init()
real(pReal), dimension(3,3,cells(1),cells(2),cells3) :: P
PetscErrorCode :: err_PETSc
@ -112,6 +112,7 @@ subroutine grid_mechanical_spectral_basic_init
PetscScalar, pointer, dimension(:,:,:,:) :: &
F ! pointer to solution data
PetscInt, dimension(0:worldsize-1) :: localK
real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
integer(HID_T) :: fileHandle, groupHandle
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
type(MPI_File) :: fileUnit
@ -229,8 +230,10 @@ subroutine grid_mechanical_spectral_basic_init
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
call HDF5_read(temp33n,groupHandle,'F')
F = reshape(temp33n,[9,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_lastInc')
F_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
elseif (CLI_restartInc == 0) then restartRead
F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3) ! initialize to identity
@ -421,8 +424,8 @@ subroutine grid_mechanical_spectral_basic_restartWrite
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
groupHandle = HDF5_addGroup(fileHandle,'solver')
call HDF5_write(F,groupHandle,'F')
call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
call HDF5_write(reshape(F,[3,3,product(cells(1:2))*cells3]),groupHandle,'F')
call HDF5_write(reshape(F_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)

View File

@ -115,7 +115,7 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields and fills them with data, potentially from restart info
!--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_init
subroutine grid_mechanical_spectral_polarisation_init()
real(pReal), dimension(3,3,cells(1),cells(2),cells3) :: P
PetscErrorCode :: err_PETSc
@ -125,6 +125,7 @@ subroutine grid_mechanical_spectral_polarisation_init
F, & ! specific (sub)pointer
F_tau ! specific (sub)pointer
PetscInt, dimension(0:worldsize-1) :: localK
real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
integer(HID_T) :: fileHandle, groupHandle
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
type(MPI_File) :: fileUnit
@ -250,10 +251,14 @@ subroutine grid_mechanical_spectral_polarisation_init
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
call HDF5_read(F_tau,groupHandle,'F_tau')
call HDF5_read(F_tau_lastInc,groupHandle,'F_tau_lastInc')
call HDF5_read(temp33n,groupHandle,'F')
F = reshape(temp33n,[9,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_lastInc')
F_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_tau')
F_tau = reshape(temp33n,[9,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_tau_lastInc')
F_tau_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
elseif (CLI_restartInc == 0) then restartRead
F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3) ! initialize to identity
@ -476,10 +481,10 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
groupHandle = HDF5_addGroup(fileHandle,'solver')
call HDF5_write(F,groupHandle,'F')
call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
call HDF5_write(F_tau,groupHandle,'F_tau')
call HDF5_write(F_tau_lastInc,groupHandle,'F_tau_lastInc')
call HDF5_write(reshape(F,[3,3,product(cells(1:2))*cells3]),groupHandle,'F')
call HDF5_write(reshape(F_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_lastInc')
call HDF5_write(reshape(F_tau,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_tau')
call HDF5_write(reshape(F_tau_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_tau_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)

View File

@ -48,7 +48,7 @@ module grid_thermal_spectral
SNES :: SNES_thermal
Vec :: solution_vec
real(pReal), dimension(:,:,:), allocatable :: &
T_current, & !< field of current temperature
T, & !< field of current temperature
T_lastInc, & !< field of previous temperature
T_stagInc, & !< field of staggered temperature
dotT_lastInc
@ -78,6 +78,7 @@ subroutine grid_thermal_spectral_init()
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle
real(pReal), dimension(1,product(cells(1:2))*cells3) :: tempN
type(tDict), pointer :: &
num_grid
@ -107,10 +108,10 @@ subroutine grid_thermal_spectral_init()
!--------------------------------------------------------------------------------------------------
! init fields
T_current = discretization_grid_getInitialCondition('T')
T_lastInc = T_current
T_stagInc = T_current
dotT_lastInc = 0.0_pReal * T_current
T = discretization_grid_getInitialCondition('T')
T_lastInc = T
T_stagInc = T
dotT_lastInc = 0.0_pReal * T
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
@ -151,20 +152,23 @@ subroutine grid_thermal_spectral_init()
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(T_current,groupHandle,'T',.false.)
call HDF5_read(T_lastInc,groupHandle,'T_lastInc',.false.)
call HDF5_read(dotT_lastInc,groupHandle,'dotT_lastInc',.false.)
call HDF5_read(tempN,groupHandle,'T',.false.)
T = reshape(tempN,[cells(1),cells(2),cells3])
call HDF5_read(tempN,groupHandle,'T_lastInc',.false.)
T_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
call HDF5_read(tempN,groupHandle,'dotT_lastInc',.false.)
dotT_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
end if restartRead
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
ce = ce + 1
call homogenization_thermal_setField(T_current(i,j,k),0.0_pReal,ce)
call homogenization_thermal_setField(T(i,j,k),0.0_pReal,ce)
end do; end do; end do
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
T_PETSc = T_current
T_PETSc = T
call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
@ -207,20 +211,20 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
solution%converged = .true.
solution%iterationsNeeded = totalIter
end if
stagNorm = maxval(abs(T_current - T_stagInc))
stagNorm = maxval(abs(T - T_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current))
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T))
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
T_stagInc = T_current
T_stagInc = T
!--------------------------------------------------------------------------------------------------
! updating thermal state
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
call homogenization_thermal_setField(T(i,j,k),(T(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
end do; end do; end do
call VecMin(solution_vec,devNull,T_min,err_PETSc)
@ -250,7 +254,7 @@ subroutine grid_thermal_spectral_forward(cutBack)
if (cutBack) then
T_current = T_lastInc
T = T_lastInc
T_stagInc = T_lastInc
!--------------------------------------------------------------------------------------------------
@ -259,17 +263,17 @@ subroutine grid_thermal_spectral_forward(cutBack)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc) !< get the data out of PETSc to work with
CHKERRQ(err_PETSc)
T_PETSc = T_current
T_PETSc = T
call DMDAVecRestoreArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
call homogenization_thermal_setField(T_current(i,j,k),dotT_lastInc(i,j,k),ce)
call homogenization_thermal_setField(T(i,j,k),dotT_lastInc(i,j,k),ce)
end do; end do; end do
else
dotT_lastInc = (T_current - T_lastInc)/params%Delta_t
T_lastInc = T_current
dotT_lastInc = (T - T_lastInc)/params%Delta_t
T_lastInc = T
call updateReference
end if
@ -277,7 +281,7 @@ end subroutine grid_thermal_spectral_forward
!--------------------------------------------------------------------------------------------------
!> @brief Write current solver and constitutive data for restart to file
!> @brief Write current solver and constitutive data for restart to file.
!--------------------------------------------------------------------------------------------------
subroutine grid_thermal_spectral_restartWrite
@ -295,9 +299,9 @@ subroutine grid_thermal_spectral_restartWrite
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_write(T,groupHandle,'T')
call HDF5_write(T_lastInc,groupHandle,'T_lastInc')
call HDF5_write(dotT_lastInc,groupHandle,'dotT_lastInc')
call HDF5_write(reshape(T,[1,product(cells(1:2))*cells3]),groupHandle,'T')
call HDF5_write(reshape(T_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'T_lastInc')
call HDF5_write(reshape(dotT_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'dotT_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
@ -320,7 +324,7 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
x_scal
PetscScalar, dimension( &
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
r
r !< residual
PetscObject :: dummy
PetscErrorCode, intent(out) :: err_PETSc
@ -328,10 +332,8 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
real(pReal), dimension(3,cells(1),cells(2),cells3) :: vectorField
T_current = x_scal
!--------------------------------------------------------------------------------------------------
! evaluate polarization field
vectorField = utilities_ScalarGradient(T_current)
T = x_scal
vectorField = utilities_ScalarGradient(T)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
@ -342,13 +344,11 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_T(ce)) &
+ homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T_current(i,j,k)) &
+ mu_ref*T_current(i,j,k)
+ homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T(i,j,k)) &
+ mu_ref*T(i,j,k)
end do; end do; end do
!--------------------------------------------------------------------------------------------------
! constructing residual
r = T_current &
r = T &
- utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t)
err_PETSc = 0
@ -356,7 +356,7 @@ end subroutine formResidual
!--------------------------------------------------------------------------------------------------
!> @brief update reference viscosity and conductivity
!> @brief Update reference viscosity and conductivity.
!--------------------------------------------------------------------------------------------------
subroutine updateReference()

View File

@ -1142,11 +1142,13 @@ subroutine selfTest()
MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (worldrank==0) then
if (any(dNeq(tensorSum/tensorField_fourier(:,:,1,1,1)%re,1.0_pReal,1.0e-12_pReal))) error stop 'mismatch avg tensorField FFT <-> real'
if (any(dNeq(tensorSum/tensorField_fourier(:,:,1,1,1)%re,1.0_pReal,1.0e-12_pReal))) &
error stop 'mismatch avg tensorField FFT <-> real'
end if
call fftw_mpi_execute_dft_c2r(planTensorBack,tensorField_fourier,tensorField_real)
tensorField_real(1:3,1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
if (maxval(abs(tensorField_real_ - tensorField_real*wgt))>5.0e-15_pReal) error stop 'mismatch tensorField FFT/invFFT <-> real'
if (maxval(abs(tensorField_real_ - tensorField_real*wgt))>5.0e-15_pReal) &
error stop 'mismatch tensorField FFT/invFFT <-> real'
call random_number(vectorField_real)
vectorField_real(1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
@ -1156,11 +1158,13 @@ subroutine selfTest()
MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (worldrank==0) then
if (any(dNeq(vectorSum/vectorField_fourier(:,1,1,1)%re,1.0_pReal,1.0e-12_pReal))) error stop 'mismatch avg vectorField FFT <-> real'
if (any(dNeq(vectorSum/vectorField_fourier(:,1,1,1)%re,1.0_pReal,1.0e-12_pReal))) &
error stop 'mismatch avg vectorField FFT <-> real'
end if
call fftw_mpi_execute_dft_c2r(planVectorBack,vectorField_fourier,vectorField_real)
vectorField_real(1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
if (maxval(abs(vectorField_real_ - vectorField_real*wgt))>5.0e-15_pReal) error stop 'mismatch vectorField FFT/invFFT <-> real'
if (maxval(abs(vectorField_real_ - vectorField_real*wgt))>5.0e-15_pReal) &
error stop 'mismatch vectorField FFT/invFFT <-> real'
call random_number(scalarField_real)
scalarField_real(cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
@ -1170,11 +1174,13 @@ subroutine selfTest()
MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (worldrank==0) then
if (dNeq(scalarSum/scalarField_fourier(1,1,1)%re,1.0_pReal,1.0e-12_pReal)) error stop 'mismatch avg scalarField FFT <-> real'
if (dNeq(scalarSum/scalarField_fourier(1,1,1)%re,1.0_pReal,1.0e-12_pReal)) &
error stop 'mismatch avg scalarField FFT <-> real'
end if
call fftw_mpi_execute_dft_c2r(planScalarBack,scalarField_fourier,scalarField_real)
scalarField_real(cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
if (maxval(abs(scalarField_real_ - scalarField_real*wgt))>5.0e-15_pReal) error stop 'mismatch scalarField FFT/invFFT <-> real'
if (maxval(abs(scalarField_real_ - scalarField_real*wgt))>5.0e-15_pReal) &
error stop 'mismatch scalarField FFT/invFFT <-> real'
call random_number(r)
call MPI_Bcast(r,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
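The same invariants in numpy, for orientation: the zero-frequency Fourier coefficient equals the field sum, and a forward/backward transform pair reproduces the field to near machine precision (grid size invented):

```python
import numpy as np

a = np.random.rand(8, 8, 8)
A = np.fft.rfftn(a)
assert np.isclose(A[0, 0, 0].real, a.sum())                   # DC term == field sum
assert np.abs(np.fft.irfftn(A, a.shape) - a).max() < 5.0e-15  # FFT/invFFT round trip
```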

View File

@ -406,6 +406,9 @@ subroutine homogenization_restartWrite(fileHandle)
call HDF5_write(homogState(ho)%state,groupHandle(2),'omega_mechanical') ! ToDo: should be done by mech
if(damageState_h(ho)%sizeState > 0) &
call HDF5_write(damageState_h(ho)%state,groupHandle(2),'omega_damage') ! ToDo: should be done by damage
call HDF5_closeGroup(groupHandle(2))
end do
@ -433,6 +436,9 @@ subroutine homogenization_restartRead(fileHandle)
call HDF5_read(homogState(ho)%state0,groupHandle(2),'omega_mechanical') ! ToDo: should be done by mech
if(damageState_h(ho)%sizeState > 0) &
call HDF5_read(damageState_h(ho)%state0,groupHandle(2),'omega_damage') ! ToDo: should be done by damage
call HDF5_closeGroup(groupHandle(2))
end do

View File

@ -80,11 +80,15 @@ module subroutine damage_partition(ce)
integer, intent(in) :: ce
real(pReal) :: phi
integer :: co
if(damageState_h(material_homogenizationID(ce))%sizeState < 1) return
phi = damageState_h(material_homogenizationID(ce))%state(1,material_homogenizationEntry(ce))
call phase_set_phi(phi,1,ce)
do co = 1, homogenization_Nconstituents(material_homogenizationID(ce))
call phase_set_phi(phi,co,ce)
end do
end subroutine damage_partition

View File

@ -160,6 +160,11 @@ module phase
integer, intent(in) :: ph
end subroutine thermal_restartWrite
module subroutine damage_restartWrite(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
end subroutine damage_restartWrite
module subroutine mechanical_restartRead(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
@ -170,6 +175,11 @@ module phase
integer, intent(in) :: ph
end subroutine thermal_restartRead
module subroutine damage_restartRead(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
end subroutine damage_restartRead
module function mechanical_S(ph,en) result(S)
integer, intent(in) :: ph,en
real(pReal), dimension(3,3) :: S
@ -674,6 +684,7 @@ subroutine phase_restartWrite(fileHandle)
call mechanical_restartWrite(groupHandle(2),ph)
call thermal_restartWrite(groupHandle(2),ph)
call damage_restartWrite(groupHandle(2),ph)
call HDF5_closeGroup(groupHandle(2))
@ -703,6 +714,7 @@ subroutine phase_restartRead(fileHandle)
call mechanical_restartRead(groupHandle(2),ph)
call thermal_restartRead(groupHandle(2),ph)
call damage_restartRead(groupHandle(2),ph)
call HDF5_closeGroup(groupHandle(2))

View File

@ -1,6 +1,6 @@
!----------------------------------------------------------------------------------------------------
!--------------------------------------------------------------------------------------------------
!> @brief internal microstructure state for all damage sources and kinematics constitutive models
!----------------------------------------------------------------------------------------------------
!--------------------------------------------------------------------------------------------------
submodule(phase) damage
type :: tDamageParameters
@ -310,6 +310,35 @@ function integrateDamageState(Delta_t,ph,en) result(broken)
end function integrateDamageState
module subroutine damage_restartWrite(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
select case(phase_damage(ph))
case(DAMAGE_ISOBRITTLE_ID,DAMAGE_ANISOBRITTLE_ID)
call HDF5_write(damageState(ph)%state,groupHandle,'omega_damage')
end select
end subroutine damage_restartWrite
module subroutine damage_restartRead(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
select case(phase_damage(ph))
case(DAMAGE_ISOBRITTLE_ID,DAMAGE_ANISOBRITTLE_ID)
call HDF5_read(damageState(ph)%state0,groupHandle,'omega_damage')
end select
end subroutine damage_restartRead
!----------------------------------------------------------------------------------------------
!> @brief writes damage sources results to HDF5 output file
!----------------------------------------------------------------------------------------------

View File

@ -1783,6 +1783,6 @@ subroutine storeGeometry(ph)
end do
end do
end subroutine
end subroutine storeGeometry
end submodule nonlocal