Merge branch 'development' into 228-unit-tests-for-spectral-functionality

Philip Eisenlohr 2022-12-05 11:01:48 -05:00
commit 65cb9d5997
30 changed files with 611 additions and 439 deletions

@ -1 +1 @@
Subproject commit 81f5f24d076a623e6052c234825c591267915285
Subproject commit 599d5accf4f5249257972cef997e8078e857c5a1


@ -1 +1 @@
3.0.0-alpha7-160-g92ae86b63
3.0.0-alpha7-177-g30998d667


@ -26,11 +26,8 @@ class Rotation:
- Coordinate frames are right-handed.
- A rotation angle ω is taken to be positive for a counterclockwise rotation
when viewing from the end point of the rotation axis towards the origin.
- Rotations will be interpreted in the passive sense.
- Euler angle triplets are implemented using the Bunge convention,
with angular ranges of [0,2π], [0,π], [0,2π].
- The rotation angle ω is limited to the interval [0,π].
- The real part of a quaternion is positive, Re(q) ≥ 0.
- Rotations will be interpreted in the passive sense, i.e. as rotation of
the coordinate frame.
- P = -1 (as default).
Examples
@ -879,6 +876,10 @@ class Rotation:
reciprocal : bool, optional
Basis vectors are given in reciprocal (instead of real) space. Defaults to False.
Returns
-------
new : damask.Rotation
"""
om = np.array(basis,dtype=float)
if om.shape[-2:] != (3,3): raise ValueError('invalid shape')
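
A minimal sketch illustrating the convention notes above (Bunge Euler angles within [0,2π], [0,π], [0,2π]; non-negative quaternion real part): the damask.Rotation calls are those appearing in this diff, while the specific angle values are arbitrary.

import numpy as np
from damask import Rotation

r = Rotation.from_Euler_angles([0.3,0.5,1.2])     # Bunge angles (phi1,Phi,phi2) in radians
q = r.as_quaternion()
assert q[0] >= 0.0                                # real part is kept non-negative
assert np.isclose(np.linalg.norm(q),1.0)          # unit quaternion
assert np.allclose(r.as_Euler_angles(),[0.3,0.5,1.2])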


@ -116,7 +116,10 @@ class VTK:
"""
s = vtk.vtkStringArray()
s.SetName('comments')
for c in util.tail_repack(comments,self.comments):
comments_ = util.tail_repack(comments,self.comments) if comments[:len(self.comments)] == self.comments else \
[comments] if isinstance(comments,str) else \
comments
for c in comments_:
s.InsertNextValue(c)
self.vtk_data.GetFieldData().AddArray(s)
@ -547,9 +550,11 @@ class VTK:
Notes
-----
See http://compilatrix.com/article/vtk-1 for further ideas.
The first component is shown when visualizing vector datasets
(this includes tensor datasets because they are flattened).
"""
# See http://compilatrix.com/article/vtk-1 for possible improvements.
try:
import wx
_ = wx.App(False) # noqa


@ -574,7 +574,7 @@ def _docstringer(docstring: _Union[str, _Callable],
shift = min([len(line)-len(line.lstrip(' '))-indent for line in content])
extra = '\n'.join([(line[shift:] if shift > 0 else
f'{" "*-shift}{line}') for line in content])
docstring_ = _re.sub(fr'(^([ ]*){key}\s*\n\2{"-"*len(key)}[\n ]*[A-Za-z0-9 ]*: ([^\n]+\n)*)',
docstring_ = _re.sub(fr'(^([ ]*){key}\s*\n\2{"-"*len(key)}[\n ]*[A-Za-z0-9_ ]*: ([^\n]+\n)*)',
fr'\1{extra}\n',
docstring_,flags=_re.MULTILINE)
@ -590,7 +590,7 @@ def _docstringer(docstring: _Union[str, _Callable],
+(return_class.__name__ if not isinstance(return_class,str) else return_class)
)
return _re.sub(r'(^([ ]*)Returns\s*\n\2-------\s*\n[ ]*[A-Za-z0-9 ]*: )(.*)\n',
return _re.sub(r'(^([ ]*)Returns\s*\n\2-------\s*\n[ ]*[A-Za-z0-9_ ]*: )(.*)\n',
fr'\1{return_type_}\n',
docstring_,flags=_re.MULTILINE)
@ -793,7 +793,7 @@ def tail_repack(extended: _Union[str, _Sequence[str]],
Parameters
----------
extended : (list of) str
extended : (sequence of) str
Extended string list with potentially autosplitted tailing string relative to `existing`.
existing : list of str
Base string list.
@ -811,9 +811,9 @@ def tail_repack(extended: _Union[str, _Sequence[str]],
['a','new','shiny','e','n','t','r','y']
"""
return [extended] if isinstance(extended,str) else existing + \
([''.join(extended[len(existing):])] if _np.prod([len(i) for i in extended[len(existing):]]) == 1 else
list(extended[len(existing):]))
new = extended[len(existing):]
return [extended] if isinstance(extended,str) else \
existing + list([''.join(new)] if _np.prod([len(i) for i in new]) == 1 else new)
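
A minimal usage sketch of tail_repack, following the docstring example above (assuming the base list is ['a','new','shiny']): a tailing string that arrived auto-split into single characters is joined back, while a plain string is wrapped into a one-element list.

from damask import util

existing = ['a','new','shiny']
extended = ['a','new','shiny','e','n','t','r','y']            # 'entry' was auto-split into characters
assert util.tail_repack(extended,existing) == ['a','new','shiny','entry']
assert util.tail_repack('comment',existing) == ['comment']    # plain string becomes a one-element list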
def aslist(arg: _Union[_IntCollection, int, None]) -> _List:


@ -7,6 +7,7 @@ from damask import Table
from damask import _rotation
from damask import grid_filters
from damask import util
from damask import tensor
n = 1000
atol=1.e-4
@ -20,6 +21,16 @@ def ref_path(ref_path_base):
def set_of_rotations(set_of_quaternions):
return [Rotation.from_quaternion(s) for s in set_of_quaternions]
@pytest.fixture
def multidim_rotations(set_of_quaternions):
L = len(set_of_quaternions)
i = 0
while L%(f:=np.random.randint(2,np.sqrt(L).astype(int))) > 0 and i<L:
i += 1
f = i if i == L else f
return Rotation.from_quaternion(set_of_quaternions.reshape((L//f,f,-1)))
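
For orientation: the fixture above draws a random factor f of the number of quaternions and reshapes the flat set into a two-dimensional array of rotations, so the tests below also exercise multi-dimensional shapes. A standalone sketch of the same reshaping idea (the numbers are illustrative, not taken from the test suite):

from damask import Rotation

quats = Rotation.from_random(12).as_quaternion()          # twelve random unit quaternions, shape (12,4)
f = 3                                                     # any factor of 12
r = Rotation.from_quaternion(quats.reshape((12//f,f,-1)))
assert r.shape == (4,3)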
####################################################################################################
# Code below available according to the following conditions
@ -691,117 +702,156 @@ class TestRotation:
def test_to_numpy(self):
r = Rotation.from_random(np.random.randint(0,10,4))
assert np.all(r.as_quaternion() == np.array(r))
assert (r.as_quaternion() == np.array(r)).all()
@pytest.mark.parametrize('degrees',[True,False])
def test_Eulers(self,set_of_rotations,degrees):
for rot in set_of_rotations:
m = rot.as_quaternion()
o = Rotation.from_Euler_angles(rot.as_Euler_angles(degrees),degrees).as_quaternion()
ok = np.allclose(m,o,atol=atol)
if np.isclose(rot.as_quaternion()[0],0.0,atol=atol):
ok |= np.allclose(m*-1.,o,atol=atol)
assert ok and np.isclose(np.linalg.norm(o),1.0), f'{m},{o},{rot.as_quaternion()}'
def test_bounds(self,multidim_rotations):
m = multidim_rotations
@pytest.mark.parametrize('P',[1,-1])
@pytest.mark.parametrize('normalize',[True,False])
@pytest.mark.parametrize('degrees',[True,False])
def test_axis_angle(self,set_of_rotations,degrees,normalize,P):
c = np.array([P*-1,P*-1,P*-1,1.])
c[:3] *= 0.9 if normalize else 1.0
for rot in set_of_rotations:
m = rot.as_Euler_angles()
o = Rotation.from_axis_angle(rot.as_axis_angle(degrees)*c,degrees,normalize,P).as_Euler_angles()
u = np.array([np.pi*2,np.pi,np.pi*2])
ok = np.allclose(m,o,atol=atol)
ok |= np.allclose(np.where(np.isclose(m,u),m-u,m),np.where(np.isclose(o,u),o-u,o),atol=atol)
if np.isclose(m[1],0.0,atol=atol) or np.isclose(m[1],np.pi,atol=atol):
sum_phi = np.unwrap([m[0]+m[2],o[0]+o[2]])
ok |= np.isclose(sum_phi[0],sum_phi[1],atol=atol)
assert ok and (np.zeros(3)-1.e-9 <= o).all() \
and (o <= np.array([np.pi*2.,np.pi,np.pi*2.])+1.e-9).all(), f'{m},{o},{rot.as_quaternion()}'
q = m.as_quaternion()
assert np.allclose(1.,np.linalg.norm(q,axis=-1))
def test_matrix(self,set_of_rotations):
for rot in set_of_rotations:
m = rot.as_axis_angle()
o = Rotation.from_axis_angle(rot.as_axis_angle()).as_axis_angle()
ok = np.allclose(m,o,atol=atol)
if np.isclose(m[3],np.pi,atol=atol):
ok |= np.allclose(m*np.array([-1.,-1.,-1.,1.]),o,atol=atol)
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0) \
and o[3]<=np.pi+1.e-9, f'{m},{o},{rot.as_quaternion()}'
v = m.as_Rodrigues_vector(compact=False)
assert np.allclose(1.,np.linalg.norm(v[...,:3],axis=-1))
def test_parallel(self,set_of_rotations):
a = np.array([[1.0,0.0,0.0],
[0.0,1.0,0.0]])
for rot in set_of_rotations:
assert rot.allclose(Rotation.from_parallel(a,rot.broadcast_to((2,))@a))
v = m.as_axis_angle(degrees=False)
assert np.allclose(1.,np.linalg.norm(v[...,:3],axis=-1))
assert (v[...,3] >= 0.).all() and (v < np.pi+1.e-9).all()
@pytest.mark.parametrize('P',[1,-1])
@pytest.mark.parametrize('normalize',[True,False])
def test_Rodrigues(self,set_of_rotations,normalize,P):
c = np.array([P*-1,P*-1,P*-1,1.])
c[:3] *= 0.9 if normalize else 1.0
for rot in set_of_rotations:
m = rot.as_matrix()
o = Rotation.from_Rodrigues_vector(rot.as_Rodrigues_vector()*c,normalize,P).as_matrix()
ok = np.allclose(m,o,atol=atol)
assert ok and np.isclose(np.linalg.det(o),1.0), f'{m},{o}'
r = m.as_matrix()
assert np.allclose(1.,np.linalg.det(r))
def test_Rodrigues_compact(self,set_of_rotations):
for rot in set_of_rotations:
c = rot.as_Rodrigues_vector(compact=True)
r = rot.as_Rodrigues_vector(compact=False)
assert np.allclose(r[:3]*r[3], c, equal_nan=True)
e = m.as_Euler_angles(degrees=False)
assert (e >= 0.).all() and (e < np.pi*np.array([2.,1.,2.])+1.e-9).all()
c = m.as_cubochoric()
assert (np.linalg.norm(c,ord=np.inf,axis=-1) < np.pi**(2./3.)*0.5+1.e-9).all()
h = m.as_homochoric()
assert (np.linalg.norm(h,axis=-1) < (3.*np.pi/4.)**(1./3.) + 1.e-9).all()
@pytest.mark.parametrize('P',[1,-1])
def test_homochoric(self,set_of_rotations,P):
cutoff = np.tan(np.pi*.5*(1.-1e-4))
for rot in set_of_rotations:
m = rot.as_Rodrigues_vector()
o = Rotation.from_homochoric(rot.as_homochoric()*P*-1,P).as_Rodrigues_vector()
ok = np.allclose(np.clip(m,None,cutoff),np.clip(o,None,cutoff),atol=atol)
ok |= np.isclose(m[3],0.0,atol=atol)
assert ok and np.isclose(np.linalg.norm(o[:3]),1.0), f'{m},{o},{rot.as_quaternion()}'
@pytest.mark.parametrize('P',[1,-1])
def test_cubochoric(self,set_of_rotations,P):
for rot in set_of_rotations:
m = rot.as_homochoric()
o = Rotation.from_cubochoric(rot.as_cubochoric()*P*-1,P).as_homochoric()
ok = np.allclose(m,o,atol=atol)
assert ok and np.linalg.norm(o) < (3.*np.pi/4.)**(1./3.) + 1.e-9, f'{m},{o},{rot.as_quaternion()}'
@pytest.mark.parametrize('P',[1,-1])
@pytest.mark.parametrize('accept_homomorph',[True,False])
@pytest.mark.parametrize('normalize',[True,False])
def test_quaternion(self,set_of_rotations,P,accept_homomorph,normalize):
c = np.array([1,P*-1,P*-1,P*-1]) * (-1 if accept_homomorph else 1) * (0.9 if normalize else 1.0)
for rot in set_of_rotations:
m = rot.as_cubochoric()
o = Rotation.from_quaternion(rot.as_quaternion()*c,accept_homomorph,normalize,P).as_cubochoric()
ok = np.allclose(m,o,atol=atol)
if np.count_nonzero(np.isclose(np.abs(o),np.pi**(2./3.)*.5)):
ok |= np.allclose(m*-1.,o,atol=atol)
assert ok and o.max() < np.pi**(2./3.)*0.5+1.e-9, f'{m},{o},{rot.as_quaternion()}'
@pytest.mark.parametrize('P',[1,-1])
def test_quaternion(self,multidim_rotations,accept_homomorph,normalize,P):
c = np.array([1,-P,-P,-P]) * (-1 if accept_homomorph else 1) * (0.9 if normalize else 1.0)
m = multidim_rotations
o = Rotation.from_quaternion(m.as_quaternion()*c,
accept_homomorph=accept_homomorph,
normalize=normalize,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
@pytest.mark.parametrize('degrees',[True,False])
def test_Eulers(self,multidim_rotations,degrees):
m = multidim_rotations
o = Rotation.from_Euler_angles(m.as_Euler_angles(degrees),
degrees=degrees)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
@pytest.mark.parametrize('degrees',[True,False])
@pytest.mark.parametrize('normalize',[True,False])
@pytest.mark.parametrize('P',[1,-1])
def test_axis_angle(self,multidim_rotations,degrees,normalize,P):
c = np.array([-P,-P,-P,1.])
c[:3] *= 0.9 if normalize else 1.0
m = multidim_rotations
o = Rotation.from_axis_angle(m.as_axis_angle(degrees)*c,
degrees=degrees,
normalize=normalize,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
def test_matrix(self,multidim_rotations):
m = multidim_rotations
o = Rotation.from_matrix(m.as_matrix())
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
def test_parallel(self,multidim_rotations):
m = multidim_rotations
a = np.broadcast_to(np.array([[1.0,0.0,0.0],
[0.0,1.0,0.0]]),m.shape+(2,3))
assert m.allclose(Rotation.from_parallel(a,m.broadcast_to(m.shape+(2,))@a))
@pytest.mark.parametrize('normalize',[True,False])
@pytest.mark.parametrize('P',[1,-1])
def test_Rodrigues(self,multidim_rotations,normalize,P):
c = np.array([-P,-P,-P,1.])
c[:3] *= 0.9 if normalize else 1.0
m = multidim_rotations
o = Rotation.from_Rodrigues_vector(m.as_Rodrigues_vector()*c,
normalize=normalize,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
def test_Rodrigues_compact(self,multidim_rotations):
m = multidim_rotations
c = m.as_Rodrigues_vector(compact=True)
r = m.as_Rodrigues_vector(compact=False)
assert np.allclose(r[...,:3]*r[...,3:], c, equal_nan=True)
@pytest.mark.parametrize('P',[1,-1])
def test_homochoric(self,multidim_rotations,P):
m = multidim_rotations
o = Rotation.from_homochoric(m.as_homochoric()*-P,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
@pytest.mark.parametrize('P',[1,-1])
def test_cubochoric(self,multidim_rotations,P):
m = multidim_rotations
o = Rotation.from_cubochoric(m.as_cubochoric()*-P,
P=P)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
@pytest.mark.parametrize('reciprocal',[True,False])
def test_basis(self,set_of_rotations,reciprocal):
for rot in set_of_rotations:
om = rot.as_matrix() + 0.1*np.eye(3)
rot = Rotation.from_basis(om,False,reciprocal=reciprocal)
assert np.isclose(np.linalg.det(rot.as_matrix()),1.0)
def test_basis(self,multidim_rotations,reciprocal):
m = multidim_rotations
r = m.as_matrix()
r = np.linalg.inv(tensor.transpose(r)/np.pi) if reciprocal else r
o = Rotation.from_basis(r,
reciprocal=reciprocal)
f = Rotation(np.where(np.isclose(m.as_quaternion()[...,0],0.0,atol=atol)[...,np.newaxis],~o,o))
assert np.logical_or(m.isclose(o,atol=atol),
m.isclose(f,atol=atol)
).all()
@pytest.mark.parametrize('shape',[None,1,(4,4)])
def test_random(self,shape):
r = Rotation.from_random(shape)
if shape is None:
assert r.shape == ()
elif shape == 1:
assert r.shape == (1,)
else:
assert r.shape == shape
assert r.shape == (() if shape is None else (1,) if shape == 1 else shape)
@pytest.mark.parametrize('shape',[None,5,(4,6)])
def test_equal(self,shape):
@ -822,7 +872,7 @@ class TestRotation:
def test_equal_ambiguous(self):
qu = np.random.rand(10,4)
qu[:,0] = 0.
qu/=np.linalg.norm(qu,axis=1,keepdims=True)
qu /= np.linalg.norm(qu,axis=1,keepdims=True)
assert (Rotation(qu) == Rotation(-qu)).all()
def test_inversion(self):
@ -947,13 +997,13 @@ class TestRotation:
p = np.random.rand(n,3)
o = Rotation._get_pyramid_order(p,direction)
for i,o_i in enumerate(o):
assert np.all(o_i==Rotation._get_pyramid_order(p[i],direction))
assert (o_i==Rotation._get_pyramid_order(p[i],direction)).all()
def test_pyramid_invariant(self):
a = np.random.rand(n,3)
f = Rotation._get_pyramid_order(a,'forward')
b = Rotation._get_pyramid_order(a,'backward')
assert np.all(np.take_along_axis(np.take_along_axis(a,f,-1),b,-1) == a)
assert (np.take_along_axis(np.take_along_axis(a,f,-1),b,-1) == a).all()
@pytest.mark.parametrize('data',[np.random.rand(5,3),


@ -70,7 +70,7 @@ subroutine DAMASK_interface_init
if (ierr /= 0) then
print*, 'working directory "'//trim(wd)//'" does not exist'
call quit(1)
endif
end if
symmetricSolver = solverIsSymmetric()
end subroutine DAMASK_interface_init
@ -111,8 +111,8 @@ logical function solverIsSymmetric()
s = s + verify(line(s+1:),' ') ! start of second chunk
e = s + scan (line(s+1:),' ') ! end of second chunk
solverIsSymmetric = line(s:e) /= '1'
endif
enddo
end if
end do
100 close(fileUnit)
contains
@ -134,7 +134,7 @@ logical function solverIsSymmetric()
lc(i:i) = string(i:i)
n = index(UPPER,lc(i:i))
if (n/=0) lc(i:i) = LOWER(n:n)
enddo
end do
end function lc
end function solverIsSymmetric
@ -299,7 +299,7 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
transpose(ffn)
write(6,'(/,a,/,3(3(f12.7,1x)/))',advance='no') ' Deformation gradient at t=n+1:', &
transpose(ffn1)
endif
end if
defaultNumThreadsInt = omp_get_num_threads() ! remember number of threads set by Marc
call omp_set_num_threads(1_pI32) ! no openMP
@ -309,7 +309,7 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
call materialpoint_initAll
debug_Marc => config_debug%get_list('Marc',defaultVal=emptyList)
debug_basic = debug_Marc%contains('basic')
endif
end if
computationMode = 0 ! save initialization value, since it does not result in any calculation
if (lovl == 4 ) then ! jacobian requested by marc
@ -333,35 +333,35 @@ subroutine hypela2(d,g,e,de,s,t,dt,ngens,m,nn,kcus,matus,ndi,nshear,disp, &
lastIncConverged = .true.
outdatedByNewInc = .true.
print'(a,i6,1x,i2)', '<< HYPELA2 >> new increment..! ',m(1),nn
endif
end if
else if ( timinc < theDelta ) then ! >> cutBack <<
lastIncConverged = .false.
outdatedByNewInc = .false.
terminallyIll = .false.
cycleCounter = -1 ! first calc step increments this to cycle = 0
print'(a,i6,1x,i2)', '<< HYPELA2 >> cutback detected..! ',m(1),nn
endif ! convergence treatment end
end if ! convergence treatment end
flush(6)
if (lastLovl /= lovl) then
cycleCounter = cycleCounter + 1
!mesh_cellnode = mesh_build_cellnodes() ! update cell node coordinates
!call mesh_build_ipCoordinates() ! update ip coordinates
endif
end if
if (outdatedByNewInc) then
computationMode = ior(computationMode,materialpoint_AGERESULTS)
outdatedByNewInc = .false.
endif
end if
if (lastIncConverged) then
computationMode = ior(computationMode,materialpoint_BACKUPJACOBIAN)
lastIncConverged = .false.
endif
end if
theTime = cptim
theDelta = timinc
theInc = inc
endif
end if
lastLovl = lovl
call materialpoint_general(computationMode,ffn,ffn1,t(1),timinc,int(m(1)),int(nn),stress,ddsdde)
@ -429,13 +429,13 @@ subroutine uedinc(inc,incsub)
if (discretization_Marc_FEM2DAMASK_node(n) /= -1) then
call nodvar(1,n,d_n(1:3,discretization_Marc_FEM2DAMASK_node(n)),nqncomp,nqdatatype)
if(nqncomp == 2) d_n(3,discretization_Marc_FEM2DAMASK_node(n)) = 0.0_pReal
endif
enddo
end if
end do
call discretization_Marc_UpdateNodeAndIpCoords(d_n)
call materialpoint_results(int(inc),cptim)
inc_written = int(inc)
endif
end if
end subroutine uedinc


@ -275,8 +275,8 @@ subroutine inputRead_fileFormat(fileFormat,fileContent)
if(IO_lc(IO_stringValue(fileContent(l),chunkPos,1)) == 'version') then
fileFormat = IO_intValue(fileContent(l),chunkPos,2)
exit
endif
enddo
end if
end do
end subroutine inputRead_fileFormat
@ -302,8 +302,8 @@ subroutine inputRead_tableStyles(initialcond,hypoelastic,fileContent)
initialcond = IO_intValue(fileContent(l),chunkPos,4)
hypoelastic = IO_intValue(fileContent(l),chunkPos,5)
exit
endif
enddo
end if
end do
end subroutine inputRead_tableStyles
@ -331,16 +331,16 @@ subroutine inputRead_matNumber(matNumber, &
data_blocks = IO_intValue(fileContent(l+1),chunkPos,1)
else
data_blocks = 1
endif
end if
allocate(matNumber(data_blocks), source = 0)
do i = 0, data_blocks - 1
j = i*(2+tableStyle) + 1
chunkPos = IO_stringPos(fileContent(l+1+j))
matNumber(i+1) = IO_intValue(fileContent(l+1+j),chunkPos,1)
enddo
end do
exit
endif
enddo
end if
end do
end subroutine inputRead_matNumber
@ -368,8 +368,8 @@ subroutine inputRead_NnodesAndElements(nNodes,nElems,&
elseif(IO_lc(IO_StringValue(fileContent(l),chunkPos,1)) == 'coordinates') then
chunkPos = IO_stringPos(fileContent(l+1))
nNodes = IO_IntValue (fileContent(l+1),chunkPos,2)
endif
enddo
end if
end do
end subroutine inputRead_NnodesAndElements
@ -411,12 +411,12 @@ subroutine inputRead_NelemSets(nElemSets,maxNelemInSet,&
if(IO_lc(IO_stringValue(fileContent(l+i),chunkPos,chunkPos(1))) /= 'c') then ! line finished, read last value
elemInCurrentSet = elemInCurrentSet + 1 ! data ended
exit
endif
enddo
endif
end if
end do
end if
maxNelemInSet = max(maxNelemInSet, elemInCurrentSet)
endif
enddo
end if
end do
end subroutine inputRead_NelemSets
@ -448,8 +448,8 @@ subroutine inputRead_mapElemSets(nameElemSet,mapElemSet,&
elemSet = elemSet+1
nameElemSet(elemSet) = trim(IO_stringValue(fileContent(l),chunkPos,4))
mapElemSet(:,elemSet) = continuousIntValues(fileContent(l+1:),size(mapElemSet,1)-1,nameElemSet,mapElemSet,size(nameElemSet))
endif
enddo
end if
end do
end subroutine inputRead_mapElemSets
@ -484,17 +484,17 @@ subroutine inputRead_mapElems(FEM2DAMASK, &
j = j + 1
chunkPos = IO_stringPos(fileContent(l+1+i+j))
nNodesAlreadyRead = nNodesAlreadyRead + chunkPos(1)
enddo
enddo
end do
end do
exit
endif
enddo
end if
end do
call math_sort(map_unsorted)
allocate(FEM2DAMASK(minval(map_unsorted(1,:)):maxval(map_unsorted(1,:))),source=-1)
do i = 1, nElems
FEM2DAMASK(map_unsorted(1,i)) = map_unsorted(2,i)
enddo
end do
end subroutine inputRead_mapElems
@ -522,16 +522,16 @@ subroutine inputRead_mapNodes(FEM2DAMASK, &
chunkPos = [1,1,10]
do i = 1,nNodes
map_unsorted(:,i) = [IO_intValue(fileContent(l+1+i),chunkPos,1),i]
enddo
end do
exit
endif
enddo
end if
end do
call math_sort(map_unsorted)
allocate(FEM2DAMASK(minval(map_unsorted(1,:)):maxval(map_unsorted(1,:))),source=-1)
do i = 1, nNodes
FEM2DAMASK(map_unsorted(1,i)) = map_unsorted(2,i)
enddo
end do
end subroutine inputRead_mapNodes
@ -560,10 +560,10 @@ subroutine inputRead_elemNodes(nodes, &
do i=1,nNode
m = discretization_Marc_FEM2DAMASK_node(IO_intValue(fileContent(l+1+i),chunkPos,1))
nodes(1:3,m) = [(mesh_unitlength * IO_floatValue(fileContent(l+1+i),chunkPos,j+1),j=1,3)]
enddo
end do
exit
endif
enddo
end if
end do
end subroutine inputRead_elemNodes
@ -596,17 +596,17 @@ subroutine inputRead_elemType(elem, &
else
t_ = mapElemtype(IO_stringValue(fileContent(l+1+i+j),chunkPos,2))
if (t /= t_) call IO_error(191,IO_stringValue(fileContent(l+1+i+j),chunkPos,2),label1='type',ID1=t)
endif
end if
remainingChunks = elem%nNodes - (chunkPos(1) - 2)
do while(remainingChunks > 0)
j = j + 1
chunkPos = IO_stringPos(fileContent(l+1+i+j))
remainingChunks = remainingChunks - chunkPos(1)
enddo
enddo
end do
end do
exit
endif
enddo
end if
end do
contains
@ -686,7 +686,7 @@ function inputRead_connectivityElem(nElem,nNodes,fileContent)
do k = 1,chunkPos(1)-2
inputRead_connectivityElem(k,e) = &
discretization_Marc_FEM2DAMASK_node(IO_IntValue(fileContent(l+1+i+j),chunkPos,k+2))
enddo
end do
nNodesAlreadyRead = chunkPos(1) - 2
do while(nNodesAlreadyRead < nNodes) ! read on if not all nodes in one line
j = j + 1
@ -694,14 +694,14 @@ function inputRead_connectivityElem(nElem,nNodes,fileContent)
do k = 1,chunkPos(1)
inputRead_connectivityElem(nNodesAlreadyRead+k,e) = &
discretization_Marc_FEM2DAMASK_node(IO_IntValue(fileContent(l+1+i+j),chunkPos,k))
enddo
end do
nNodesAlreadyRead = nNodesAlreadyRead + chunkPos(1)
enddo
endif
enddo
end do
end if
end do
exit
endif
enddo
end if
end do
end function inputRead_connectivityElem
@ -749,12 +749,12 @@ subroutine inputRead_material(materialAt,&
do i = 1,contInts(1)
e = discretization_Marc_FEM2DAMASK_elem(contInts(1+i))
materialAt(e) = ID + 1
enddo
end do
if (initialcondTableStyle == 0) m = m + 1
enddo
endif
endif
enddo
end do
end if
end if
end do
if(any(materialAt < 1)) call IO_error(180)
@ -791,9 +791,9 @@ pure subroutine buildCells(connectivity,definition, &
do c = 1, elem%NcellNodes
realNode: if (count(elem%cellNodeParentNodeWeights(:,c) /= 0) == 1) then
where(connectivity(:,:,e) == -c) connectivity(:,:,e) = connectivity_elem(c,e)
endif realNode
enddo
enddo
end if realNode
end do
end do
nCellNode = maxval(connectivity_elem)
@ -806,7 +806,7 @@ pure subroutine buildCells(connectivity,definition, &
do c = 1, elem%NcellNodes
if (count(elem%cellNodeParentNodeWeights(:,c) /= 0) == nParentNodes) &
candidates_local = [candidates_local,c]
enddo
end do
s = size(candidates_local)
if (allocated(candidates_global)) deallocate(candidates_global)
@ -822,8 +822,8 @@ pure subroutine buildCells(connectivity,definition, &
if (elem%cellNodeParentNodeWeights(j,c) /= 0) then ! real node 'j' partly defines cell node 'c'
p = p + 1
parentsAndWeights(p,1:2) = [connectivity_elem(j,e),elem%cellNodeParentNodeWeights(j,c)]
endif
enddo
end if
end do
! store (and order) real node IDs and their weights together with the element number and local ID
do p = 1, nParentNodes
m = maxloc(parentsAndWeights(:,1),1)
@ -833,9 +833,9 @@ pure subroutine buildCells(connectivity,definition, &
candidates_global(nParentNodes*2+1:nParentNodes*2+2,candidateID) = [e,c]
parentsAndWeights(m,1) = -huge(parentsAndWeights(m,1)) ! out of the competition
enddo
enddo
enddo
end do
end do
end do
! sort according to real node IDs + weight (from left to right)
call math_sort(candidates_global,sortDim=1) ! sort according to first column
@ -847,13 +847,13 @@ pure subroutine buildCells(connectivity,definition, &
do while (n+j<= size(candidates_local)*Nelem)
if (candidates_global(p-1,n+j)/=candidates_global(p-1,n)) exit
j = j + 1
enddo
end do
e = n+j-1
if (any(candidates_global(p,n:e)/=candidates_global(p,n))) &
call math_sort(candidates_global(:,n:e),sortDim=p)
n = e+1
enddo
enddo
end do
end do
i = uniqueRows(candidates_global(1:2*nParentNodes,:))
allocate(definition(nParentNodes-1)%parents(i,nParentNodes))
@ -876,15 +876,15 @@ pure subroutine buildCells(connectivity,definition, &
end where
j = j+1
enddo
end do
nCellNode = nCellNode + 1
definition(nParentNodes-1)%parents(i,:) = parentsAndWeights(:,1)
definition(nParentNodes-1)%weights(i,:) = parentsAndWeights(:,2)
i = i + 1
n = n+j
enddo
end do
enddo
end do
contains
!------------------------------------------------------------------------------------------------
@ -906,10 +906,10 @@ pure subroutine buildCells(connectivity,definition, &
do while (r+d<= size(A,2))
if (any(A(:,r)/=A(:,r+d))) exit
d = d+1
enddo
end do
u = u+1
r = r+d
enddo
end do
end function uniqueRows
@ -939,10 +939,10 @@ pure function buildCellNodes(node_elem)
buildCellNodes(:,n) = buildCellNodes(:,n) &
+ buildCellNodes(:,cellNodeDefinition(i)%parents(j,k)) &
* real(cellNodeDefinition(i)%weights(j,k),pReal)
enddo
end do
buildCellNodes(:,n) = buildCellNodes(:,n)/real(sum(cellNodeDefinition(i)%weights(j,:)),pReal)
enddo
enddo
end do
end do
end function buildCellNodes
@ -970,9 +970,9 @@ pure function buildIPcoordinates(node_cell)
do n = 1, size(connectivity_cell_reshaped,1)
buildIPcoordinates(:,i) = buildIPcoordinates(:,i) &
+ node_cell(:,connectivity_cell_reshaped(n,i))
enddo
end do
buildIPcoordinates(:,i) = buildIPcoordinates(:,i)/real(size(connectivity_cell_reshaped,1),pReal)
enddo
end do
end function buildIPcoordinates
@ -1031,8 +1031,8 @@ pure function IPvolume(elem,node)
+ dot_product((x7-x1), math_cross((x5-x0), (x7-x4)+(x3-x0)))
IPvolume(i,e) = IPvolume(i,e)/12.0_pReal
end select
enddo
enddo
end do
end do
end function IPvolume
@ -1075,11 +1075,11 @@ pure function IPareaNormal(elem,nElem,node)
IPareaNormal(1:3,f,i,e) = IPareaNormal(1:3,f,i,e) &
+ math_cross(nodePos(1:3,mod(n+0,m)+1) - nodePos(1:3,n), &
nodePos(1:3,mod(n+1,m)+1) - nodePos(1:3,n)) * 0.5_pReal
enddo
end do
end select
enddo
enddo
enddo
end do
end do
end do
end function IPareaNormal
@ -1109,10 +1109,10 @@ function IPneighborhood(elem)
do n = 1, size(face_unordered)
face(n,c) = minval(face_unordered)
face_unordered(minloc(face_unordered)) = huge(face_unordered)
enddo
end do
face(n:n+3,c) = [e,i,f]
enddo
enddo; enddo
end do
end do; end do
!--------------------------------------------------------------------------------------------------
! sort face definitions
@ -1125,17 +1125,17 @@ function IPneighborhood(elem)
if(any(face(:c,s) /= face(:c,e))) then
if(e-1/=s) call math_sort(face(:,s:e-1),sortDim=c)
s = e
endif
enddo
enddo
end if
end do
end do
IPneighborhood = 0
do c=1, size(face,2) - 1
if(all(face(:n-1,c) == face(:n-1,c+1))) then
IPneighborhood(:,face(n+2,c+1),face(n+1,c+1),face(n+0,c+1)) = face(n:n+3,c+0)
IPneighborhood(:,face(n+2,c+0),face(n+1,c+0),face(n+0,c+0)) = face(n:n+3,c+1)
endif
enddo
end if
end do
end function IPneighborhood
@ -1171,8 +1171,8 @@ function continuousIntValues(fileContent,maxN,lookupName,lookupMap,lookupMaxN)
if (IO_stringValue(fileContent(l),chunkPos,1) == lookupName(i)) then ! found matching name
continuousIntValues = lookupMap(:,i) ! return resp. entity list
exit
endif
enddo
end if
end do
exit
elseif(containsRange(fileContent(l),chunkPos)) then
first = IO_intValue(fileContent(l),chunkPos,1)
@ -1180,20 +1180,20 @@ function continuousIntValues(fileContent,maxN,lookupName,lookupMap,lookupMaxN)
do i = first, last, sign(1,last-first)
continuousIntValues(1) = continuousIntValues(1) + 1
continuousIntValues(1+continuousIntValues(1)) = i
enddo
end do
exit
else
do i = 1,chunkPos(1)-1 ! interpret up to second to last value
continuousIntValues(1) = continuousIntValues(1) + 1
continuousIntValues(1+continuousIntValues(1)) = IO_intValue(fileContent(l),chunkPos,i)
enddo
end do
if ( IO_lc(IO_stringValue(fileContent(l),chunkPos,chunkPos(1))) /= 'c' ) then ! line finished, read last value
continuousIntValues(1) = continuousIntValues(1) + 1
continuousIntValues(1+continuousIntValues(1)) = IO_intValue(fileContent(l),chunkPos,chunkPos(1))
exit
endif
endif
enddo
end if
end if
end do
end function continuousIntValues
@ -1210,7 +1210,7 @@ logical function containsRange(str,chunkPos)
containsRange = .False.
if(chunkPos(1) == 3) then
if(IO_lc(IO_stringValue(str,chunkPos,2)) == 'to') containsRange = .True.
endif
end if
end function containsRange


@ -126,7 +126,7 @@ subroutine materialpoint_init
print'(a32,1x,6(i8,1x))', 'materialpoint_dcsdE: ', shape(materialpoint_dcsdE)
print'(a32,1x,6(i8,1x),/)', 'materialpoint_dcsdE_knownGood: ', shape(materialpoint_dcsdE_knownGood)
flush(IO_STDOUT)
endif
end if
end subroutine materialpoint_init
@ -171,7 +171,7 @@ subroutine materialpoint_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip,
if (terminallyIll) &
print'(a,/)', '# --- terminallyIll --- #'
print'(a,/)', '#############################################'; flush (6)
endif
end if
if (iand(mode, materialpoint_BACKUPJACOBIAN) /= 0) &
materialpoint_dcsde_knownGood = materialpoint_dcsde
@ -220,15 +220,15 @@ subroutine materialpoint_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip,
- math_delta(j,l) * homogenization_F(i,m,ce) * homogenization_P(k,m,ce) &
+ 0.5_pReal * ( Kirchhoff(j,l)*math_delta(i,k) + Kirchhoff(i,k)*math_delta(j,l) &
+ Kirchhoff(j,k)*math_delta(i,l) + Kirchhoff(i,l)*math_delta(j,k))
enddo; enddo; enddo; enddo; enddo; enddo
end do; end do; end do; end do; end do; end do
forall(i=1:3, j=1:3,k=1:3,l=1:3) &
H_sym(i,j,k,l) = 0.25_pReal * (H(i,j,k,l) + H(j,i,k,l) + H(i,j,l,k) + H(j,i,l,k))
materialpoint_dcsde(1:6,1:6,ip,elCP) = math_sym3333to66(J_inverse * H_sym,weighted=.false.)
endif terminalIllness
endif validCalculation
end if terminalIllness
end if validCalculation
if (debugmaterialpoint%extensive &
.and. ((debugmaterialpoint%element == elCP .and. debugmaterialpoint%ip == ip) .or. .not. debugmaterialpoint%selective)) then
@ -237,9 +237,9 @@ subroutine materialpoint_general(mode, ffn, ffn1, temperature_inp, dt, elFE, ip,
print'(a,i8,1x,i2,/,6(12x,6(f10.3,1x)/))', &
'<< materialpoint >> Jacobian/GPa at elFE ip ', elFE, ip, transpose(materialpoint_dcsdE(1:6,1:6,ip,elCP))*1.0e-9_pReal
flush(IO_STDOUT)
endif
end if
endif
end if
if (all(abs(materialpoint_dcsdE(1:6,1:6,ip,elCP)) < 1e-10_pReal)) &
call IO_warning(601,label1='element (CP)',ID1=elCP,label2='IP',ID2=ip)


@ -764,7 +764,7 @@ end subroutine dct
!--------------------------------------------------------------------------------------------------
! @brief decide whether next block is list or dict
! @brief Decide whether next block is list or dict.
!--------------------------------------------------------------------------------------------------
recursive subroutine decide(blck,flow,s_blck,s_flow,offset)
@ -811,7 +811,7 @@ recursive subroutine decide(blck,flow,s_blck,s_flow,offset)
end if
end if
end subroutine
end subroutine decide
!--------------------------------------------------------------------------------------------------


@ -148,7 +148,7 @@ program DAMASK_grid
call results_openJobFile(parallel=.false.)
call results_writeDataset_str(fileContent,'setup',fname,'load case definition (grid solver)')
call results_closeJobFile
endif
end if
call parallelization_bcast_str(fileContent)
config_load => YAML_parse_str_asDict(fileContent)
@ -198,11 +198,11 @@ program DAMASK_grid
thermalActive: if (solver%get_asString('thermal',defaultVal = 'n/a') == 'spectral') then
field = field + 1
ID(field) = FIELD_THERMAL_ID
endif thermalActive
end if thermalActive
damageActive: if (solver%get_asString('damage',defaultVal = 'n/a') == 'spectral') then
field = field + 1
ID(field) = FIELD_DAMAGE_ID
endif damageActive
end if damageActive
!--------------------------------------------------------------------------------------------------
@ -235,7 +235,7 @@ program DAMASK_grid
#endif
end select
call loadCases(l)%rot%fromAxisAngle(step_mech%get_as1dFloat('R',defaultVal = real([0.0,0.0,1.0,0.0],pReal)),degrees=.true.)
enddo readMech
end do readMech
if (.not. allocated(loadCases(l)%deformation%myType)) call IO_error(error_ID=837,ext_msg = 'L/dot_F/F missing')
step_discretization => load_step%get_dict('discretization')
@ -264,9 +264,9 @@ program DAMASK_grid
write(IO_STDOUT,'(2x,12a)',advance='no') ' x '
else
write(IO_STDOUT,'(2x,f12.7)',advance='no') loadCases(l)%deformation%values(i,j)
endif
enddo; write(IO_STDOUT,'(/)',advance='no')
enddo
end if
end do; write(IO_STDOUT,'(/)',advance='no')
end do
if (any(loadCases(l)%stress%mask .eqv. loadCases(l)%deformation%mask)) errorID = 831
if (any(.not.(loadCases(l)%stress%mask .or. transpose(loadCases(l)%stress%mask)) .and. (math_I3<1))) &
errorID = 838 ! no rotation is allowed by stress BC
@ -280,10 +280,10 @@ program DAMASK_grid
write(IO_STDOUT,'(2x,12a)',advance='no') ' x '
else
write(IO_STDOUT,'(2x,f12.4)',advance='no') loadCases(l)%stress%values(i,j)*1e-6_pReal
endif
enddo; write(IO_STDOUT,'(/)',advance='no')
enddo
endif
end if
end do; write(IO_STDOUT,'(/)',advance='no')
end do
end if
if (any(dNeq(loadCases(l)%rot%asMatrix(), math_I3))) &
write(IO_STDOUT,'(2x,a,/,3(3(3x,f12.7,1x)/))',advance='no') 'R:',&
transpose(loadCases(l)%rot%asMatrix())
@ -298,7 +298,7 @@ program DAMASK_grid
print'(2x,a)', 'r: 1 (constant step width)'
else
print'(2x,a,1x,f0.3)', 'r:', loadCases(l)%r
endif
end if
print'(2x,a,1x,f0.3)', 't:', loadCases(l)%t
print'(2x,a,1x,i0)', 'N:', loadCases(l)%N
if (loadCases(l)%f_out < huge(0)) &
@ -308,8 +308,8 @@ program DAMASK_grid
if (errorID > 0) call IO_error(errorID,label1='line',ID1=l)
endif reportAndCheck
enddo
end if reportAndCheck
end do
!--------------------------------------------------------------------------------------------------
! doing initialization depending on active solvers
@ -337,14 +337,14 @@ program DAMASK_grid
else writeHeader
open(newunit=statUnit,file=trim(getSolverJobName())//&
'.sta',form='FORMATTED', position='APPEND', status='OLD')
endif writeHeader
endif
end if writeHeader
end if
writeUndeformed: if (CLI_restartInc < 1) then
print'(/,1x,a)', '... writing initial configuration to file .................................'
flush(IO_STDOUT)
call materialpoint_results(0,0.0_pReal)
endif writeUndeformed
end if writeUndeformed
loadCaseLooping: do l = 1, size(loadCases)
t_0 = t ! load case start time
@ -361,7 +361,7 @@ program DAMASK_grid
else
Delta_t = loadCases(l)%t * (loadCases(l)%r**(inc-1)-loadCases(l)%r**inc) &
/ (1.0_pReal-loadCases(l)%r**loadCases(l)%N)
endif
end if
Delta_t = Delta_t * real(subStepFactor,pReal)**real(-cutBackLevel,pReal) ! depending on cut back level, decrease time step
skipping: if (totalIncsCounter <= CLI_restartInc) then ! not yet at restart inc?
@ -402,7 +402,7 @@ program DAMASK_grid
case(FIELD_THERMAL_ID); call grid_thermal_spectral_forward(cutBack)
case(FIELD_DAMAGE_ID); call grid_damage_spectral_forward(cutBack)
end select
enddo
end do
if (.not. cutBack) call materialpoint_forward
!--------------------------------------------------------------------------------------------------
@ -422,12 +422,12 @@ program DAMASK_grid
if (.not. solres(field)%converged) exit ! no solution found
enddo
end do
stagIter = stagIter + 1
stagIterate = stagIter < stagItMax &
.and. all(solres(:)%converged) &
.and. .not. all(solres(:)%stagConverged) ! stationary with respect to staggered iteration
enddo
end do
!--------------------------------------------------------------------------------------------------
! check solution for either advance or retry
@ -442,7 +442,7 @@ program DAMASK_grid
write(statUnit,*) totalIncsCounter, t, cutBackLevel, &
solres(1)%converged, solres(1)%iterationsNeeded
flush(statUnit)
endif
end if
elseif (cutBackLevel < maxCutBack) then ! further cutbacking tolerated?
cutBack = .true.
stepFraction = (stepFraction - 1) * subStepFactor ! adjust to new denominator
@ -453,9 +453,9 @@ program DAMASK_grid
else ! no more options to continue
if (worldrank == 0) close(statUnit)
call IO_error(950)
endif
end if
enddo subStepLooping
end do subStepLooping
cutBackLevel = max(0, cutBackLevel - 1) ! try half number of subincs next inc
@ -463,7 +463,7 @@ program DAMASK_grid
print'(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' converged'
else
print'(/,1x,a,i0,a)', 'increment ', totalIncsCounter, ' NOT converged'
endif; flush(IO_STDOUT)
end if; flush(IO_STDOUT)
call MPI_Allreduce(signals_SIGUSR1,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
@ -471,7 +471,7 @@ program DAMASK_grid
print'(/,1x,a)', '... writing results to file ...............................................'
flush(IO_STDOUT)
call materialpoint_results(totalIncsCounter,t)
endif
end if
if (signal) call signals_setSIGUSR1(.false.)
call MPI_Allreduce(signals_SIGUSR2,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
@ -482,19 +482,21 @@ program DAMASK_grid
call mechanical_restartWrite
case(FIELD_THERMAL_ID)
call grid_thermal_spectral_restartWrite
case(FIELD_DAMAGE_ID)
call grid_damage_spectral_restartWrite
end select
end do
call materialpoint_restartWrite
endif
end if
if (signal) call signals_setSIGUSR2(.false.)
call MPI_Allreduce(signals_SIGINT,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (signal) exit loadCaseLooping
endif skipping
end if skipping
enddo incLooping
end do incLooping
enddo loadCaseLooping
end do loadCaseLooping
!--------------------------------------------------------------------------------------------------
@ -523,9 +525,9 @@ subroutine getMaskedTensor(values,mask,tensor)
do j = 1,3
mask(i,j) = row%get_asString(j) == 'x'
if (.not. mask(i,j)) values(i,j) = row%get_asFloat(j)
enddo
enddo
end do
end do
end subroutine
end subroutine getMaskedTensor
end program DAMASK_grid


@ -222,7 +222,7 @@ subroutine cellsSizeOrigin(c,s,o,header)
temp = getXMLValue(header,'Origin')
o = [(IO_floatValue(temp,IO_stringPos(temp),i),i=1,3)]
end subroutine
end subroutine cellsSizeOrigin
!--------------------------------------------------------------------------------------------------
@ -421,7 +421,7 @@ pure function getXMLValue(line,key)
end if
end if
end function
end function getXMLValue
!--------------------------------------------------------------------------------------------------


@ -82,7 +82,7 @@ function base64_to_bytes(base64_str,s,e) result(bytes)
else
s_str = 1_pI64
s_bytes = 1_pI64
endif
end if
if(present(e)) then
if(e>base64_nByte(len(base64_str,kind=pI64))) call IO_error(114, ext_msg='e out of range')
@ -93,7 +93,7 @@ function base64_to_bytes(base64_str,s,e) result(bytes)
e_bytes = base64_nByte(len(base64_str,kind=pI64)) - base64_nByte(s_str)
if(base64_str(e_str-0_pI64:e_str-0_pI64) == '=') e_bytes = e_bytes - 1_pI64
if(base64_str(e_str-1_pI64:e_str-1_pI64) == '=') e_bytes = e_bytes - 1_pI64
endif
end if
bytes = decodeBase64(base64_str(s_str:e_str))
bytes = bytes(s_bytes:e_bytes)
@ -122,8 +122,8 @@ pure function decodeBase64(base64_str) result(bytes)
charPos(p) = int(index(base64_encoding,base64_str(c+p:c+p))-1,C_SIGNED_CHAR)
else
charPos(p) = 0_C_SIGNED_CHAR
endif
enddo
end if
end do
call mvbits(charPos(0),0,6,bytes(b+0),2)
call mvbits(charPos(1),4,2,bytes(b+0),0)
@ -133,7 +133,7 @@ pure function decodeBase64(base64_str) result(bytes)
call mvbits(charPos(3),0,6,bytes(b+2),0)
b = b+3_pI64
c = c+4_pI64
enddo
end do
end function decodeBase64


@ -334,7 +334,7 @@ function discretization_grid_getInitialCondition(label) result(ic)
ic_global = VTI_readDataset_real(IO_read(CLI_geomFile),label)
else
allocate(ic_global(0)) ! needed for IntelMPI
endif
end if
call MPI_Gather(product(cells(1:2))*cells3Offset, 1_MPI_INTEGER_KIND,MPI_INTEGER,displs,&
1_MPI_INTEGER_KIND,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)


@ -16,6 +16,9 @@ module grid_damage_spectral
use prec
use parallelization
use IO
use CLI
use HDF5_utilities
use HDF5
use spectral_utilities
use discretization_grid
use homogenization
@ -46,7 +49,7 @@ module grid_damage_spectral
SNES :: SNES_damage
Vec :: solution_vec
real(pReal), dimension(:,:,:), allocatable :: &
phi_current, & !< field of current damage
phi, & !< field of current damage
phi_lastInc, & !< field of previous damage
phi_stagInc !< field of staggered damage
@ -59,13 +62,13 @@ module grid_damage_spectral
public :: &
grid_damage_spectral_init, &
grid_damage_spectral_solution, &
grid_damage_spectral_restartWrite, &
grid_damage_spectral_forward
contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields and fills them with data
! ToDo: Restart not implemented
!--------------------------------------------------------------------------------------------------
subroutine grid_damage_spectral_init()
@ -76,6 +79,8 @@ subroutine grid_damage_spectral_init()
Vec :: uBound, lBound
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle
real(pReal), dimension(1,product(cells(1:2))*cells3) :: tempN
type(tDict), pointer :: &
num_grid, &
num_generic
@ -112,9 +117,9 @@ subroutine grid_damage_spectral_init()
!--------------------------------------------------------------------------------------------------
! init fields
phi_current = discretization_grid_getInitialCondition('phi')
phi_lastInc = phi_current
phi_stagInc = phi_current
phi = discretization_grid_getInitialCondition('phi')
phi_lastInc = phi
phi_stagInc = phi
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
@ -167,15 +172,27 @@ subroutine grid_damage_spectral_init()
CHKERRQ(err_PETSc)
end if
restartRead: if (CLI_restartInc > 0) then
print'(/,1x,a,i0,a)', 'reading restart data of increment ', CLI_restartInc, ' from file'
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(tempN,groupHandle,'phi',.false.)
phi = reshape(tempN,[cells(1),cells(2),cells3])
call HDF5_read(tempN,groupHandle,'phi_lastInc',.false.)
phi_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
end if restartRead
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
ce = ce + 1
call homogenization_set_phi(phi_current(i,j,k),ce)
call homogenization_set_phi(phi(i,j,k),ce)
end do; end do; end do
call DMDAVecGetArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
phi_PETSc = phi_current
phi_PETSc = phi
call DMDAVecRestoreArrayF90(damage_grid,solution_vec,phi_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
@ -218,20 +235,20 @@ function grid_damage_spectral_solution(Delta_t) result(solution)
solution%converged = .true.
solution%iterationsNeeded = totalIter
end if
stagNorm = maxval(abs(phi_current - phi_stagInc))
stagNorm = maxval(abs(phi - phi_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi_current))
solution%stagConverged = stagNorm < max(num%eps_damage_atol, num%eps_damage_rtol*maxval(phi))
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
phi_stagInc = phi_current
phi_stagInc = phi
!--------------------------------------------------------------------------------------------------
! updating damage state
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
call homogenization_set_phi(phi_current(i,j,k),ce)
call homogenization_set_phi(phi(i,j,k),ce)
end do; end do; end do
call VecMin(solution_vec,devNull,phi_min,err_PETSc)
@ -261,7 +278,7 @@ subroutine grid_damage_spectral_forward(cutBack)
if (cutBack) then
phi_current = phi_lastInc
phi = phi_lastInc
phi_stagInc = phi_lastInc
!--------------------------------------------------------------------------------------------------
! reverting damage field state
@ -269,22 +286,52 @@ subroutine grid_damage_spectral_forward(cutBack)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc) !< get the data out of PETSc to work with
CHKERRQ(err_PETSc)
phi_PETSc = phi_current
phi_PETSc = phi
call DMDAVecRestoreArrayF90(dm_local,solution_vec,phi_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
call homogenization_set_phi(phi_current(i,j,k),ce)
call homogenization_set_phi(phi(i,j,k),ce)
end do; end do; end do
else
phi_lastInc = phi_current
phi_lastInc = phi
call updateReference
end if
end subroutine grid_damage_spectral_forward
!--------------------------------------------------------------------------------------------------
!> @brief Write current solver and constitutive data for restart to file.
!--------------------------------------------------------------------------------------------------
subroutine grid_damage_spectral_restartWrite
PetscErrorCode :: err_PETSc
DM :: dm_local
integer(HID_T) :: fileHandle, groupHandle
PetscScalar, dimension(:,:,:), pointer :: phi
call SNESGetDM(SNES_damage,dm_local,err_PETSc);
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,phi,err_PETSc);
CHKERRQ(err_PETSc)
print'(1x,a)', 'writing damage solver data required for restart to file'; flush(IO_STDOUT)
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_write(reshape(phi,[1,product(cells(1:2))*cells3]),groupHandle,'phi')
call HDF5_write(reshape(phi_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'phi_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
call DMDAVecRestoreArrayF90(dm_local,solution_vec,phi,err_PETSc);
CHKERRQ(err_PETSc)
end subroutine grid_damage_spectral_restartWrite
!--------------------------------------------------------------------------------------------------
!> @brief Construct the residual vector.
!--------------------------------------------------------------------------------------------------
@ -297,7 +344,7 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
x_scal
PetscScalar, dimension( &
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
r
r !< residual
PetscObject :: dummy
PetscErrorCode, intent(out) :: err_PETSc
@ -305,10 +352,8 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
real(pReal), dimension(3,cells(1),cells(2),cells3) :: vectorField
phi_current = x_scal
!--------------------------------------------------------------------------------------------------
! evaluate polarization field
vectorField = utilities_ScalarGradient(phi_current)
phi = x_scal
vectorField = utilities_ScalarGradient(phi)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
@ -318,22 +363,20 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_phi(phi_current(i,j,k),ce)) &
+ homogenization_mu_phi(ce)*(phi_lastInc(i,j,k) - phi_current(i,j,k)) &
+ mu_ref*phi_current(i,j,k)
r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_phi(phi(i,j,k),ce)) &
+ homogenization_mu_phi(ce)*(phi_lastInc(i,j,k) - phi(i,j,k)) &
+ mu_ref*phi(i,j,k)
end do; end do; end do
!--------------------------------------------------------------------------------------------------
! constructing residual
r = max(min(utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t),phi_lastInc),num%phi_min) &
- phi_current
- phi
err_PETSc = 0
end subroutine formResidual
!--------------------------------------------------------------------------------------------------
!> @brief update reference viscosity and conductivity
!> @brief Update reference viscosity and conductivity.
!--------------------------------------------------------------------------------------------------
subroutine updateReference()


@ -111,10 +111,12 @@ subroutine grid_mechanical_FEM_init
1.0_pReal,-1.0_pReal,-1.0_pReal,-1.0_pReal, &
1.0_pReal, 1.0_pReal, 1.0_pReal, 1.0_pReal], [4,8])
real(pReal), dimension(3,3,3,3) :: devNull
real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
real(pReal), dimension(3,product(cells(1:2))*cells3) :: temp3n
PetscErrorCode :: err_PETSc
integer(MPI_INTEGER_KIND) :: err_MPI
PetscScalar, pointer, dimension(:,:,:,:) :: &
u_current,u_lastInc
u,u_lastInc
PetscInt, dimension(0:worldsize-1) :: localK
integer(HID_T) :: fileHandle, groupHandle
type(tDict), pointer :: &
@ -220,7 +222,7 @@ subroutine grid_mechanical_FEM_init
CHKERRQ(err_PETSc)
call VecSet(solution_rate ,0.0_pReal,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -260,22 +262,26 @@ subroutine grid_mechanical_FEM_init
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if(err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
call HDF5_read(u_current,groupHandle,'u')
call HDF5_read(u_lastInc,groupHandle,'u_lastInc')
call HDF5_read(temp33n,groupHandle,'F')
F = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_lastInc')
F_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
call HDF5_read(temp3n,groupHandle,'u')
u = reshape(temp3n,[3,cells(1),cells(2),cells3])
call HDF5_read(temp3n,groupHandle,'u_lastInc')
u_lastInc = reshape(temp3n,[3,cells(1),cells(2),cells3])
elseif (CLI_restartInc == 0) then restartRead
F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3) ! initialize to identity
F = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3)
endif restartRead
end if restartRead
homogenization_F0 = reshape(F_lastInc, [3,3,product(cells(1:2))*cells3]) ! set starting condition for homogenization_mechanical_response
call utilities_updateCoords(F)
call utilities_constitutiveResponse(P_current,P_av,C_volAvg,devNull, & ! stress field, stress avg, global average of stiffness and (min+max)/2
F, & ! target F
0.0_pReal) ! time increment
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -292,7 +298,7 @@ subroutine grid_mechanical_FEM_init
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
endif restartRead2
end if restartRead2
end subroutine grid_mechanical_FEM_init
@ -354,10 +360,10 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
rotation_BC
PetscErrorCode :: err_PETSc
PetscScalar, pointer, dimension(:,:,:,:) :: &
u_current,u_lastInc
u,u_lastInc
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -381,7 +387,7 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
elseif (deformation_BC%myType=='F') then ! aim at end of load case is prescribed
F_aimDot = F_aimDot &
+ merge(.0_pReal,(deformation_BC%values - F_aim_lastInc)/t_remaining,deformation_BC%mask)
endif
end if
if (guess) then
call VecWAXPY(solution_rate,-1.0_pReal,solution_lastInc,solution_current,err_PETSc)
@ -391,14 +397,14 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
else
call VecSet(solution_rate,0.0_pReal,err_PETSc)
CHKERRQ(err_PETSc)
endif
end if
call VecCopy(solution_current,solution_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
F_lastInc = F
homogenization_F0 = reshape(F, [3,3,product(cells(1:2))*cells3])
endif
end if
!--------------------------------------------------------------------------------------------------
! update average and local deformation gradients
@ -410,7 +416,7 @@ subroutine grid_mechanical_FEM_forward(cutBack,guess,Delta_t,Delta_t_old,t_remai
call VecAXPY(solution_current,Delta_t,solution_rate,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -441,10 +447,10 @@ subroutine grid_mechanical_FEM_restartWrite
PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle
PetscScalar, dimension(:,:,:,:), pointer :: u_current,u_lastInc
PetscScalar, dimension(:,:,:,:), pointer :: u,u_lastInc
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -453,10 +459,10 @@ subroutine grid_mechanical_FEM_restartWrite
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
groupHandle = HDF5_addGroup(fileHandle,'solver')
call HDF5_write(F,groupHandle,'F')
call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
call HDF5_write(u_current,groupHandle,'u')
call HDF5_write(u_lastInc,groupHandle,'u_lastInc')
call HDF5_write(reshape(F,[3,3,product(cells(1:2))*cells3]),groupHandle,'F')
call HDF5_write(reshape(F_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_lastInc')
call HDF5_write(reshape(u,[3,product(cells(1:2))*cells3]),groupHandle,'u')
call HDF5_write(reshape(u_lastInc,[3,product(cells(1:2))*cells3]),groupHandle,'u_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
@ -471,9 +477,9 @@ subroutine grid_mechanical_FEM_restartWrite
call HDF5_write(C_volAvgLastInc,groupHandle,'C_volAvgLastInc',.false.)
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
endif
end if
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u_current,err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_current,u,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecRestoreArrayF90(mechanical_grid,solution_lastInc,u_lastInc,err_PETSc)
CHKERRQ(err_PETSc)
@ -511,7 +517,7 @@ subroutine converged(snes_local,PETScIter,devNull1,devNull2,fnorm,reason,dummy,e
reason = -1
else
reason = 0
endif
end if
print'(/,1x,a)', '... reporting .............................................................'
print'(/,1x,a,f12.2,a,es8.2,a,es9.2,a)', 'error divergence = ', &
@ -561,7 +567,7 @@ subroutine formResidual(da_local,x_local, &
print'(/,1x,a,/,2(3(f12.7,1x)/),3(f12.7,1x))', &
'deformation gradient aim =', transpose(F_aim)
flush(IO_STDOUT)
endif newIteration
end if newIteration
!--------------------------------------------------------------------------------------------------
! get deformation gradient
@ -572,9 +578,9 @@ subroutine formResidual(da_local,x_local, &
do kk = -1, 0; do jj = -1, 0; do ii = -1, 0
ctr = ctr + 1
x_elem(ctr,1:3) = x_scal(0:2,i+ii,j+jj,k+kk)
enddo; enddo; enddo
end do; end do; end do
F(1:3,1:3,i,j,k-cells3Offset) = params%rotation_BC%rotate(F_aim,active=.true.) + transpose(matmul(BMat,x_elem))
enddo; enddo; enddo
end do; end do; end do
call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
@ -605,7 +611,7 @@ subroutine formResidual(da_local,x_local, &
do kk = -1, 0; do jj = -1, 0; do ii = -1, 0
ctr = ctr + 1
x_elem(ctr,1:3) = x_scal(0:2,i+ii,j+jj,k+kk)
enddo; enddo; enddo
end do; end do; end do
ele = ele + 1
f_elem = matmul(transpose(BMat),transpose(P_current(1:3,1:3,i,j,k-cells3Offset)))*detJ + &
matmul(HGMat,x_elem)*(homogenization_dPdF(1,1,1,1,ele) + &
@ -615,8 +621,8 @@ subroutine formResidual(da_local,x_local, &
do kk = -1, 0; do jj = -1, 0; do ii = -1, 0
ctr = ctr + 1
r(0:2,i+ii,j+jj,k+kk) = r(0:2,i+ii,j+jj,k+kk) + f_elem(ctr,1:3)
enddo; enddo; enddo
enddo; enddo; enddo
end do; end do; end do
end do; end do; end do
call DMDAVecRestoreArrayF90(da_local,x_local,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
call DMDAVecRestoreArrayF90(da_local,f_local,r,err_PETSc)
@ -690,7 +696,7 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,err_PETSc)
col(MatStencil_j,ctr+16) = j+jj
col(MatStencil_k,ctr+16) = k+kk
col(MatStencil_c,ctr+16) = 2
enddo; enddo; enddo
end do; end do; end do
row = col
ce = ce + 1
K_ele = 0.0_pReal
@ -709,7 +715,7 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,err_PETSc)
shape=[3,3,3,3], order=[2,1,4,3]),shape=[9,9]),BMatFull))*detJ
call MatSetValuesStencil(Jac,24_pPETScInt,row,24_pPetscInt,col,K_ele,ADD_VALUES,err_PETSc)
CHKERRQ(err_PETSc)
enddo; enddo; enddo
end do; end do; end do
call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,err_PETSc)
CHKERRQ(err_PETSc)
call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,err_PETSc)
@ -733,7 +739,7 @@ subroutine formJacobian(da_local,x_local,Jac_pre,Jac,dummy,err_PETSc)
do k = cells3Offset+1, cells3Offset+cells3; do j = 1, cells(2); do i = 1, cells(1)
ce = ce + 1
x_scal(0:2,i-1,j-1,k-1) = discretization_IPcoords(1:3,ce)
enddo; enddo; enddo
end do; end do; end do
call DMDAVecRestoreArrayF90(da_local,coordinates,x_scal,err_PETSc)
CHKERRQ(err_PETSc) ! initialize to undeformed coordinates (ToDo: use ip coordinates)
call MatNullSpaceCreateRigidBody(coordinates,matnull,err_PETSc)

View File

@ -104,7 +104,7 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields and fills them with data, potentially from restart info
!--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_basic_init
subroutine grid_mechanical_spectral_basic_init()
real(pReal), dimension(3,3,cells(1),cells(2),cells3) :: P
PetscErrorCode :: err_PETSc
@ -112,6 +112,7 @@ subroutine grid_mechanical_spectral_basic_init
PetscScalar, pointer, dimension(:,:,:,:) :: &
F ! pointer to solution data
PetscInt, dimension(0:worldsize-1) :: localK
real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
integer(HID_T) :: fileHandle, groupHandle
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
type(MPI_File) :: fileUnit
@ -229,8 +230,10 @@ subroutine grid_mechanical_spectral_basic_init
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
call HDF5_read(temp33n,groupHandle,'F')
F = reshape(temp33n,[9,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_lastInc')
F_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
elseif (CLI_restartInc == 0) then restartRead
F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3) ! initialize to identity
@ -421,8 +424,8 @@ subroutine grid_mechanical_spectral_basic_restartWrite
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
groupHandle = HDF5_addGroup(fileHandle,'solver')
call HDF5_write(F,groupHandle,'F')
call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
call HDF5_write(reshape(F,[3,3,product(cells(1:2))*cells3]),groupHandle,'F')
call HDF5_write(reshape(F_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
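The restart datasets above are now stored as one 3x3 tensor per cell and, on reading, passed through a temporary buffer (temp33n) before being reshaped back to the solver layout (9 components for the PETSc DMDA pointer, 3x3 for F_lastInc). A minimal NumPy sketch of that round trip, not part of this commit; the grid size is made up, and order='F' mimics Fortran's column-major reshape:

import numpy as np

cells = (4, 3, 2)                                      # hypothetical grid dimensions
F = np.random.rand(3, 3, *cells)                       # rank-5 deformation-gradient field

flat = F.reshape(3, 3, np.prod(cells), order='F')      # per-cell layout written to *_restart.hdf5
F_back = flat.reshape(3, 3, *cells, order='F')         # inverse reshape on restart read
F_dmda = flat.reshape(9, *cells, order='F')            # 9-component layout of the DMDA pointer

assert np.array_equal(F_back, F)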

View File

@ -115,7 +115,7 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief allocates all necessary fields and fills them with data, potentially from restart info
!--------------------------------------------------------------------------------------------------
subroutine grid_mechanical_spectral_polarisation_init
subroutine grid_mechanical_spectral_polarisation_init()
real(pReal), dimension(3,3,cells(1),cells(2),cells3) :: P
PetscErrorCode :: err_PETSc
@ -125,6 +125,7 @@ subroutine grid_mechanical_spectral_polarisation_init
F, & ! specific (sub)pointer
F_tau ! specific (sub)pointer
PetscInt, dimension(0:worldsize-1) :: localK
real(pReal), dimension(3,3,product(cells(1:2))*cells3) :: temp33n
integer(HID_T) :: fileHandle, groupHandle
#if (PETSC_VERSION_MAJOR==3 && PETSC_VERSION_MINOR>14) && !defined(PETSC_HAVE_MPI_F90MODULE_VISIBILITY)
type(MPI_File) :: fileUnit
@ -250,10 +251,14 @@ subroutine grid_mechanical_spectral_polarisation_init
call HDF5_read(F_aimDot,groupHandle,'F_aimDot',.false.)
call MPI_Bcast(F_aimDot,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
call HDF5_read(F,groupHandle,'F')
call HDF5_read(F_lastInc,groupHandle,'F_lastInc')
call HDF5_read(F_tau,groupHandle,'F_tau')
call HDF5_read(F_tau_lastInc,groupHandle,'F_tau_lastInc')
call HDF5_read(temp33n,groupHandle,'F')
F = reshape(temp33n,[9,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_lastInc')
F_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_tau')
F_tau = reshape(temp33n,[9,cells(1),cells(2),cells3])
call HDF5_read(temp33n,groupHandle,'F_tau_lastInc')
F_tau_lastInc = reshape(temp33n,[3,3,cells(1),cells(2),cells3])
elseif (CLI_restartInc == 0) then restartRead
F_lastInc = spread(spread(spread(math_I3,3,cells(1)),4,cells(2)),5,cells3) ! initialize to identity
@ -476,10 +481,10 @@ subroutine grid_mechanical_spectral_polarisation_restartWrite
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','w')
groupHandle = HDF5_addGroup(fileHandle,'solver')
call HDF5_write(F,groupHandle,'F')
call HDF5_write(F_lastInc,groupHandle,'F_lastInc')
call HDF5_write(F_tau,groupHandle,'F_tau')
call HDF5_write(F_tau_lastInc,groupHandle,'F_tau_lastInc')
call HDF5_write(reshape(F,[3,3,product(cells(1:2))*cells3]),groupHandle,'F')
call HDF5_write(reshape(F_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_lastInc')
call HDF5_write(reshape(F_tau,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_tau')
call HDF5_write(reshape(F_tau_lastInc,[3,3,product(cells(1:2))*cells3]),groupHandle,'F_tau_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)

View File

@ -48,7 +48,7 @@ module grid_thermal_spectral
SNES :: SNES_thermal
Vec :: solution_vec
real(pReal), dimension(:,:,:), allocatable :: &
T_current, & !< field of current temperature
T, & !< field of current temperature
T_lastInc, & !< field of previous temperature
T_stagInc, & !< field of staggered temperature
dotT_lastInc
@ -78,6 +78,7 @@ subroutine grid_thermal_spectral_init()
integer(MPI_INTEGER_KIND) :: err_MPI
PetscErrorCode :: err_PETSc
integer(HID_T) :: fileHandle, groupHandle
real(pReal), dimension(1,product(cells(1:2))*cells3) :: tempN
type(tDict), pointer :: &
num_grid
@ -107,10 +108,10 @@ subroutine grid_thermal_spectral_init()
!--------------------------------------------------------------------------------------------------
! init fields
T_current = discretization_grid_getInitialCondition('T')
T_lastInc = T_current
T_stagInc = T_current
dotT_lastInc = 0.0_pReal * T_current
T = discretization_grid_getInitialCondition('T')
T_lastInc = T
T_stagInc = T
dotT_lastInc = 0.0_pReal * T
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
@ -151,20 +152,23 @@ subroutine grid_thermal_spectral_init()
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_read(T_current,groupHandle,'T',.false.)
call HDF5_read(T_lastInc,groupHandle,'T_lastInc',.false.)
call HDF5_read(dotT_lastInc,groupHandle,'dotT_lastInc',.false.)
call HDF5_read(tempN,groupHandle,'T',.false.)
T = reshape(tempN,[cells(1),cells(2),cells3])
call HDF5_read(tempN,groupHandle,'T_lastInc',.false.)
T_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
call HDF5_read(tempN,groupHandle,'dotT_lastInc',.false.)
dotT_lastInc = reshape(tempN,[cells(1),cells(2),cells3])
end if restartRead
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
ce = ce + 1
call homogenization_thermal_setField(T_current(i,j,k),0.0_pReal,ce)
call homogenization_thermal_setField(T(i,j,k),0.0_pReal,ce)
end do; end do; end do
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
T_PETSc = T_current
T_PETSc = T
call DMDAVecRestoreArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
@ -207,20 +211,20 @@ function grid_thermal_spectral_solution(Delta_t) result(solution)
solution%converged = .true.
solution%iterationsNeeded = totalIter
end if
stagNorm = maxval(abs(T_current - T_stagInc))
stagNorm = maxval(abs(T - T_stagInc))
call MPI_Allreduce(MPI_IN_PLACE,stagNorm,1_MPI_INTEGER_KIND,MPI_DOUBLE,MPI_MAX,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T_current))
solution%stagConverged = stagNorm < max(num%eps_thermal_atol, num%eps_thermal_rtol*maxval(T))
call MPI_Allreduce(MPI_IN_PLACE,solution%stagConverged,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LAND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
T_stagInc = T_current
T_stagInc = T
!--------------------------------------------------------------------------------------------------
! updating thermal state
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
call homogenization_thermal_setField(T_current(i,j,k),(T_current(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
call homogenization_thermal_setField(T(i,j,k),(T(i,j,k)-T_lastInc(i,j,k))/params%Delta_t,ce)
end do; end do; end do
call VecMin(solution_vec,devNull,T_min,err_PETSc)
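A serial sketch of the staggered-iteration acceptance test used above, with made-up tolerance values and the MPI reductions omitted: the temperature field is accepted once its change relative to the previous staggered iterate falls below max(eps_thermal_atol, eps_thermal_rtol*max(T)).

import numpy as np

eps_thermal_atol, eps_thermal_rtol = 1.0e-2, 1.0e-6    # assumed tolerance values
T_stagInc = 300.0 + np.random.rand(4, 4, 4)            # previous staggered iterate
T = T_stagInc + 1.0e-7                                 # new solve barely moved the field

stagNorm = np.max(np.abs(T - T_stagInc))
stagConverged = stagNorm < max(eps_thermal_atol, eps_thermal_rtol * T.max())
print(stagNorm, stagConverged)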
@ -250,7 +254,7 @@ subroutine grid_thermal_spectral_forward(cutBack)
if (cutBack) then
T_current = T_lastInc
T = T_lastInc
T_stagInc = T_lastInc
!--------------------------------------------------------------------------------------------------
@ -259,17 +263,17 @@ subroutine grid_thermal_spectral_forward(cutBack)
CHKERRQ(err_PETSc)
call DMDAVecGetArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc) !< get the data out of PETSc to work with
CHKERRQ(err_PETSc)
T_PETSc = T_current
T_PETSc = T
call DMDAVecRestoreArrayF90(dm_local,solution_vec,T_PETSc,err_PETSc)
CHKERRQ(err_PETSc)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
call homogenization_thermal_setField(T_current(i,j,k),dotT_lastInc(i,j,k),ce)
call homogenization_thermal_setField(T(i,j,k),dotT_lastInc(i,j,k),ce)
end do; end do; end do
else
dotT_lastInc = (T_current - T_lastInc)/params%Delta_t
T_lastInc = T_current
dotT_lastInc = (T - T_lastInc)/params%Delta_t
T_lastInc = T
call updateReference
end if
@ -277,7 +281,7 @@ end subroutine grid_thermal_spectral_forward
!--------------------------------------------------------------------------------------------------
!> @brief Write current solver and constitutive data for restart to file
!> @brief Write current solver and constitutive data for restart to file.
!--------------------------------------------------------------------------------------------------
subroutine grid_thermal_spectral_restartWrite
@ -295,9 +299,9 @@ subroutine grid_thermal_spectral_restartWrite
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a')
groupHandle = HDF5_openGroup(fileHandle,'solver')
call HDF5_write(T,groupHandle,'T')
call HDF5_write(T_lastInc,groupHandle,'T_lastInc')
call HDF5_write(dotT_lastInc,groupHandle,'dotT_lastInc')
call HDF5_write(reshape(T,[1,product(cells(1:2))*cells3]),groupHandle,'T')
call HDF5_write(reshape(T_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'T_lastInc')
call HDF5_write(reshape(dotT_lastInc,[1,product(cells(1:2))*cells3]),groupHandle,'dotT_lastInc')
call HDF5_closeGroup(groupHandle)
call HDF5_closeFile(fileHandle)
@ -320,7 +324,7 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
x_scal
PetscScalar, dimension( &
X_RANGE,Y_RANGE,Z_RANGE), intent(out) :: &
r
r !< residual
PetscObject :: dummy
PetscErrorCode, intent(out) :: err_PETSc
@ -328,10 +332,8 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
real(pReal), dimension(3,cells(1),cells(2),cells3) :: vectorField
T_current = x_scal
!--------------------------------------------------------------------------------------------------
! evaluate polarization field
vectorField = utilities_ScalarGradient(T_current)
T = x_scal
vectorField = utilities_ScalarGradient(T)
ce = 0
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
@ -342,13 +344,11 @@ subroutine formResidual(in,x_scal,r,dummy,err_PETSc)
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
ce = ce + 1
r(i,j,k) = params%Delta_t*(r(i,j,k) + homogenization_f_T(ce)) &
+ homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T_current(i,j,k)) &
+ mu_ref*T_current(i,j,k)
+ homogenization_mu_T(ce) * (T_lastInc(i,j,k) - T(i,j,k)) &
+ mu_ref*T(i,j,k)
end do; end do; end do
!--------------------------------------------------------------------------------------------------
! constructing residual
r = T_current &
r = T &
- utilities_GreenConvolution(r, K_ref, mu_ref, params%Delta_t)
err_PETSc = 0
@ -356,7 +356,7 @@ end subroutine formResidual
!--------------------------------------------------------------------------------------------------
!> @brief update reference viscosity and conductivity
!> @brief Update reference viscosity and conductivity.
!--------------------------------------------------------------------------------------------------
subroutine updateReference()

View File

@ -1142,11 +1142,13 @@ subroutine selfTest()
MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (worldrank==0) then
if (any(dNeq(tensorSum/tensorField_fourier(:,:,1,1,1)%re,1.0_pReal,1.0e-12_pReal))) error stop 'mismatch avg tensorField FFT <-> real'
if (any(dNeq(tensorSum/tensorField_fourier(:,:,1,1,1)%re,1.0_pReal,1.0e-12_pReal))) &
error stop 'mismatch avg tensorField FFT <-> real'
end if
call fftw_mpi_execute_dft_c2r(planTensorBack,tensorField_fourier,tensorField_real)
tensorField_real(1:3,1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
if (maxval(abs(tensorField_real_ - tensorField_real*wgt))>5.0e-15_pReal) error stop 'mismatch tensorField FFT/invFFT <-> real'
if (maxval(abs(tensorField_real_ - tensorField_real*wgt))>5.0e-15_pReal) &
error stop 'mismatch tensorField FFT/invFFT <-> real'
call random_number(vectorField_real)
vectorField_real(1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
@ -1156,11 +1158,13 @@ subroutine selfTest()
MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (worldrank==0) then
if (any(dNeq(vectorSum/vectorField_fourier(:,1,1,1)%re,1.0_pReal,1.0e-12_pReal))) error stop 'mismatch avg vectorField FFT <-> real'
if (any(dNeq(vectorSum/vectorField_fourier(:,1,1,1)%re,1.0_pReal,1.0e-12_pReal))) &
error stop 'mismatch avg vectorField FFT <-> real'
end if
call fftw_mpi_execute_dft_c2r(planVectorBack,vectorField_fourier,vectorField_real)
vectorField_real(1:3,cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
if (maxval(abs(vectorField_real_ - vectorField_real*wgt))>5.0e-15_pReal) error stop 'mismatch vectorField FFT/invFFT <-> real'
if (maxval(abs(vectorField_real_ - vectorField_real*wgt))>5.0e-15_pReal) &
error stop 'mismatch vectorField FFT/invFFT <-> real'
call random_number(scalarField_real)
scalarField_real(cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
@ -1170,11 +1174,13 @@ subroutine selfTest()
MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
if (worldrank==0) then
if (dNeq(scalarSum/scalarField_fourier(1,1,1)%re,1.0_pReal,1.0e-12_pReal)) error stop 'mismatch avg scalarField FFT <-> real'
if (dNeq(scalarSum/scalarField_fourier(1,1,1)%re,1.0_pReal,1.0e-12_pReal)) &
error stop 'mismatch avg scalarField FFT <-> real'
end if
call fftw_mpi_execute_dft_c2r(planScalarBack,scalarField_fourier,scalarField_real)
scalarField_real(cells(1)+1:cells1Red*2,:,:) = 0.0_pReal
if (maxval(abs(scalarField_real_ - scalarField_real*wgt))>5.0e-15_pReal) error stop 'mismatch scalarField FFT/invFFT <-> real'
if (maxval(abs(scalarField_real_ - scalarField_real*wgt))>5.0e-15_pReal) &
error stop 'mismatch scalarField FFT/invFFT <-> real'
call random_number(r)
call MPI_Bcast(r,9_MPI_INTEGER_KIND,MPI_DOUBLE,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
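The reformatted checks above test two transform properties; a NumPy sketch of the same idea, not DAMASK code and on a made-up grid: the k=0 Fourier coefficient equals the field sum (grid-point count times the average), and a forward/backward transform reproduces the field once the normalization weight is applied (NumPy's inverse already carries the 1/N that the solver applies as wgt).

import numpy as np

cells = (4, 4, 4)                                      # hypothetical grid
N = np.prod(cells)
field = np.random.rand(*cells)

field_hat = np.fft.rfftn(field)                        # unnormalized forward transform
assert np.isclose(field_hat[0, 0, 0].real / N, field.mean())

roundtrip = np.fft.irfftn(field_hat, s=cells)          # inverse includes the 1/N weight
assert np.max(np.abs(roundtrip - field)) < 5.0e-15     # same tolerance as the self-test above
print('FFT sketch consistent')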

View File

@ -406,6 +406,9 @@ subroutine homogenization_restartWrite(fileHandle)
call HDF5_write(homogState(ho)%state,groupHandle(2),'omega_mechanical') ! ToDo: should be done by mech
if(damageState_h(ho)%sizeState > 0) &
call HDF5_write(damageState_h(ho)%state,groupHandle(2),'omega_damage') ! ToDo: should be done by mech
call HDF5_closeGroup(groupHandle(2))
end do
@ -433,6 +436,9 @@ subroutine homogenization_restartRead(fileHandle)
call HDF5_read(homogState(ho)%state0,groupHandle(2),'omega_mechanical') ! ToDo: should be done by mech
if(damageState_h(ho)%sizeState > 0) &
call HDF5_read(damageState_h(ho)%state0,groupHandle(2),'omega_damage') ! ToDo: should be done by mech
call HDF5_closeGroup(groupHandle(2))
end do

View File

@ -80,11 +80,15 @@ module subroutine damage_partition(ce)
integer, intent(in) :: ce
real(pReal) :: phi
integer :: co
if(damageState_h(material_homogenizationID(ce))%sizeState < 1) return
phi = damagestate_h(material_homogenizationID(ce))%state(1,material_homogenizationEntry(ce))
call phase_set_phi(phi,1,ce)
do co = 1, homogenization_Nconstituents(material_homogenizationID(ce))
call phase_set_phi(phi,co,ce)
end do
end subroutine damage_partition
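A tiny sketch, with hypothetical names, of the behavioral change above: the homogenization-level damage phi is now assigned to every constituent of the cell rather than only to the first one.

n_constituents = 3                                     # hypothetical constituent count of the cell
phi_cell = 0.85                                        # damage from the homogenization state
phi_constituent = [0.0] * n_constituents

for co in range(n_constituents):                       # previously only constituent 0 was updated
    phi_constituent[co] = phi_cell

assert all(p == phi_cell for p in phi_constituent)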

View File

@ -95,7 +95,7 @@ subroutine materialpoint_init
call phase_restartRead(fileHandle)
call HDF5_closeFile(fileHandle)
endif
end if
end subroutine materialpoint_init

View File

@ -311,7 +311,7 @@ program DAMASK_mesh
write(statUnit,*) totalIncsCounter, time, cutBackLevel, &
solres%converged, solres%iterationsNeeded ! write statistics about accepted solution
flush(statUnit)
endif
end if
end do subStepLooping
cutBackLevel = max(0, cutBackLevel - 1) ! try half number of subincs next inc

View File

@ -365,16 +365,16 @@ subroutine selfTest
do o = lbound(FEM_quadrature_weights(d,:),1), ubound(FEM_quadrature_weights(d,:),1)
if (dNeq(sum(FEM_quadrature_weights(d,o)%p),1.0_pReal,5e-15_pReal)) &
error stop 'quadrature weights'
enddo
enddo
end do
end do
do d = lbound(FEM_quadrature_points,1), ubound(FEM_quadrature_points,1)
do o = lbound(FEM_quadrature_points(d,:),1), ubound(FEM_quadrature_points(d,:),1)
n = size(FEM_quadrature_points(d,o)%p,1)/d
if (any(dNeq(sum(reshape(FEM_quadrature_points(d,o)%p,[d,n]),2),-real(n,pReal)/w(d),1.e-14_pReal))) &
error stop 'quadrature points'
enddo
enddo
end do
end do
end subroutine selfTest

View File

@ -140,7 +140,7 @@ subroutine discretization_mesh_init(restart)
call DMClone(globalMesh,geomMesh,err_PETSc)
else
call DMPlexDistribute(globalMesh,0_pPETSCINT,sf,geomMesh,err_PETSc)
endif
end if
CHKERRQ(err_PETSc)
allocate(mesh_boundaries(mesh_Nboundaries), source = 0_pPETSCINT)
@ -154,7 +154,7 @@ subroutine discretization_mesh_init(restart)
mesh_boundaries(1:nFaceSets) = pFaceSets
CHKERRQ(err_PETSc)
call ISRestoreIndicesF90(faceSetIS,pFaceSets,err_PETSc)
endif
end if
call MPI_Bcast(mesh_boundaries,mesh_Nboundaries,MPI_INTEGER,0_MPI_INTEGER_KIND,MPI_COMM_WORLD,err_MPI)
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
@ -182,7 +182,7 @@ subroutine discretization_mesh_init(restart)
do j = 1, mesh_NcpElems
call DMGetLabelValue(geomMesh,'Cell Sets',j-1,materialAt(j),err_PETSc)
CHKERRQ(err_PETSc)
enddo
end do
materialAt = materialAt + 1_pPETSCINT
if (debug_element < 1 .or. debug_element > mesh_NcpElems) call IO_error(602,ext_msg='element')
@ -222,7 +222,7 @@ subroutine mesh_FEM_build_ipVolumes(dimPlex)
call DMPlexComputeCellGeometryFVM(geomMesh,cell,vol,pCent,pNorm,err_PETSc)
CHKERRQ(err_PETSc)
mesh_ipVolume(:,cell+1) = vol/real(mesh_maxNips,pReal)
enddo
end do
end subroutine mesh_FEM_build_ipVolumes
@ -258,11 +258,11 @@ subroutine mesh_FEM_build_ipCoordinates(dimPlex,qPoints)
do dirJ = 1_pPETSCINT, dimPlex
mesh_ipCoordinates(dirI,qPt,cell+1) = mesh_ipCoordinates(dirI,qPt,cell+1) + &
pCellJ((dirI-1)*dimPlex+dirJ)*(qPoints(qOffset+dirJ) + 1.0_pReal)
enddo
enddo
end do
end do
qOffset = qOffset + dimPlex
enddo
enddo
end do
end do
end subroutine mesh_FEM_build_ipCoordinates

View File

@ -199,11 +199,11 @@ subroutine FEM_mechanical_init(fieldBC)
CHKERRQ(err_PETSc)
call PetscSectionGetDof(section,cellStart,pnumDof(topologDim),err_PETSc)
CHKERRQ(err_PETSc)
enddo
end do
numBC = 0
do field = 1, dimPlex; do faceSet = 1, mesh_Nboundaries
if (fieldBC%componentBC(field)%Mask(faceSet)) numBC = numBC + 1
enddo; enddo
end do; end do
allocate(pbcField(numBC), source=0_pPETSCINT)
allocate(pbcComps(numBC))
allocate(pbcPoints(numBC))
@ -229,9 +229,9 @@ subroutine FEM_mechanical_init(fieldBC)
else
call ISCreateGeneral(PETSC_COMM_WORLD,0_pPETSCINT,[0_pPETSCINT],PETSC_COPY_VALUES,pbcPoints(numBC),err_PETSc)
CHKERRQ(err_PETSc)
endif
endif
enddo; enddo
end if
end if
end do; end do
call DMPlexCreateSection(mechanical_mesh,nolabel,pNumComp,pNumDof, &
numBC,pBcField,pBcComps,pBcPoints,PETSC_NULL_IS,section,err_PETSc)
CHKERRQ(err_PETSc)
@ -240,7 +240,7 @@ subroutine FEM_mechanical_init(fieldBC)
do faceSet = 1, numBC
call ISDestroy(pbcPoints(faceSet),err_PETSc)
CHKERRQ(err_PETSc)
enddo
end do
!--------------------------------------------------------------------------------------------------
! initialize solver specific parts of PETSc
@ -299,11 +299,11 @@ subroutine FEM_mechanical_init(fieldBC)
call PetscQuadratureGetData(functional,dimPlex,nc,nNodalPoints,nodalPointsP,nodalWeightsP,err_PETSc)
CHKERRQ(err_PETSc)
x_scal(basis+1:basis+dimPlex) = pV0 + matmul(transpose(cellJMat),nodalPointsP + 1.0_pReal)
enddo
end do
px_scal => x_scal
call DMPlexVecSetClosure(mechanical_mesh,section,solution_local,cell,px_scal,5,err_PETSc)
CHKERRQ(err_PETSc)
enddo
end do
call utilities_constitutiveResponse(0.0_pReal,devNull,.true.)
end subroutine FEM_mechanical_init
@ -348,7 +348,7 @@ type(tSolutionState) function FEM_mechanical_solution( &
FEM_mechanical_solution%converged = .true.
call SNESGetIterationNumber(mechanical_snes,FEM_mechanical_solution%iterationsNeeded,err_PETSc)
CHKERRQ(err_PETSc)
endif
end if
print'(/,1x,a)', '==========================================================================='
flush(IO_STDOUT)
@ -409,9 +409,9 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,err_PETSc
0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc)
call ISDestroy(bcPoints,err_PETSc)
CHKERRQ(err_PETSc)
endif
endif
enddo; enddo
end if
end if
end do; end do
!--------------------------------------------------------------------------------------------------
! evaluate field derivatives
@ -433,10 +433,10 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,err_PETSc
i = ((qPt*nBasis + basis)*dimPlex + comp)*dimPlex+comp
BMat(comp*dimPlex+1_pPETSCINT:(comp+1_pPETSCINT)*dimPlex,basis*dimPlex+comp+1_pPETSCINT) = &
matmul(IcellJMat,basisFieldDer(i*dimPlex+1_pPETSCINT:(i+1_pPETSCINT)*dimPlex))
enddo
enddo
end do
end do
homogenization_F(1:dimPlex,1:dimPlex,m) = reshape(matmul(BMat,x_scal),shape=[dimPlex,dimPlex], order=[2,1])
enddo
end do
if (num%BBarStabilisation) then
detFAvg = math_det33(sum(homogenization_F(1:3,1:3,cell*nQuadrature+1:(cell+1)*nQuadrature),dim=3)/real(nQuadrature,pReal))
do qPt = 0, nQuadrature-1
@ -444,11 +444,11 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,err_PETSc
homogenization_F(1:dimPlex,1:dimPlex,m) = homogenization_F(1:dimPlex,1:dimPlex,m) &
* (detFAvg/math_det33(homogenization_F(1:3,1:3,m)))**(1.0_pReal/real(dimPlex,pReal))
enddo
endif
end do
end if
call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
enddo
end do
!--------------------------------------------------------------------------------------------------
! evaluate constitutive response
@ -475,20 +475,20 @@ subroutine FEM_mechanical_formResidual(dm_local,xx_local,f_local,dummy,err_PETSc
i = ((qPt*nBasis + basis)*dimPlex + comp)*dimPlex+comp
BMat(comp*dimPlex+1_pPETSCINT:(comp+1_pPETSCINT)*dimPlex,basis*dimPlex+comp+1_pPETSCINT) = &
matmul(IcellJMat,basisFieldDer(i*dimPlex+1_pPETSCINT:(i+1_pPETSCINT)*dimPlex))
enddo
enddo
end do
end do
f_scal = f_scal &
+ matmul(transpose(BMat), &
reshape(transpose(homogenization_P(1:dimPlex,1:dimPlex,m)), &
shape=[dimPlex*dimPlex]))*qWeights(qPt+1_pPETSCINT)
enddo
end do
f_scal = f_scal*abs(detJ)
pf_scal => f_scal
call DMPlexVecSetClosure(dm_local,section,f_local,cell,pf_scal,ADD_VALUES,err_PETSc)
CHKERRQ(err_PETSc)
call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
enddo
end do
call DMRestoreLocalVector(dm_local,x_local,err_PETSc)
CHKERRQ(err_PETSc)
@ -559,9 +559,9 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,err_P
0.0_pReal,params%fieldBC%componentBC(field)%Value(face),params%timeinc)
call ISDestroy(bcPoints,err_PETSc)
CHKERRQ(err_PETSc)
endif
endif
enddo; enddo
end if
end if
end do; end do
call DMPlexGetHeightStratum(dm_local,0_pPETSCINT,cellStart,cellEnd,err_PETSc)
CHKERRQ(err_PETSc)
do cell = cellStart, cellEnd-1 !< loop over all elements
@ -583,8 +583,8 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,err_P
i = ((qPt*nBasis + basis)*dimPlex + comp)*dimPlex+comp
BMat(comp*dimPlex+1_pPETSCINT:(comp+1_pPETSCINT)*dimPlex,basis*dimPlex+comp+1_pPETSCINT) = &
matmul(reshape(pInvcellJ,[dimPlex,dimPlex]),basisFieldDer(i*dimPlex+1_pPETSCINT:(i+1_pPETSCINT)*dimPlex))
enddo
enddo
end do
end do
MatA = matmul(reshape(reshape(homogenization_dPdF(1:dimPlex,1:dimPlex,1:dimPlex,1:dimPlex,m), &
shape=[dimPlex,dimPlex,dimPlex,dimPlex], order=[2,1,4,3]), &
shape=[dimPlex*dimPlex,dimPlex*dimPlex]),BMat)*qWeights(qPt+1_pPETSCINT)
@ -602,8 +602,8 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,err_P
BMatAvg = BMatAvg + BMat
else
K_eA = K_eA + matmul(transpose(BMat),MatA)
endif
enddo
end if
end do
if (num%BBarStabilisation) then
FInv = math_inv33(FAvg)
K_e = K_eA*math_det33(FAvg/real(nQuadrature,pReal))**(1.0_pReal/real(dimPlex,pReal)) + &
@ -612,7 +612,7 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,err_P
K_eB)/real(dimPlex,pReal)
else
K_e = K_eA
endif
end if
K_e = (K_e + eps*math_eye(int(cellDof))) * abs(detJ)
#ifndef __INTEL_COMPILER
pK_e(1:cellDOF**2) => K_e
@ -624,7 +624,7 @@ subroutine FEM_mechanical_formJacobian(dm_local,xx_local,Jac_pre,Jac,dummy,err_P
CHKERRQ(err_PETSc)
call DMPlexVecRestoreClosure(dm_local,section,x_local,cell,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
enddo
end do
call MatAssemblyBegin(Jac,MAT_FINAL_ASSEMBLY,err_PETSc)
CHKERRQ(err_PETSc)
call MatAssemblyEnd(Jac,MAT_FINAL_ASSEMBLY,err_PETSc)
@ -704,9 +704,9 @@ subroutine FEM_mechanical_forward(guess,timeinc,timeinc_old,fieldBC)
0.0_pReal,fieldBC%componentBC(field)%Value(face),timeinc_old)
call ISDestroy(bcPoints,err_PETSc)
CHKERRQ(err_PETSc)
endif
endif
enddo; enddo
end if
end if
end do; end do
call DMRestoreLocalVector(dm_local,x_local,err_PETSc)
CHKERRQ(err_PETSc)
@ -716,7 +716,7 @@ subroutine FEM_mechanical_forward(guess,timeinc,timeinc_old,fieldBC)
CHKERRQ(err_PETSc)
call VecScale(solution_rate,timeinc_old**(-1),err_PETSc)
CHKERRQ(err_PETSc)
endif
end if
call VecCopy(solution_rate,solution,err_PETSc)
CHKERRQ(err_PETSc)
call VecScale(solution,timeinc,err_PETSc)
@ -800,7 +800,7 @@ subroutine FEM_mechanical_updateCoords()
call DMPlexGetPointLocal(dm_local, p, s, e, err_PETSc)
CHKERRQ(err_PETSc)
nodeCoords(1:dimPlex,p)=nodeCoords_linear(s+1:e)
enddo
end do
call discretization_setNodeCoords(nodeCoords)
call VecRestoreArrayF90(x_local,nodeCoords_linear,err_PETSc)
@ -827,9 +827,9 @@ subroutine FEM_mechanical_updateCoords()
x_scal(nOffset+1:nOffset+dimPlex))
q = q+dimPlex
nOffset = nOffset+dimPlex
enddo
enddo
enddo
end do
end do
end do
call DMPlexVecRestoreClosure(dm_local,section,x_local,c,x_scal,err_PETSc)
CHKERRQ(err_PETSc)
end do

View File

@ -160,6 +160,11 @@ module phase
integer, intent(in) :: ph
end subroutine thermal_restartWrite
module subroutine damage_restartWrite(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
end subroutine damage_restartWrite
module subroutine mechanical_restartRead(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
@ -170,6 +175,11 @@ module phase
integer, intent(in) :: ph
end subroutine thermal_restartRead
module subroutine damage_restartRead(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
end subroutine damage_restartRead
module function mechanical_S(ph,en) result(S)
integer, intent(in) :: ph,en
real(pReal), dimension(3,3) :: S
@ -674,6 +684,7 @@ subroutine phase_restartWrite(fileHandle)
call mechanical_restartWrite(groupHandle(2),ph)
call thermal_restartWrite(groupHandle(2),ph)
call damage_restartWrite(groupHandle(2),ph)
call HDF5_closeGroup(groupHandle(2))
@ -703,6 +714,7 @@ subroutine phase_restartRead(fileHandle)
call mechanical_restartRead(groupHandle(2),ph)
call thermal_restartRead(groupHandle(2),ph)
call damage_restartRead(groupHandle(2),ph)
call HDF5_closeGroup(groupHandle(2))

View File

@ -1,6 +1,6 @@
!----------------------------------------------------------------------------------------------------
!--------------------------------------------------------------------------------------------------
!> @brief internal microstructure state for all damage sources and kinematics constitutive models
!----------------------------------------------------------------------------------------------------
!--------------------------------------------------------------------------------------------------
submodule(phase) damage
type :: tDamageParameters
@ -310,6 +310,35 @@ function integrateDamageState(Delta_t,ph,en) result(broken)
end function integrateDamageState
module subroutine damage_restartWrite(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
select case(phase_damage(ph))
case(DAMAGE_ISOBRITTLE_ID,DAMAGE_ANISOBRITTLE_ID)
call HDF5_write(damageState(ph)%state,groupHandle,'omega_damage')
end select
end subroutine damage_restartWrite
module subroutine damage_restartRead(groupHandle,ph)
integer(HID_T), intent(in) :: groupHandle
integer, intent(in) :: ph
select case(phase_damage(ph))
case(DAMAGE_ISOBRITTLE_ID,DAMAGE_ANISOBRITTLE_ID)
call HDF5_read(damageState(ph)%state0,groupHandle,'omega_damage')
end select
end subroutine damage_restartRead
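An illustrative h5py sketch of the checkpoint pattern these two subroutines add, with assumed array shapes and outside the DAMASK HDF5 wrapper: the current damage state is written at checkpoint time and read back into state0 on restart.

import h5py
import numpy as np

state = np.random.rand(2, 10)                          # hypothetical (sizeState, Nentries) damage state

with h5py.File('restart_sketch.hdf5', 'w') as f:       # damage_restartWrite analogue
    f.create_dataset('omega_damage', data=state)

with h5py.File('restart_sketch.hdf5', 'r') as f:       # damage_restartRead analogue
    state0 = f['omega_damage'][()]

assert np.array_equal(state0, state)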
!----------------------------------------------------------------------------------------------
!< @brief writes damage sources results to HDF5 output file
!----------------------------------------------------------------------------------------------

View File

@ -1783,6 +1783,6 @@ subroutine storeGeometry(ph)
end do
end do
end subroutine
end subroutine storeGeometry
end submodule nonlocal