Merge branch 'development' into 'export_DAMASK_to_DREAM3D'

# Conflicts:
#   python/damask/_result.py
Vitesh 2023-10-13 12:21:46 +00:00
commit f71b1c4dc3
17 changed files with 172 additions and 122 deletions

View File

@@ -89,10 +89,10 @@ unittest_GNU_DEBUG:
   stage: compile
   script:
     - module load ${COMPILER_GNU} ${MPI_GNU} ${PETSC_GNU}
-    - TMPDIR=$(mktemp -d)
-    - cmake -B ${TMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TMPDIR} -DCMAKE_BUILD_TYPE=RELEASE -DBUILDCMD_POST=-coverage
-    - cmake --build ${TMPDIR} --target install
-    - cd ${TMPDIR}
+    - TEMPDIR=$(mktemp -d)
+    - cmake -B ${TEMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TEMPDIR} -DCMAKE_BUILD_TYPE=RELEASE -DBUILDCMD_POST=-coverage
+    - cmake --build ${TEMPDIR} --target install
+    - cd ${TEMPDIR}
     - ./bin/DAMASK_test
     - find -name \*.gcda -not -path "**/test/*" | xargs gcov
@@ -100,10 +100,10 @@ unittest_GNU_RELEASE:
   stage: compile
   script:
     - module load ${COMPILER_GNU} ${MPI_GNU} ${PETSC_GNU}
-    - TMPDIR=$(mktemp -d)
-    - cmake -B ${TMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TMPDIR} -DCMAKE_BUILD_TYPE=RELEASE -DBUILDCMD_POST=-coverage
-    - cmake --build ${TMPDIR} --target install
-    - cd ${TMPDIR}
+    - TEMPDIR=$(mktemp -d)
+    - cmake -B ${TEMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TEMPDIR} -DCMAKE_BUILD_TYPE=RELEASE -DBUILDCMD_POST=-coverage
+    - cmake --build ${TEMPDIR} --target install
+    - cd ${TEMPDIR}
     - ./bin/DAMASK_test
     - find -name \*.gcda -not -path "**/test/*" | xargs gcov
@@ -111,10 +111,10 @@ unittest_GNU_PERFORMANCE:
   stage: compile
   script:
     - module load ${COMPILER_GNU} ${MPI_GNU} ${PETSC_GNU}
-    - TMPDIR=$(mktemp -d)
-    - cmake -B ${TMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TMPDIR} -DCMAKE_BUILD_TYPE=PERFORMANCE -DBUILDCMD_POST=-coverage
-    - cmake --build ${TMPDIR} --target install
-    - cd ${TMPDIR}
+    - TEMPDIR=$(mktemp -d)
+    - cmake -B ${TEMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TEMPDIR} -DCMAKE_BUILD_TYPE=PERFORMANCE -DBUILDCMD_POST=-coverage
+    - cmake --build ${TEMPDIR} --target install
+    - cd ${TEMPDIR}
     - ./bin/DAMASK_test
    - find -name \*.gcda -not -path "**/test/*" | xargs gcov

View File

@@ -11,7 +11,7 @@ endif()
 project(Prerequisites LANGUAGES)
 set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}")
 pkg_check_modules(PETSC_MIN REQUIRED PETSc>=3.12.0 QUIET) #CMake does not support version range
-pkg_check_modules(PETSC REQUIRED PETSc<3.20.0)
+pkg_check_modules(PETSC REQUIRED PETSc<3.21.0)
 pkg_get_variable(CMAKE_Fortran_COMPILER PETSc fcompiler)
 pkg_get_variable(CMAKE_C_COMPILER PETSc ccompiler)

View File

@@ -1 +1 @@
-3.0.0-alpha7-864-g9cf37c493
+3.0.0-alpha7-880-g3644fe586

View File

@@ -434,30 +434,20 @@ class Orientation(Rotation,Crystal):
         https://doi.org/10.1107/S0108767391006864

         """
-        rho = self.as_Rodrigues_vector(compact=True)*(1.0-1.0e-9)
-
-        with np.errstate(invalid='ignore'):
-            if self.family == 'cubic':
-                return ((rho[...,0] >= rho[...,1]) &
-                        (rho[...,1] >= rho[...,2]) &
-                        (rho[...,2] >= 0)).astype(bool)
-            if self.family == 'hexagonal':
-                return ((rho[...,0] >= rho[...,1]*np.sqrt(3)) &
-                        (rho[...,1] >= 0) &
-                        (rho[...,2] >= 0)).astype(bool)
-            if self.family == 'tetragonal':
-                return ((rho[...,0] >= rho[...,1]) &
-                        (rho[...,1] >= 0) &
-                        (rho[...,2] >= 0)).astype(bool)
-            if self.family == 'orthorhombic':
-                return ((rho[...,0] >= 0) &
-                        (rho[...,1] >= 0) &
-                        (rho[...,2] >= 0)).astype(bool)
-            if self.family == 'monoclinic':
-                return ((rho[...,1] >= 0) &
-                        (rho[...,2] >= 0)).astype(bool)
-            return np.ones_like(rho[...,0],dtype=bool)
+        def larger_or_equal(v,c):
+            return ((np.isclose(c[0],v[...,0]) | (v[...,0] > c[0])) &
+                    (np.isclose(c[1],v[...,1]) | (v[...,1] > c[1])) &
+                    (np.isclose(c[2],v[...,2]) | (v[...,2] > c[2]))).astype(bool)
+
+        rho = self.as_Rodrigues_vector(compact=True)
+        return larger_or_equal(rho,
+                               [rho[...,1],            rho[...,2], 0] if self.family == 'cubic'
+                          else [rho[...,1]*np.sqrt(3), 0,          0] if self.family == 'hexagonal'
+                          else [rho[...,1],            0,          0] if self.family == 'tetragonal'
+                          else [0,                     0,          0] if self.family == 'orthorhombic'
+                          else [-np.inf,               0,          0] if self.family == 'monoclinic'
+                          else [-np.inf, -np.inf, -np.inf]) & self.in_FZ

     def disorientation(self,
                        other: 'Orientation',
@@ -521,20 +511,21 @@ class Orientation(Rotation,Crystal):
         if self.family != other.family:
             raise NotImplementedError('disorientation between different crystal families')

-        blend = util.shapeblender(self.shape,other.shape)
-        s = self.equivalent
-        o = other.equivalent
-        s_ = s.reshape((s.shape[0],1)+ self.shape).broadcast_to((s.shape[0],o.shape[0])+blend,mode='right')
-        o_ = o.reshape((1,o.shape[0])+other.shape).broadcast_to((s.shape[0],o.shape[0])+blend,mode='right')
-        r_ = s_.misorientation(o_)
+        blend = util.shapeblender( self.shape,other.shape)
+        s_m   = util.shapeshifter( self.shape,blend,mode='right')
+        s_o   = util.shapeshifter(other.shape,blend,mode='left')
+
+        s = self.broadcast_to(s_m).equivalent
+        o = other.broadcast_to(s_o).equivalent
+
+        r_ = s[:,np.newaxis,...].misorientation(o[np.newaxis,:,...])   # type: ignore[index]
         _r = ~r_

-        forward = r_.in_FZ & r_.in_disorientation_FZ
-        reverse = _r.in_FZ & _r.in_disorientation_FZ
+        forward = r_.in_disorientation_FZ
+        reverse = _r.in_disorientation_FZ
         ok = forward | reverse
-        ok &= (np.cumsum(ok.reshape((-1,)+ok.shape[2:]),axis=0) == 1).reshape(ok.shape)
-        r = np.where(np.any(forward[...,np.newaxis],axis=(0,1),keepdims=True),
+        ok &= (np.cumsum(ok.reshape((-1,)+blend),axis=0) == 1).reshape(ok.shape)
+        r = np.where(np.any((ok&forward)[...,np.newaxis],axis=(0,1),keepdims=True),
                      r_.quaternion,
                      _r.quaternion)
         loc = np.where(ok)
@@ -584,6 +575,7 @@ class Orientation(Rotation,Crystal):
                                 np.argmin(m,axis=0)[np.newaxis,...,np.newaxis],
                                 axis=0),
                  axis=0))
         return ((self.copy(Rotation(r).average(weights)),self.copy(Rotation(r))) if return_cloud else
                 self.copy(Rotation(r).average(weights))
                )
@@ -620,17 +612,19 @@ class Orientation(Rotation,Crystal):
         vector_ = np.array(vector,float)
         if vector_.shape[-1] != 3:
             raise ValueError('input is not a field of three-dimensional vectors')

-        eq = self.equivalent
-        blend = util.shapeblender(eq.shape,vector_.shape[:-1])
-        poles = eq.broadcast_to(blend,mode='right') @ np.broadcast_to(vector_,blend+(3,))
+        blend = util.shapeblender( self.shape,vector_.shape[:-1])
+        eq    = self.broadcast_to(util.shapeshifter( self.shape,blend,mode='right')).equivalent
+        poles = np.atleast_2d(eq @ np.broadcast_to(vector_,(1,)+blend+(3,)))
         ok = self.in_SST(poles,proper=proper)
         ok &= np.cumsum(ok,axis=0) == 1
         loc = np.where(ok)
         sort = 0 if len(loc) == 1 else np.lexsort(loc[:0:-1])
         return (
-            (poles[ok][sort].reshape(blend[1:]+(3,)), (np.vstack(loc[:1]).T)[sort].reshape(blend[1:]))
+            (poles[ok][sort].reshape(blend+(3,)), (np.vstack(loc[:1]).T)[sort].reshape(blend))
             if return_operators else
-            poles[ok][sort].reshape(blend[1:]+(3,))
+            poles[ok][sort].reshape(blend+(3,))
                )
@@ -795,16 +789,17 @@ class Orientation(Rotation,Crystal):
         """
         v = self.to_frame(uvw=uvw,hkl=hkl)
-        blend = util.shapeblender(self.shape,v.shape[:-1])
+        s_v = v.shape[:-1]
+        blend = util.shapeblender(self.shape,s_v)
         if normalize:
-            v /= np.linalg.norm(v,axis=-1,keepdims=len(v.shape)>1)
+            v /= np.linalg.norm(v,axis=-1,keepdims=len(s_v)>0)
         if with_symmetry:
             sym_ops = self.symmetry_operations
-            shape = v.shape[:-1]+sym_ops.shape
+            s_v += sym_ops.shape
             blend += sym_ops.shape
-            v = sym_ops.broadcast_to(shape) \
-              @ np.broadcast_to(v.reshape(util.shapeshifter(v.shape,shape+(3,))),shape+(3,))
-        return ~(self.broadcast_to(blend))@np.broadcast_to(v,blend+(3,))
+            v = sym_ops.broadcast_to(s_v) @ v[...,np.newaxis,:]
+        return ~(self.broadcast_to(blend)) @ np.broadcast_to(v,blend+(3,))

     def Schmid(self, *,
@@ -854,6 +849,7 @@ class Orientation(Rotation,Crystal):
                      p/np.linalg.norm(p,axis=1,keepdims=True))
         shape = P.shape[0:1]+self.shape+(3,3)
         return ~self.broadcast_to(shape[:-2]) \
              @ np.broadcast_to(P.reshape(util.shapeshifter(P.shape,shape)),shape)
@@ -897,6 +893,7 @@ class Orientation(Rotation,Crystal):
         lattice,o = self.relation_operations(model)
         target = Crystal(lattice=lattice)
         o = o.broadcast_to(o.shape+self.shape,mode='right')
         return Orientation(rotation=o*Rotation(self.quaternion).broadcast_to(o.shape,mode='left'),
                            lattice=lattice,
                            b = self.b if target.ratio['b'] is None else self.a*target.ratio['b'],
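
Note: with the reworked disorientation, the two Orientation fields no longer need identical shapes; compatible shapes blend. A minimal usage sketch with invented shapes (assumes Orientation.from_random accepts shape and family keywords, as in current damask releases):

    import damask

    # (2,3) against (3,) should blend to (2,3) via shapeblender/shapeshifter
    # instead of requiring identical shapes
    a = damask.Orientation.from_random(shape=(2,3),family='cubic')
    b = damask.Orientation.from_random(shape=(3,), family='cubic')
    print(a.disorientation(b).shape)                    # expected: (2, 3)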

View File

@@ -23,7 +23,7 @@ from . import grid_filters
 from . import mechanics
 from . import tensor
 from . import util
-from ._typehints import FloatSequence, IntSequence
+from ._typehints import FloatSequence, IntSequence, DADF5Dataset

 h5py3 = h5py.__version__[0] == '3'
@@ -37,7 +37,7 @@ def _read(dataset: h5py._hl.dataset.Dataset) -> np.ndarray:
     return np.array(dataset,dtype=dtype)

 def _match(requested,
-           existing: h5py._hl.base.KeysViewHDF5) -> List[Any]:
+           existing: h5py._hl.base.KeysViewHDF5) -> List[str]:
     """Find matches among two sets of labels."""
     def flatten_list(list_of_lists):
         return [e for e_ in list_of_lists for e in e_]
@@ -646,7 +646,7 @@ class Result:
            Name of scalar, vector, or tensor dataset to take absolute value of.

        """
-        def absolute(x: Dict[str, Any]) -> Dict[str, Any]:
+        def absolute(x: DADF5Dataset) -> DADF5Dataset:
            return {
                    'data': np.abs(x['data']),
                    'label': f'|{x["label"]}|',
@@ -708,7 +708,7 @@ class Result:
        ...                   'Mises equivalent of the Cauchy stress')

        """
-        def calculation(**kwargs) -> Dict[str, Any]:
+        def calculation(**kwargs) -> DADF5Dataset:
            formula = kwargs['formula']
            for d in re.findall(r'#(.*?)#',formula):
                formula = formula.replace(f'#{d}#',f"kwargs['{d}']['data']")
@@ -749,7 +749,7 @@ class Result:

        """
-        def stress_Cauchy(P: Dict[str, Any], F: Dict[str, Any]) -> Dict[str, Any]:
+        def stress_Cauchy(P: DADF5Dataset, F: DADF5Dataset) -> DADF5Dataset:
            return {
                    'data': mechanics.stress_Cauchy(P['data'],F['data']),
                    'label': 'sigma',
@@ -784,7 +784,7 @@ class Result:

        """
-        def determinant(T: Dict[str, Any]) -> Dict[str, Any]:
+        def determinant(T: DADF5Dataset) -> DADF5Dataset:
            return {
                    'data': np.linalg.det(T['data']),
                    'label': f"det({T['label']})",
@@ -817,7 +817,7 @@ class Result:

        """
-        def deviator(T: Dict[str, Any]) -> Dict[str, Any]:
+        def deviator(T: DADF5Dataset) -> DADF5Dataset:
            return {
                    'data': tensor.deviatoric(T['data']),
                    'label': f"s_{T['label']}",
@@ -854,7 +854,7 @@ class Result:

        """
-        def eigenval(T_sym: Dict[str, Any], eigenvalue: Literal['max, mid, min']) -> Dict[str, Any]:
+        def eigenval(T_sym: DADF5Dataset, eigenvalue: Literal['max', 'mid', 'min']) -> DADF5Dataset:
            if eigenvalue == 'max':
                label,p = 'maximum',2
            elif eigenvalue == 'mid':
@@ -893,7 +893,7 @@ class Result:

        """
-        def eigenvector(T_sym: Dict[str, Any], eigenvalue: Literal['max', 'mid', 'min']) -> Dict[str, Any]:
+        def eigenvector(T_sym: DADF5Dataset, eigenvalue: Literal['max', 'mid', 'min']) -> DADF5Dataset:
            if eigenvalue == 'max':
                label,p = 'maximum',2
            elif eigenvalue == 'mid':
@@ -941,13 +941,13 @@ class Result:

        """
-        def IPF_color(l: FloatSequence, q: Dict[str, Any]) -> Dict[str, Any]:
+        def IPF_color(l: FloatSequence, q: DADF5Dataset) -> DADF5Dataset:
            m = util.scale_to_coprime(np.array(l))
            lattice = q['meta']['lattice']
            o = Orientation(rotation = q['data'],lattice=lattice)
            return {
-                   'data': np.uint8(o.IPF_color(l)*255),
+                   'data': (o.IPF_color(l)*255).astype(np.uint8),
                    'label': 'IPFcolor_({} {} {})'.format(*m),
                    'meta' : {
                              'unit': '8-bit RGB',
@@ -970,7 +970,7 @@ class Result:
            Name of symmetric tensor dataset.

        """
-        def maximum_shear(T_sym: Dict[str, Any]) -> Dict[str, Any]:
+        def maximum_shear(T_sym: DADF5Dataset) -> DADF5Dataset:
            return {
                    'data': mechanics.maximum_shear(T_sym['data']),
                    'label': f"max_shear({T_sym['label']})",
@@ -1013,7 +1013,7 @@ class Result:
        >>> r.add_equivalent_Mises('epsilon_V^0.0(F)')

        """
-        def equivalent_Mises(T_sym: Dict[str, Any], kind: str) -> Dict[str, Any]:
+        def equivalent_Mises(T_sym: DADF5Dataset, kind: str) -> DADF5Dataset:
            k = kind
            if k is None:
                if T_sym['meta']['unit'] == '1':
@@ -1051,7 +1051,7 @@ class Result:
            Order of the norm. inf means NumPy's inf object. For details refer to numpy.linalg.norm.

        """
-        def norm(x: Dict[str, Any], ord: Union[int, float, Literal['fro', 'nuc']]) -> Dict[str, Any]:
+        def norm(x: DADF5Dataset, ord: Union[int, float, Literal['fro', 'nuc']]) -> DADF5Dataset:
            o = ord
            if len(x['data'].shape) == 2:
                axis: Union[int, Tuple[int, int]] = 1
@@ -1099,7 +1099,7 @@ class Result:
            is taken into account.

        """
-        def stress_second_Piola_Kirchhoff(P: Dict[str, Any], F: Dict[str, Any]) -> Dict[str, Any]:
+        def stress_second_Piola_Kirchhoff(P: DADF5Dataset, F: DADF5Dataset) -> DADF5Dataset:
            return {
                    'data': mechanics.stress_second_Piola_Kirchhoff(P['data'],F['data']),
                    'label': 'S',
@@ -1141,12 +1141,11 @@ class Result:
            Defaults to True.

        """
-        def pole(q: Dict[str, Any],
-                 uvw: FloatSequence,
-                 hkl: FloatSequence,
-                 with_symmetry: bool,
-                 normalize: bool) -> Dict[str, Any]:
-            c = q['meta']['c/a'] if 'c/a' in q['meta'] else 1
+        def pole(q: DADF5Dataset,
+                 uvw: FloatSequence, hkl: FloatSequence,
+                 with_symmetry: bool,
+                 normalize: bool) -> DADF5Dataset:
+            c = q['meta']['c/a'] if 'c/a' in q['meta'] else 1.0
            brackets = ['[]','()','⟨⟩','{}'][(uvw is None)*1+with_symmetry*2]
            label = 'p^' + '{}{} {} {}{}'.format(brackets[0],
                                                 *(uvw if uvw else hkl),
@@ -1186,7 +1185,7 @@ class Result:
        >>> r.add_rotation('F')

        """
-        def rotation(F: Dict[str, Any]) -> Dict[str, Any]:
+        def rotation(F: DADF5Dataset) -> DADF5Dataset:
            return {
                    'data': mechanics.rotation(F['data']).as_matrix(),
                    'label': f"R({F['label']})",
@@ -1218,7 +1217,7 @@ class Result:
        >>> r.add_spherical('sigma')

        """
-        def spherical(T: Dict[str, Any]) -> Dict[str, Any]:
+        def spherical(T: DADF5Dataset) -> DADF5Dataset:
            return {
                    'data': tensor.spherical(T['data'],False),
                    'label': f"p_{T['label']}",
@@ -1292,14 +1291,14 @@ class Result:
        | https://de.wikipedia.org/wiki/Verzerrungstensor

        """
-        def strain(F: Dict[str, Any], t: Literal['V', 'U'], m: float) -> Dict[str, Any]:
+        def strain(F: DADF5Dataset, t: Literal['V', 'U'], m: float) -> DADF5Dataset:
            side = 'left' if t == 'V' else 'right'
            return {
                    'data': mechanics.strain(F['data'],t,m),
                    'label': f"epsilon_{t}^{m}({F['label']})",
                    'meta': {
                             'unit': F['meta']['unit'],
-                            'description': f'Seth-Hill strain tensor of order {m} based on {side} stretch tensor '+\
-                                           f"of {F['label']} ({F['meta']['description']})",
+                            'description': f'Seth-Hill strain tensor of order {m} based on {side} stretch tensor '
+                                           f"of {F['label']} ({F['meta']['description']})",
                             'creator': 'add_strain'
                            }
@@ -1323,14 +1322,14 @@ class Result:
            Defaults to 'V'.

        """
-        def stretch_tensor(F: Dict[str, Any], t: str) -> Dict[str, Any]:
+        def stretch_tensor(F: DADF5Dataset, t: str) -> DADF5Dataset:
            return {
                    'data': (mechanics.stretch_left if t.upper() == 'V' else mechanics.stretch_right)(F['data']),
                    'label': f"{t}({F['label']})",
                    'meta': {
                             'unit': F['meta']['unit'],
-                            'description': f"{'left' if t.upper() == 'V' else 'right'} stretch tensor "\
-                                           +f"of {F['label']} ({F['meta']['description']})", # noqa
+                            'description': f"{'left' if t.upper() == 'V' else 'right'} stretch tensor "
+                                           f"of {F['label']} ({F['meta']['description']})", # noqa
                             'creator': 'add_stretch_tensor'
                            }
                   }
@@ -1353,7 +1352,7 @@ class Result:
            i.e. fields resulting from the grid solver.

        """
-        def curl(f: Dict[str, Any], size: np.ndarray) -> Dict[str, Any]:
+        def curl(f: DADF5Dataset, size: np.ndarray) -> DADF5Dataset:
            return {
                    'data': grid_filters.curl(size,f['data']),
                    'label': f"curl({f['label']})",
@@ -1382,7 +1381,7 @@ class Result:
            i.e. fields resulting from the grid solver.

        """
-        def divergence(f: Dict[str, Any], size: np.ndarray) -> Dict[str, Any]:
+        def divergence(f: DADF5Dataset, size: np.ndarray) -> DADF5Dataset:
            return {
                    'data': grid_filters.divergence(size,f['data']),
                    'label': f"divergence({f['label']})",
@@ -1411,7 +1410,7 @@ class Result:
            i.e. fields resulting from the grid solver.

        """
-        def gradient(f: Dict[str, Any], size: np.ndarray) -> Dict[str, Any]:
+        def gradient(f: DADF5Dataset, size: np.ndarray) -> DADF5Dataset:
            return {
                    'data': grid_filters.gradient(size,f['data'] if len(f['data'].shape) == 4 else \
                                                       f['data'].reshape(f['data'].shape+(1,))),
@@ -1427,7 +1426,7 @@ class Result:

     def _add_generic_grid(self,
-                          func: Callable,
+                          func: Callable[..., DADF5Dataset],
                           datasets: Dict[str, str],
                           args: Dict[str, str] = {},
                           constituents = None):
@@ -1478,7 +1477,7 @@ class Result:
                            now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
                        for l,v in r['meta'].items():
-                           h5_dataset.attrs[l.lower()]=v if h5py3 else v.encode()
+                           h5_dataset.attrs[l.lower()]=v.encode() if not h5py3 and type(v) is str else v
                        creator = h5_dataset.attrs['creator'] if h5py3 else \
                                  h5_dataset.attrs['creator'].decode()
                        h5_dataset.attrs['creator'] = f'damask.Result.{creator} v{damask.version}' if h5py3 else \
@@ -1488,8 +1487,8 @@ class Result:

     def _add_generic_pointwise(self,
-                               func: Callable,
-                               datasets: Dict[str, Any],
+                               func: Callable[..., DADF5Dataset],
+                               datasets: Dict[str, str],
                                args: Dict[str, Any] = {}):
        """
        General function to add pointwise data.
@@ -1508,9 +1507,9 @@ class Result:

        """
        def job_pointwise(group: str,
-                          callback: Callable,
+                          callback: Callable[..., DADF5Dataset],
                           datasets: Dict[str, str],
-                          args: Dict[str, str]) -> Union[None, Any]:
+                          args: Dict[str, str]) -> Union[None, DADF5Dataset]:
            try:
                datasets_in = {}
                with h5py.File(self.fname,'r') as f:
@@ -1598,7 +1597,7 @@ class Result:
     def get(self,
             output: Union[str, List[str]] = '*',
             flatten: bool = True,
-            prune: bool = True) -> Optional[Dict[str,Any]]:
+            prune: bool = True) -> Union[None,Dict[str,Any]]:
        """
        Collect data per phase/homogenization reflecting the group/folder structure in the DADF5 file.
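
Note: every pointwise callback now advertises the DADF5Dataset contract ('data'/'label'/'meta') instead of Dict[str, Any]. A hypothetical standalone callback in the same style (the name and behaviour are invented for illustration; assumes the private import path shown in the diff above):

    import numpy as np
    from damask._typehints import DADF5Dataset

    def double(x: DADF5Dataset) -> DADF5Dataset:
        # returns twice the value of a dataset while keeping the metadata contract
        return {
                'data': 2.0*x['data'],
                'label': f"2*{x['label']}",
                'meta': {
                         'unit': x['meta']['unit'],
                         'description': f"twice the value of {x['label']} ({x['meta']['description']})",
                         'creator': 'double',
                        },
               }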

View File

@@ -112,7 +112,7 @@ class Rotation:

     def __getitem__(self,
-                    item: Union[Tuple[int], int, bool, np.bool_, np.ndarray]):
+                    item: Union[Tuple[Union[None, int, slice]], int, bool, np.bool_, np.ndarray]):
        """
        Return self[item].
@@ -295,6 +295,7 @@ class Rotation:
        ----------
        other : Rotation, shape (self.shape)
            Rotation for composition.
+            Compatible innermost dimensions will blend.

        Returns
        -------
@@ -303,10 +304,15 @@ class Rotation:

        """
        if isinstance(other,Rotation):
-            q_m = self.quaternion[...,0:1]
-            p_m = self.quaternion[...,1:]
-            q_o = other.quaternion[...,0:1]
-            p_o = other.quaternion[...,1:]
+            blend = util.shapeblender( self.shape,other.shape)
+            s_m   = util.shapeshifter( self.shape,blend,mode='right')
+            s_o   = util.shapeshifter(other.shape,blend,mode='left')
+
+            q_m = self.broadcast_to(s_m).quaternion[...,0:1]
+            p_m = self.broadcast_to(s_m).quaternion[...,1:]
+            q_o = other.broadcast_to(s_o).quaternion[...,0:1]
+            p_o = other.broadcast_to(s_o).quaternion[...,1:]
+
            qmo = q_m*q_o
            q = (qmo - np.einsum('...i,...i',p_m,p_o).reshape(qmo.shape))
            p = q_m*p_o + q_o*p_m + _P * np.cross(p_m,p_o)
@@ -325,6 +331,7 @@ class Rotation:
        ----------
        other : Rotation, shape (self.shape)
            Rotation for composition.
+            Compatible innermost dimensions will blend.

        """
        return self*other
@@ -341,6 +348,7 @@ class Rotation:
        ----------
        other : damask.Rotation, shape (self.shape)
            Rotation to invert for composition.
+            Compatible innermost dimensions will blend.

        Returns
        -------
@@ -434,7 +442,7 @@ class Rotation:

        """
        if isinstance(other, np.ndarray):
-            obs = util.shapeblender(self.shape,other.shape,keep_ones=False)[len(self.shape):]
+            obs = util.shapeblender(self.shape,other.shape)[len(self.shape):]
            for l in [4,2,1]:
                if obs[-l:] == l*(3,):
                    bs = util.shapeblender(self.shape,other.shape[:-l],False)
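
Note: composition now broadcasts both operands to a blended shape before the quaternion product. A sketch of what "compatible innermost dimensions will blend" permits (shapes invented for illustration):

    import damask

    r = damask.Rotation.from_random(shape=(12,3))
    s = damask.Rotation.from_random(shape=(3,))
    print((r*s).shape)                                  # expected: (12, 3) rather than a shape mismatch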

View File

@@ -1,6 +1,6 @@
 """Functionality for typehints."""

-from typing import Sequence, Union, Literal, TextIO
+from typing import Sequence, Union, TypedDict, Literal, TextIO
 from pathlib import Path

 import numpy as np
@@ -16,3 +16,15 @@ CrystalKinematics = Literal['slip', 'twin']
 NumpyRngSeed = Union[int, IntSequence, np.random.SeedSequence, np.random.Generator]
 # BitGenerator does not exists in older numpy versions
 #NumpyRngSeed = Union[int, IntSequence, np.random.SeedSequence, np.random.BitGenerator, np.random.Generator]
+
+# https://peps.python.org/pep-0655/
+# Metadata = TypedDict('Metadata', {'unit': str, 'description': str, 'creator': str, 'lattice': NotRequired[str]})
+_Metadata = TypedDict('_Metadata', {'lattice': str, 'c/a': float}, total=False)
+
+class Metadata(_Metadata):
+    unit: str
+    description: str
+    creator: str
+
+DADF5Dataset = TypedDict('DADF5Dataset', {'data': np.ndarray, 'label': str, 'meta': Metadata})
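
Note: a hypothetical instance of the new TypedDict (values invented for illustration). A static type checker would flag a missing 'unit' or a misspelled key, while 'lattice' and 'c/a' remain optional via _Metadata:

    import numpy as np
    from damask._typehints import DADF5Dataset

    ds: DADF5Dataset = {'data': np.zeros((10,3,3)),
                        'label': 'sigma',
                        'meta': {'unit': 'Pa',
                                 'description': 'Cauchy stress',
                                 'creator': 'add_stress_Cauchy'}}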

View File

@@ -513,7 +513,7 @@ def shapeshifter(fro: _Tuple[int, ...],

 def shapeblender(a: _Tuple[int, ...],
                  b: _Tuple[int, ...],
-                 keep_ones: bool = True) -> _Tuple[int, ...]:
+                 keep_ones: bool = False) -> _Tuple[int, ...]:
     """
     Return a shape that overlaps the rightmost entries of 'a' with the leftmost of 'b'.
@@ -525,20 +525,24 @@ def shapeblender(a: _Tuple[int, ...],
         Shape of second array.
     keep_ones : bool, optional
         Treat innermost '1's as literal value instead of dimensional placeholder.
-        Defaults to True.
+        Defaults to False.

     Examples
     --------
-    >>> shapeblender((4,4,3),(3,2,1))
-    (4,4,3,2,1)
-    >>> shapeblender((1,2),(1,2,3))
-    (1,2,3)
-    >>> shapeblender((1,),(2,2,1))
-    (1,2,2,1)
-    >>> shapeblender((1,),(2,2,1),False)
-    (2,2,1)
     >>> shapeblender((3,2),(3,2))
     (3,2)
+    >>> shapeblender((4,3),(3,2))
+    (4,3,2)
+    >>> shapeblender((4,4),(3,2))
+    (4,4,3,2)
+    >>> shapeblender((1,2),(1,2,3))
+    (1,2,3)
+    >>> shapeblender((),(2,2,1))
+    (2,2,1)
+    >>> shapeblender((1,),(2,2,1))
+    (2,2,1)
+    >>> shapeblender((1,),(2,2,1),True)
+    (1,2,2,1)

     """
     def is_broadcastable(a,b):
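
Note: the callers changed in this commit follow one idiom: blend the two shapes, shapeshift each operand to the blend, then broadcast. A sketch with plain NumPy arrays (shapes invented for illustration):

    import numpy as np
    from damask import util

    a = np.ones((4,3))
    b = np.ones((3,2))

    blend = util.shapeblender(a.shape,b.shape)                                            # (4, 3, 2)
    a_ = np.broadcast_to(a.reshape(util.shapeshifter(a.shape,blend,mode='right')),blend)
    b_ = np.broadcast_to(b.reshape(util.shapeshifter(b.shape,blend,mode='left')), blend)
    print((a_*b_).shape)                                                                  # (4, 3, 2)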

View File

@@ -358,7 +358,9 @@ class TestOrientation:
                             a=a,b=b,c=c,
                             alpha=alpha,beta=beta,gamma=gamma)
         assert o.to_pole(**{kw:vector,'with_symmetry':with_symmetry}).shape \
-            == o.shape + vector.shape[:-1] + (o.symmetry_operations.shape if with_symmetry else ()) + vector.shape[-1:]
+            == util.shapeblender(o.shape,vector.shape[:-1]) \
+             + (o.symmetry_operations.shape if with_symmetry else ()) \
+             + vector.shape[-1:]

     @pytest.mark.parametrize('lattice',['hP','cI','cF'])           #tI not included yet
     def test_Schmid(self,update,res_path,lattice):

View File

@@ -136,11 +136,13 @@ class TestUtil:
                              ((1,),(7,),False,(7,)),
                              ((1,),(7,),True,(1,7)),
                              ((2,),(2,2),False,(2,2)),
-                             ((1,2),(2,2),False,(2,2)),
+                             ((1,3),(2,3),False,(2,3)),
                              ((1,1,2),(2,2),False,(1,2,2)),
                              ((1,1,2),(2,2),True,(1,1,2,2)),
                              ((1,2,3),(2,3,4),False,(1,2,3,4)),
                              ((1,2,3),(1,2,3),False,(1,2,3)),
+                             ((2,3,1,1),(2,3),False,(2,3,2,3)),
+                             ((2,3,1,1),(2,3),True,(2,3,1,1,2,3)),
                             ])
     def test_shapeblender(self,a,b,ones,answer):
         assert util.shapeblender(a,b,ones) == answer

View File

@@ -6,7 +6,7 @@
 !> @brief Parse command line interface for PETSc-based solvers
 !--------------------------------------------------------------------------------------------------
 #define PETSC_MINOR_MIN 12
-#define PETSC_MINOR_MAX 19
+#define PETSC_MINOR_MAX 20

 module CLI
   use, intrinsic :: ISO_fortran_env

View File

@@ -48,7 +48,7 @@ subroutine YAML_parse_init()
 #ifdef FYAML
   print'(/,1x,a)', 'libfyaml powered'
 #else
-  call selfTest()
+  call YAML_parse_selfTest()
 #endif

 end subroutine YAML_parse_init
@@ -870,7 +870,7 @@ end function to_flow
 !--------------------------------------------------------------------------------------------------
 !> @brief Check correctness of some YAML functions.
 !--------------------------------------------------------------------------------------------------
-subroutine selfTest()
+subroutine YAML_parse_selfTest()

   if (indentDepth(' a') /= 1) error stop 'indentDepth'
   if (indentDepth('a') /= 0) error stop 'indentDepth'
@@ -1031,7 +1031,7 @@ subroutine selfTest()
   end block parse

-end subroutine selfTest
+end subroutine YAML_parse_selfTest

 #endif

 end module YAML_parse

View File

@@ -150,6 +150,7 @@ module YAML_types
   public :: &
     YAML_types_init, &
+    YAML_types_selfTest, &
 #ifdef __GFORTRAN__
     output_as1dStr, &                            !ToDo: Hack for GNU. Remove later
 #endif
@@ -164,7 +165,7 @@ subroutine YAML_types_init

   print'(/,1x,a)', '<<<+- YAML_types init -+>>>'

-  call selfTest()
+  call YAML_types_selfTest()

 end subroutine YAML_types_init
@@ -172,7 +173,7 @@ end subroutine YAML_types_init
 !--------------------------------------------------------------------------------------------------
 !> @brief Check correctness of some type bound procedures.
 !--------------------------------------------------------------------------------------------------
-subroutine selfTest()
+subroutine YAML_types_selfTest()

   scalar: block
     type(tScalar), target :: s
@@ -266,7 +267,7 @@ subroutine selfTest()
   end block dict

-end subroutine selfTest
+end subroutine YAML_types_selfTest

 !---------------------------------------------------------------------------------------------------

View File

@@ -2223,7 +2223,7 @@ end function crystal_isotropic_mu
 !--------------------------------------------------------------------------------------------------
 !> @brief Check correctness of some crystal functions.
 !--------------------------------------------------------------------------------------------------
-subroutine crystal_selfTest
+subroutine crystal_selfTest()

   real(pREAL), dimension(:,:,:), allocatable :: CoSy
   real(pREAL), dimension(:,:),   allocatable :: system

View File

@@ -74,6 +74,9 @@ module mesh_mechanical_FEM
   external :: &                                  ! ToDo: write interfaces
 #if defined(PETSC_USE_64BIT_INDICES) || PETSC_VERSION_MINOR < 17
     ISDestroy, &
+#endif
+#if PETSC_VERSION_MINOR > 18
+    DMAddField, &
 #endif
     PetscSectionGetNumFields, &
     PetscFESetQuadrature, &

View File

@@ -12,6 +12,7 @@ program DAMASK_test
   use test_crystal
   use test_rotations
   use test_IO
+  use test_YAML_types
   use test_HDF5_utilities

   external :: quit
@@ -57,6 +58,10 @@ program DAMASK_test
   call test_IO_run()
   write(IO_STDOUT,fmt='(a)') ok

+  write(IO_STDOUT,fmt=fmt, advance='no') 'YAML_types','...'
+  call test_YAML_types_run()
+  write(IO_STDOUT,fmt='(a)') ok
+
   write(IO_STDOUT,fmt=fmt, advance='no') 'HDF5_utilities','...'
   call test_HDF5_utilities_run()
   write(IO_STDOUT,fmt='(a)') ok

View File

@@ -0,0 +1,17 @@
+module test_YAML_types
+  use YAML_types
+
+  implicit none(type,external)
+
+  private
+  public :: test_YAML_types_run
+
+  contains
+
+subroutine test_YAML_types_run()
+
+  call YAML_types_selfTest()
+
+end subroutine test_YAML_types_run
+
+end module test_YAML_types