Merge branch 'development' into 'export_DAMASK_to_DREAM3D'
# Conflicts:
#   python/damask/_result.py
commit f71b1c4dc3
@@ -89,10 +89,10 @@ unittest_GNU_DEBUG:
   stage: compile
   script:
     - module load ${COMPILER_GNU} ${MPI_GNU} ${PETSC_GNU}
-    - TMPDIR=$(mktemp -d)
-    - cmake -B ${TMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TMPDIR} -DCMAKE_BUILD_TYPE=RELEASE -DBUILDCMD_POST=-coverage
-    - cmake --build ${TMPDIR} --target install
-    - cd ${TMPDIR}
+    - TEMPDIR=$(mktemp -d)
+    - cmake -B ${TEMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TEMPDIR} -DCMAKE_BUILD_TYPE=RELEASE -DBUILDCMD_POST=-coverage
+    - cmake --build ${TEMPDIR} --target install
+    - cd ${TEMPDIR}
     - ./bin/DAMASK_test
     - find -name \*.gcda -not -path "**/test/*" | xargs gcov
@@ -100,10 +100,10 @@ unittest_GNU_RELEASE:
   stage: compile
   script:
     - module load ${COMPILER_GNU} ${MPI_GNU} ${PETSC_GNU}
-    - TMPDIR=$(mktemp -d)
-    - cmake -B ${TMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TMPDIR} -DCMAKE_BUILD_TYPE=RELEASE -DBUILDCMD_POST=-coverage
-    - cmake --build ${TMPDIR} --target install
-    - cd ${TMPDIR}
+    - TEMPDIR=$(mktemp -d)
+    - cmake -B ${TEMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TEMPDIR} -DCMAKE_BUILD_TYPE=RELEASE -DBUILDCMD_POST=-coverage
+    - cmake --build ${TEMPDIR} --target install
+    - cd ${TEMPDIR}
     - ./bin/DAMASK_test
     - find -name \*.gcda -not -path "**/test/*" | xargs gcov
@@ -111,10 +111,10 @@ unittest_GNU_PERFORMANCE:
   stage: compile
   script:
     - module load ${COMPILER_GNU} ${MPI_GNU} ${PETSC_GNU}
-    - TMPDIR=$(mktemp -d)
-    - cmake -B ${TMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TMPDIR} -DCMAKE_BUILD_TYPE=PERFORMANCE -DBUILDCMD_POST=-coverage
-    - cmake --build ${TMPDIR} --target install
-    - cd ${TMPDIR}
+    - TEMPDIR=$(mktemp -d)
+    - cmake -B ${TEMPDIR} -DDAMASK_SOLVER=test -DCMAKE_INSTALL_PREFIX=${TEMPDIR} -DCMAKE_BUILD_TYPE=PERFORMANCE -DBUILDCMD_POST=-coverage
+    - cmake --build ${TEMPDIR} --target install
+    - cd ${TEMPDIR}
     - ./bin/DAMASK_test
     - find -name \*.gcda -not -path "**/test/*" | xargs gcov
@@ -11,7 +11,7 @@ endif()
 project(Prerequisites LANGUAGES)
 set(ENV{PKG_CONFIG_PATH} "$ENV{PETSC_DIR}/$ENV{PETSC_ARCH}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}")
 pkg_check_modules(PETSC_MIN REQUIRED PETSc>=3.12.0 QUIET) #CMake does not support version range
-pkg_check_modules(PETSC REQUIRED PETSc<3.20.0)
+pkg_check_modules(PETSC REQUIRED PETSc<3.21.0)

 pkg_get_variable(CMAKE_Fortran_COMPILER PETSc fcompiler)
 pkg_get_variable(CMAKE_C_COMPILER PETSc ccompiler)
@@ -434,30 +434,20 @@ class Orientation(Rotation,Crystal):
         https://doi.org/10.1107/S0108767391006864

         """
-        rho = self.as_Rodrigues_vector(compact=True)*(1.0-1.0e-9)
+        def larger_or_equal(v,c):
+            return ((np.isclose(c[0],v[...,0]) | (v[...,0] > c[0])) &
+                    (np.isclose(c[1],v[...,1]) | (v[...,1] > c[1])) &
+                    (np.isclose(c[2],v[...,2]) | (v[...,2] > c[2]))).astype(bool)

-        with np.errstate(invalid='ignore'):
-            if self.family == 'cubic':
-                return ((rho[...,0] >= rho[...,1]) &
-                        (rho[...,1] >= rho[...,2]) &
-                        (rho[...,2] >= 0)).astype(bool)
-            if self.family == 'hexagonal':
-                return ((rho[...,0] >= rho[...,1]*np.sqrt(3)) &
-                        (rho[...,1] >= 0) &
-                        (rho[...,2] >= 0)).astype(bool)
-            if self.family == 'tetragonal':
-                return ((rho[...,0] >= rho[...,1]) &
-                        (rho[...,1] >= 0) &
-                        (rho[...,2] >= 0)).astype(bool)
-            if self.family == 'orthorhombic':
-                return ((rho[...,0] >= 0) &
-                        (rho[...,1] >= 0) &
-                        (rho[...,2] >= 0)).astype(bool)
-            if self.family == 'monoclinic':
-                return ((rho[...,1] >= 0) &
-                        (rho[...,2] >= 0)).astype(bool)
-
-        return np.ones_like(rho[...,0],dtype=bool)
+        rho = self.as_Rodrigues_vector(compact=True)
+        return larger_or_equal(rho,
+                               [rho[...,1], rho[...,2],0] if self.family == 'cubic'
+                          else [rho[...,1]*np.sqrt(3),0, 0] if self.family == 'hexagonal'
+                          else [rho[...,1], 0, 0] if self.family == 'tetragonal'
+                          else [0, 0, 0] if self.family == 'orthorhombic'
+                          else [-np.inf, 0, 0] if self.family == 'monoclinic'
+                          else [-np.inf, -np.inf, -np.inf]) & self.in_FZ

     def disorientation(self,
                        other: 'Orientation',
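The rewritten property above folds the per-family conditions into one helper. For readers of the hunk, a standalone numpy restatement of the cubic branch is given below; the function name is hypothetical and the snippet is an illustration, not part of the damask API.

```python
import numpy as np

def in_cubic_disorientation_FZ(rho: np.ndarray) -> np.ndarray:
    """Cubic-family test rho_0 >= rho_1 >= rho_2 >= 0 for Rodrigues-Frank vectors of shape (..., 3).

    Components that are numerically equal count as inside, mirroring larger_or_equal above.
    """
    c = (rho[..., 1], rho[..., 2], np.zeros_like(rho[..., 2]))
    return ((np.isclose(c[0], rho[..., 0]) | (rho[..., 0] > c[0])) &
            (np.isclose(c[1], rho[..., 1]) | (rho[..., 1] > c[1])) &
            (np.isclose(c[2], rho[..., 2]) | (rho[..., 2] > c[2])))
```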
@@ -521,20 +511,21 @@ class Orientation(Rotation,Crystal):
         if self.family != other.family:
             raise NotImplementedError('disorientation between different crystal families')

-        blend = util.shapeblender(self.shape,other.shape)
-        s = self.equivalent
-        o = other.equivalent
+        blend = util.shapeblender( self.shape,other.shape)
+        s_m = util.shapeshifter( self.shape,blend,mode='right')
+        s_o = util.shapeshifter(other.shape,blend,mode='left')

-        s_ = s.reshape((s.shape[0],1)+ self.shape).broadcast_to((s.shape[0],o.shape[0])+blend,mode='right')
-        o_ = o.reshape((1,o.shape[0])+other.shape).broadcast_to((s.shape[0],o.shape[0])+blend,mode='right')
-        r_ = s_.misorientation(o_)
+        s = self.broadcast_to(s_m).equivalent
+        o = other.broadcast_to(s_o).equivalent
+
+        r_ = s[:,np.newaxis,...].misorientation(o[np.newaxis,:,...])          # type: ignore[index]
         _r = ~r_

-        forward = r_.in_FZ & r_.in_disorientation_FZ
-        reverse = _r.in_FZ & _r.in_disorientation_FZ
+        forward = r_.in_disorientation_FZ
+        reverse = _r.in_disorientation_FZ
         ok = forward | reverse
-        ok &= (np.cumsum(ok.reshape((-1,)+ok.shape[2:]),axis=0) == 1).reshape(ok.shape)
-        r = np.where(np.any(forward[...,np.newaxis],axis=(0,1),keepdims=True),
+        ok &= (np.cumsum(ok.reshape((-1,)+blend),axis=0) == 1).reshape(ok.shape)
+        r = np.where(np.any((ok&forward)[...,np.newaxis],axis=(0,1),keepdims=True),
                      r_.quaternion,
                      _r.quaternion)
         loc = np.where(ok)
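The `ok &= (np.cumsum(...) == 1)` line in the hunk above is a first-hit filter: among all symmetrically equivalent candidates it keeps only the first admissible one per cell. A minimal numpy illustration of the trick, with made-up data rather than DAMASK objects:

```python
import numpy as np

# Candidate matches for two cells (columns) under three symmetry operators (rows).
ok = np.array([[False, True],
               [True,  True],
               [False, True]])

# Keep only the first True per column: the running count equals 1 exactly there.
first = ok & (np.cumsum(ok, axis=0) == 1)
# first -> [[False,  True],
#           [ True, False],
#           [False, False]]
```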
@@ -584,6 +575,7 @@ class Orientation(Rotation,Crystal):
                           np.argmin(m,axis=0)[np.newaxis,...,np.newaxis],
                           axis=0),
                 axis=0))
+
         return ((self.copy(Rotation(r).average(weights)),self.copy(Rotation(r))) if return_cloud else
                 self.copy(Rotation(r).average(weights))
                )
@@ -620,17 +612,19 @@ class Orientation(Rotation,Crystal):
         vector_ = np.array(vector,float)
         if vector_.shape[-1] != 3:
             raise ValueError('input is not a field of three-dimensional vectors')
-        eq = self.equivalent
-        blend = util.shapeblender(eq.shape,vector_.shape[:-1])
-        poles = eq.broadcast_to(blend,mode='right') @ np.broadcast_to(vector_,blend+(3,))
+
+        blend = util.shapeblender( self.shape,vector_.shape[:-1])
+        eq = self.broadcast_to(util.shapeshifter( self.shape,blend,mode='right')).equivalent
+        poles = np.atleast_2d(eq @ np.broadcast_to(vector_,(1,)+blend+(3,)))
         ok = self.in_SST(poles,proper=proper)
         ok &= np.cumsum(ok,axis=0) == 1
         loc = np.where(ok)
         sort = 0 if len(loc) == 1 else np.lexsort(loc[:0:-1])

         return (
-            (poles[ok][sort].reshape(blend[1:]+(3,)), (np.vstack(loc[:1]).T)[sort].reshape(blend[1:]))
+            (poles[ok][sort].reshape(blend+(3,)), (np.vstack(loc[:1]).T)[sort].reshape(blend))
             if return_operators else
-            poles[ok][sort].reshape(blend[1:]+(3,))
+            poles[ok][sort].reshape(blend+(3,))
         )
@@ -795,16 +789,17 @@ class Orientation(Rotation,Crystal):

         """
         v = self.to_frame(uvw=uvw,hkl=hkl)
-        blend = util.shapeblender(self.shape,v.shape[:-1])
+        s_v = v.shape[:-1]
+        blend = util.shapeblender(self.shape,s_v)
         if normalize:
-            v /= np.linalg.norm(v,axis=-1,keepdims=len(v.shape)>1)
+            v /= np.linalg.norm(v,axis=-1,keepdims=len(s_v)>0)
         if with_symmetry:
             sym_ops = self.symmetry_operations
-            shape = v.shape[:-1]+sym_ops.shape
-            v = sym_ops.broadcast_to(shape) \
-              @ np.broadcast_to(v.reshape(util.shapeshifter(v.shape,shape+(3,))),shape+(3,))
-        return ~(self.broadcast_to(blend))@np.broadcast_to(v,blend+(3,))
+            s_v += sym_ops.shape
+            blend += sym_ops.shape
+            v = sym_ops.broadcast_to(s_v) @ v[...,np.newaxis,:]
+
+        return ~(self.broadcast_to(blend)) @ np.broadcast_to(v,blend+(3,))


     def Schmid(self, *,
@@ -854,6 +849,7 @@ class Orientation(Rotation,Crystal):
                             p/np.linalg.norm(p,axis=1,keepdims=True))

         shape = P.shape[0:1]+self.shape+(3,3)
+
         return ~self.broadcast_to(shape[:-2]) \
              @ np.broadcast_to(P.reshape(util.shapeshifter(P.shape,shape)),shape)

@@ -897,6 +893,7 @@ class Orientation(Rotation,Crystal):
         lattice,o = self.relation_operations(model)
         target = Crystal(lattice=lattice)
         o = o.broadcast_to(o.shape+self.shape,mode='right')
+
         return Orientation(rotation=o*Rotation(self.quaternion).broadcast_to(o.shape,mode='left'),
                            lattice=lattice,
                            b = self.b if target.ratio['b'] is None else self.a*target.ratio['b'],
@@ -23,7 +23,7 @@ from . import grid_filters
 from . import mechanics
 from . import tensor
 from . import util
-from ._typehints import FloatSequence, IntSequence
+from ._typehints import FloatSequence, IntSequence, DADF5Dataset

 h5py3 = h5py.__version__[0] == '3'

@@ -37,7 +37,7 @@ def _read(dataset: h5py._hl.dataset.Dataset) -> np.ndarray:
     return np.array(dataset,dtype=dtype)

 def _match(requested,
-           existing: h5py._hl.base.KeysViewHDF5) -> List[Any]:
+           existing: h5py._hl.base.KeysViewHDF5) -> List[str]:
     """Find matches among two sets of labels."""
     def flatten_list(list_of_lists):
         return [e for e_ in list_of_lists for e in e_]
@@ -646,7 +646,7 @@ class Result:
             Name of scalar, vector, or tensor dataset to take absolute value of.

         """
-        def absolute(x: Dict[str, Any]) -> Dict[str, Any]:
+        def absolute(x: DADF5Dataset) -> DADF5Dataset:
             return {
                     'data': np.abs(x['data']),
                     'label': f'|{x["label"]}|',

@@ -708,7 +708,7 @@ class Result:
         ...                    'Mises equivalent of the Cauchy stress')

         """
-        def calculation(**kwargs) -> Dict[str, Any]:
+        def calculation(**kwargs) -> DADF5Dataset:
             formula = kwargs['formula']
             for d in re.findall(r'#(.*?)#',formula):
                 formula = formula.replace(f'#{d}#',f"kwargs['{d}']['data']")
@@ -749,7 +749,7 @@ class Result:

         """
-        def stress_Cauchy(P: Dict[str, Any], F: Dict[str, Any]) -> Dict[str, Any]:
+        def stress_Cauchy(P: DADF5Dataset, F: DADF5Dataset) -> DADF5Dataset:
             return {
                     'data': mechanics.stress_Cauchy(P['data'],F['data']),
                     'label': 'sigma',

@@ -784,7 +784,7 @@ class Result:

         """
-        def determinant(T: Dict[str, Any]) -> Dict[str, Any]:
+        def determinant(T: DADF5Dataset) -> DADF5Dataset:
             return {
                     'data': np.linalg.det(T['data']),
                     'label': f"det({T['label']})",
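For context on the `mechanics.stress_Cauchy` call in the hunk above: the Cauchy stress follows from the first Piola-Kirchhoff stress P and the deformation gradient F as sigma = P F^T / det(F). A minimal numpy sketch of that relation, not the damask.mechanics implementation (whose broadcasting and symmetrization details may differ):

```python
import numpy as np

def stress_Cauchy_sketch(P: np.ndarray, F: np.ndarray) -> np.ndarray:
    """Cauchy stress from first Piola-Kirchhoff stress and deformation gradient, shapes (..., 3, 3)."""
    sigma = np.einsum('...ij,...kj->...ik', P, F) / np.linalg.det(F)[..., None, None]   # P F^T / det F
    return (sigma + sigma.swapaxes(-1, -2)) / 2.0                                       # enforce symmetry
```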
@@ -817,7 +817,7 @@ class Result:

         """
-        def deviator(T: Dict[str, Any]) -> Dict[str, Any]:
+        def deviator(T: DADF5Dataset) -> DADF5Dataset:
             return {
                     'data': tensor.deviatoric(T['data']),
                     'label': f"s_{T['label']}",

@@ -854,7 +854,7 @@ class Result:

         """
-        def eigenval(T_sym: Dict[str, Any], eigenvalue: Literal['max, mid, min']) -> Dict[str, Any]:
+        def eigenval(T_sym: DADF5Dataset, eigenvalue: Literal['max', 'mid', 'min']) -> DADF5Dataset:
             if eigenvalue == 'max':
                 label,p = 'maximum',2
             elif eigenvalue == 'mid':

@@ -893,7 +893,7 @@ class Result:

         """
-        def eigenvector(T_sym: Dict[str, Any], eigenvalue: Literal['max', 'mid', 'min']) -> Dict[str, Any]:
+        def eigenvector(T_sym: DADF5Dataset, eigenvalue: Literal['max', 'mid', 'min']) -> DADF5Dataset:
             if eigenvalue == 'max':
                 label,p = 'maximum',2
             elif eigenvalue == 'mid':
@@ -941,13 +941,13 @@ class Result:

         """
-        def IPF_color(l: FloatSequence, q: Dict[str, Any]) -> Dict[str, Any]:
+        def IPF_color(l: FloatSequence, q: DADF5Dataset) -> DADF5Dataset:
             m = util.scale_to_coprime(np.array(l))
             lattice = q['meta']['lattice']
             o = Orientation(rotation = q['data'],lattice=lattice)

             return {
-                    'data': np.uint8(o.IPF_color(l)*255),
+                    'data': (o.IPF_color(l)*255).astype(np.uint8),
                     'label': 'IPFcolor_({} {} {})'.format(*m),
                     'meta' : {
                               'unit': '8-bit RGB',

@@ -970,7 +970,7 @@ class Result:
             Name of symmetric tensor dataset.

         """
-        def maximum_shear(T_sym: Dict[str, Any]) -> Dict[str, Any]:
+        def maximum_shear(T_sym: DADF5Dataset) -> DADF5Dataset:
             return {
                     'data': mechanics.maximum_shear(T_sym['data']),
                     'label': f"max_shear({T_sym['label']})",
@@ -1013,7 +1013,7 @@ class Result:
         >>> r.add_equivalent_Mises('epsilon_V^0.0(F)')

         """
-        def equivalent_Mises(T_sym: Dict[str, Any], kind: str) -> Dict[str, Any]:
+        def equivalent_Mises(T_sym: DADF5Dataset, kind: str) -> DADF5Dataset:
             k = kind
             if k is None:
                 if T_sym['meta']['unit'] == '1':

@@ -1051,7 +1051,7 @@ class Result:
             Order of the norm. inf means NumPy's inf object. For details refer to numpy.linalg.norm.

         """
-        def norm(x: Dict[str, Any], ord: Union[int, float, Literal['fro', 'nuc']]) -> Dict[str, Any]:
+        def norm(x: DADF5Dataset, ord: Union[int, float, Literal['fro', 'nuc']]) -> DADF5Dataset:
             o = ord
             if len(x['data'].shape) == 2:
                 axis: Union[int, Tuple[int, int]] = 1

@@ -1099,7 +1099,7 @@ class Result:
             is taken into account.

         """
-        def stress_second_Piola_Kirchhoff(P: Dict[str, Any], F: Dict[str, Any]) -> Dict[str, Any]:
+        def stress_second_Piola_Kirchhoff(P: DADF5Dataset, F: DADF5Dataset) -> DADF5Dataset:
             return {
                     'data': mechanics.stress_second_Piola_Kirchhoff(P['data'],F['data']),
                     'label': 'S',
@@ -1141,12 +1141,11 @@ class Result:
             Defaults to True.

         """
-        def pole(q: Dict[str, Any],
-                 uvw: FloatSequence,
-                 hkl: FloatSequence,
-                 with_symmetry: bool,
-                 normalize: bool) -> Dict[str, Any]:
-            c = q['meta']['c/a'] if 'c/a' in q['meta'] else 1
+        def pole(q: DADF5Dataset,
+                 uvw: FloatSequence, hkl: FloatSequence,
+                 with_symmetry: bool,
+                 normalize: bool) -> DADF5Dataset:
+            c = q['meta']['c/a'] if 'c/a' in q['meta'] else 1.0
             brackets = ['[]','()','⟨⟩','{}'][(uvw is None)*1+with_symmetry*2]
             label = 'p^' + '{}{} {} {}{}'.format(brackets[0],
                                                  *(uvw if uvw else hkl),

@@ -1186,7 +1185,7 @@ class Result:
         >>> r.add_rotation('F')

         """
-        def rotation(F: Dict[str, Any]) -> Dict[str, Any]:
+        def rotation(F: DADF5Dataset) -> DADF5Dataset:
             return {
                     'data': mechanics.rotation(F['data']).as_matrix(),
                     'label': f"R({F['label']})",

@@ -1218,7 +1217,7 @@ class Result:
         >>> r.add_spherical('sigma')

         """
-        def spherical(T: Dict[str, Any]) -> Dict[str, Any]:
+        def spherical(T: DADF5Dataset) -> DADF5Dataset:
             return {
                     'data': tensor.spherical(T['data'],False),
                     'label': f"p_{T['label']}",
@@ -1292,14 +1291,14 @@ class Result:
             | https://de.wikipedia.org/wiki/Verzerrungstensor

         """
-        def strain(F: Dict[str, Any], t: Literal['V', 'U'], m: float) -> Dict[str, Any]:
+        def strain(F: DADF5Dataset, t: Literal['V', 'U'], m: float) -> DADF5Dataset:
             side = 'left' if t == 'V' else 'right'
             return {
                     'data': mechanics.strain(F['data'],t,m),
                     'label': f"epsilon_{t}^{m}({F['label']})",
                     'meta': {
                              'unit': F['meta']['unit'],
-                             'description': f'Seth-Hill strain tensor of order {m} based on {side} stretch tensor '+\
+                             'description': f'Seth-Hill strain tensor of order {m} based on {side} stretch tensor '
                                             f"of {F['label']} ({F['meta']['description']})",
                              'creator': 'add_strain'
                              }
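The hunk above only changes the type hints and the description string of `add_strain`. For reference, a Seth-Hill strain of order m can be sketched directly from F via the stretch tensor. This sketch assumes the common convention E_m = (U^(2m) - I)/(2m) with E_0 = ln U; damask.mechanics.strain may use a different normalization:

```python
import numpy as np

def strain_sketch(F: np.ndarray, t: str = 'V', m: float = 0.0) -> np.ndarray:
    """Seth-Hill strain of order m from the left (t='V') or right (t='U') stretch tensor, F of shape (..., 3, 3)."""
    C = np.einsum('...ki,...kj->...ij', F, F) if t == 'U' else \
        np.einsum('...ik,...jk->...ij', F, F)                  # C = F^T F (right)  or  B = F F^T (left)
    w, v = np.linalg.eigh(C)                                    # eigenvalues are squared principal stretches
    lam = np.sqrt(w)
    d = np.log(lam) if m == 0.0 else (lam**(2.0*m) - 1.0)/(2.0*m)
    return np.einsum('...ij,...j,...kj->...ik', v, d, v)        # rebuild the tensor in the original frame
```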
@@ -1323,14 +1322,14 @@ class Result:
             Defaults to 'V'.

         """
-        def stretch_tensor(F: Dict[str, Any], t: str) -> Dict[str, Any]:
+        def stretch_tensor(F: DADF5Dataset, t: str) -> DADF5Dataset:
             return {
                     'data': (mechanics.stretch_left if t.upper() == 'V' else mechanics.stretch_right)(F['data']),
                     'label': f"{t}({F['label']})",
                     'meta': {
                              'unit': F['meta']['unit'],
-                             'description': f"{'left' if t.upper() == 'V' else 'right'} stretch tensor "\
-                                            +f"of {F['label']} ({F['meta']['description']})", # noqa
+                             'description': f"{'left' if t.upper() == 'V' else 'right'} stretch tensor "
+                                            f"of {F['label']} ({F['meta']['description']})", # noqa
                              'creator': 'add_stretch_tensor'
                              }
                    }
@@ -1353,7 +1352,7 @@ class Result:
             i.e. fields resulting from the grid solver.

         """
-        def curl(f: Dict[str, Any], size: np.ndarray) -> Dict[str, Any]:
+        def curl(f: DADF5Dataset, size: np.ndarray) -> DADF5Dataset:
             return {
                     'data': grid_filters.curl(size,f['data']),
                     'label': f"curl({f['label']})",

@@ -1382,7 +1381,7 @@ class Result:
             i.e. fields resulting from the grid solver.

         """
-        def divergence(f: Dict[str, Any], size: np.ndarray) -> Dict[str, Any]:
+        def divergence(f: DADF5Dataset, size: np.ndarray) -> DADF5Dataset:
             return {
                     'data': grid_filters.divergence(size,f['data']),
                     'label': f"divergence({f['label']})",

@@ -1411,7 +1410,7 @@ class Result:
             i.e. fields resulting from the grid solver.

         """
-        def gradient(f: Dict[str, Any], size: np.ndarray) -> Dict[str, Any]:
+        def gradient(f: DADF5Dataset, size: np.ndarray) -> DADF5Dataset:
             return {
                     'data': grid_filters.gradient(size,f['data'] if len(f['data'].shape) == 4 else \
                                                        f['data'].reshape(f['data'].shape+(1,))),
@@ -1427,7 +1426,7 @@ class Result:


     def _add_generic_grid(self,
-                          func: Callable,
+                          func: Callable[..., DADF5Dataset],
                           datasets: Dict[str, str],
                           args: Dict[str, str] = {},
                           constituents = None):

@@ -1478,7 +1477,7 @@ class Result:
                             now.strftime('%Y-%m-%d %H:%M:%S%z').encode()

                         for l,v in r['meta'].items():
-                            h5_dataset.attrs[l.lower()]=v if h5py3 else v.encode()
+                            h5_dataset.attrs[l.lower()]=v.encode() if not h5py3 and type(v) is str else v
                         creator = h5_dataset.attrs['creator'] if h5py3 else \
                                   h5_dataset.attrs['creator'].decode()
                         h5_dataset.attrs['creator'] = f'damask.Result.{creator} v{damask.version}' if h5py3 else \
@@ -1488,8 +1487,8 @@ class Result:


     def _add_generic_pointwise(self,
-                               func: Callable,
-                               datasets: Dict[str, Any],
+                               func: Callable[..., DADF5Dataset],
+                               datasets: Dict[str, str],
                                args: Dict[str, Any] = {}):
         """
         General function to add pointwise data.

@@ -1508,9 +1507,9 @@ class Result:
         """

         def job_pointwise(group: str,
-                          callback: Callable,
+                          callback: Callable[..., DADF5Dataset],
                           datasets: Dict[str, str],
-                          args: Dict[str, str]) -> Union[None, Any]:
+                          args: Dict[str, str]) -> Union[None, DADF5Dataset]:
             try:
                 datasets_in = {}
                 with h5py.File(self.fname,'r') as f:
@@ -1598,7 +1597,7 @@ class Result:
     def get(self,
             output: Union[str, List[str]] = '*',
             flatten: bool = True,
-            prune: bool = True) -> Optional[Dict[str,Any]]:
+            prune: bool = True) -> Union[None,Dict[str,Any]]:
         """
         Collect data per phase/homogenization reflecting the group/folder structure in the DADF5 file.

@@ -112,7 +112,7 @@ class Rotation:


     def __getitem__(self,
-                    item: Union[Tuple[int], int, bool, np.bool_, np.ndarray]):
+                    item: Union[Tuple[Union[None, int, slice]], int, bool, np.bool_, np.ndarray]):
         """
         Return self[item].


@@ -295,6 +295,7 @@ class Rotation:
         ----------
         other : Rotation, shape (self.shape)
             Rotation for composition.
+            Compatible innermost dimensions will blend.

         Returns
         -------
@@ -303,10 +304,15 @@ class Rotation:

         """
         if isinstance(other,Rotation):
-            q_m = self.quaternion[...,0:1]
-            p_m = self.quaternion[...,1:]
-            q_o = other.quaternion[...,0:1]
-            p_o = other.quaternion[...,1:]
+            blend = util.shapeblender( self.shape,other.shape)
+            s_m = util.shapeshifter( self.shape,blend,mode='right')
+            s_o = util.shapeshifter(other.shape,blend,mode='left')
+
+            q_m = self.broadcast_to(s_m).quaternion[...,0:1]
+            p_m = self.broadcast_to(s_m).quaternion[...,1:]
+            q_o = other.broadcast_to(s_o).quaternion[...,0:1]
+            p_o = other.broadcast_to(s_o).quaternion[...,1:]
+
             qmo = q_m*q_o
             q = (qmo - np.einsum('...i,...i',p_m,p_o).reshape(qmo.shape))
             p = q_m*p_o + q_o*p_m + _P * np.cross(p_m,p_o)
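The scalar/vector split used above composes two quaternions as q = q_m q_o - p_m·p_o and p = q_m p_o + q_o p_m + P (p_m × p_o). A self-contained numpy sketch of that product; the sign convention `_P` is DAMASK-specific and +1 (the ordinary Hamilton product) is assumed here:

```python
import numpy as np

_P = 1  # sign convention of the composition; assumed +1 for this illustration

def compose(qu_a: np.ndarray, qu_b: np.ndarray) -> np.ndarray:
    """Compose two unit quaternions stored as (..., 4) arrays in (w, x, y, z) order."""
    q_m, p_m = qu_a[..., :1], qu_a[..., 1:]          # scalar and vector part of the first factor
    q_o, p_o = qu_b[..., :1], qu_b[..., 1:]
    q = q_m*q_o - np.einsum('...i,...i', p_m, p_o)[..., np.newaxis]
    p = q_m*p_o + q_o*p_m + _P*np.cross(p_m, p_o)
    return np.concatenate([q, p], axis=-1)
```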
@@ -325,6 +331,7 @@ class Rotation:
         ----------
         other : Rotation, shape (self.shape)
             Rotation for composition.
+            Compatible innermost dimensions will blend.

         """
         return self*other

@@ -341,6 +348,7 @@ class Rotation:
         ----------
         other : damask.Rotation, shape (self.shape)
             Rotation to invert for composition.
+            Compatible innermost dimensions will blend.

         Returns
         -------

@@ -434,7 +442,7 @@ class Rotation:

         """
         if isinstance(other, np.ndarray):
-            obs = util.shapeblender(self.shape,other.shape,keep_ones=False)[len(self.shape):]
+            obs = util.shapeblender(self.shape,other.shape)[len(self.shape):]
             for l in [4,2,1]:
                 if obs[-l:] == l*(3,):
                     bs = util.shapeblender(self.shape,other.shape[:-l],False)
@@ -1,6 +1,6 @@
 """Functionality for typehints."""

-from typing import Sequence, Union, Literal, TextIO
+from typing import Sequence, Union, TypedDict, Literal, TextIO
 from pathlib import Path

 import numpy as np

@@ -16,3 +16,15 @@ CrystalKinematics = Literal['slip', 'twin']
 NumpyRngSeed = Union[int, IntSequence, np.random.SeedSequence, np.random.Generator]
 # BitGenerator does not exists in older numpy versions
 #NumpyRngSeed = Union[int, IntSequence, np.random.SeedSequence, np.random.BitGenerator, np.random.Generator]
+
+# https://peps.python.org/pep-0655/
+# Metadata = TypedDict('Metadata', {'unit': str, 'description': str, 'creator': str, 'lattice': NotRequired[str]})
+_Metadata = TypedDict('_Metadata', {'lattice': str, 'c/a': float}, total=False)
+
+class Metadata(_Metadata):
+    unit: str
+    description: str
+    creator: str
+
+
+DADF5Dataset = TypedDict('DADF5Dataset', {'data': np.ndarray, 'label': str, 'meta': Metadata})
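The new `DADF5Dataset` TypedDict is what the `add_*` callbacks in `_result.py` now pass around: raw data plus a label and HDF5 metadata, with `lattice` and `c/a` optional. A hypothetical instance, with field values invented for illustration and the import path taken from the module layout shown in the hunks:

```python
import numpy as np
from damask._typehints import DADF5Dataset

example: DADF5Dataset = {
    'data': np.zeros((10, 3, 3)),          # one tensor per material point
    'label': 'sigma',
    'meta': {
        'unit': 'Pa',
        'description': 'Cauchy stress',
        'creator': 'add_stress_Cauchy',
    },
}
```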
@@ -513,7 +513,7 @@ def shapeshifter(fro: _Tuple[int, ...],

 def shapeblender(a: _Tuple[int, ...],
                  b: _Tuple[int, ...],
-                 keep_ones: bool = True) -> _Tuple[int, ...]:
+                 keep_ones: bool = False) -> _Tuple[int, ...]:
     """
     Return a shape that overlaps the rightmost entries of 'a' with the leftmost of 'b'.

@@ -525,20 +525,24 @@ def shapeblender(a: _Tuple[int, ...],
         Shape of second array.
     keep_ones : bool, optional
         Treat innermost '1's as literal value instead of dimensional placeholder.
-        Defaults to True.
+        Defaults to False.

     Examples
     --------
-    >>> shapeblender((4,4,3),(3,2,1))
-    (4,4,3,2,1)
-    >>> shapeblender((1,2),(1,2,3))
-    (1,2,3)
-    >>> shapeblender((1,),(2,2,1))
-    (1,2,2,1)
-    >>> shapeblender((1,),(2,2,1),False)
-    (2,2,1)
+    >>> shapeblender((3,2),(3,2))
+    (3,2)
+    >>> shapeblender((4,3),(3,2))
+    (4,3,2)
+    >>> shapeblender((4,4),(3,2))
+    (4,4,3,2)
+    >>> shapeblender((1,2),(1,2,3))
+    (1,2,3)
+    >>> shapeblender((),(2,2,1))
+    (2,2,1)
+    >>> shapeblender((1,),(2,2,1))
+    (2,2,1)
+    >>> shapeblender((1,),(2,2,1),True)
+    (1,2,2,1)

     """
     def is_broadcastable(a,b):
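A compact re-implementation that reproduces the docstring examples above may help to see what the new `keep_ones=False` default means: the trailing overlap is matched by broadcast compatibility rather than exact equality. This is a sketch for illustration, not the actual damask.util.shapeblender code:

```python
import numpy as np

def shapeblender_sketch(a: tuple, b: tuple, keep_ones: bool = False) -> tuple:
    """Overlap the rightmost entries of a with the leftmost of b."""
    def is_broadcastable(x, y):
        try:
            np.broadcast_shapes(x, y)
            return True
        except ValueError:
            return False

    match = (lambda x, y: x == y) if keep_ones else is_broadcastable
    for k in range(min(len(a), len(b)), -1, -1):             # longest admissible overlap first
        if k == 0 or match(a[len(a)-k:], b[:k]):
            return a[:len(a)-k] + b

shapeblender_sketch((4, 4, 3), (3, 2, 1))   # -> (4, 4, 3, 2, 1)
shapeblender_sketch((1,), (2, 2, 1))        # -> (2, 2, 1)
shapeblender_sketch((1,), (2, 2, 1), True)  # -> (1, 2, 2, 1)
```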
@@ -358,7 +358,9 @@ class TestOrientation:
                             a=a,b=b,c=c,
                             alpha=alpha,beta=beta,gamma=gamma)
         assert o.to_pole(**{kw:vector,'with_symmetry':with_symmetry}).shape \
-               == o.shape + vector.shape[:-1] + (o.symmetry_operations.shape if with_symmetry else ()) + vector.shape[-1:]
+               == util.shapeblender(o.shape,vector.shape[:-1]) \
+                + (o.symmetry_operations.shape if with_symmetry else ()) \
+                + vector.shape[-1:]

     @pytest.mark.parametrize('lattice',['hP','cI','cF']) #tI not included yet
     def test_Schmid(self,update,res_path,lattice):
@@ -136,11 +136,13 @@ class TestUtil:
                             ((1,),(7,),False,(7,)),
                             ((1,),(7,),True,(1,7)),
                             ((2,),(2,2),False,(2,2)),
                             ((1,2),(2,2),False,(2,2)),
                             ((1,3),(2,3),False,(2,3)),
                             ((1,1,2),(2,2),False,(1,2,2)),
                             ((1,1,2),(2,2),True,(1,1,2,2)),
                             ((1,2,3),(2,3,4),False,(1,2,3,4)),
                             ((1,2,3),(1,2,3),False,(1,2,3)),
                             ((2,3,1,1),(2,3),False,(2,3,2,3)),
                             ((2,3,1,1),(2,3),True,(2,3,1,1,2,3)),
                            ])
    def test_shapeblender(self,a,b,ones,answer):
        assert util.shapeblender(a,b,ones) == answer
@@ -6,7 +6,7 @@
 !> @brief Parse command line interface for PETSc-based solvers
 !--------------------------------------------------------------------------------------------------
 #define PETSC_MINOR_MIN 12
-#define PETSC_MINOR_MAX 19
+#define PETSC_MINOR_MAX 20

 module CLI
   use, intrinsic :: ISO_fortran_env
@@ -48,7 +48,7 @@ subroutine YAML_parse_init()
 #ifdef FYAML
   print'(/,1x,a)', 'libfyaml powered'
 #else
-  call selfTest()
+  call YAML_parse_selfTest()
 #endif

 end subroutine YAML_parse_init

@@ -870,7 +870,7 @@ end function to_flow
 !--------------------------------------------------------------------------------------------------
 !> @brief Check correctness of some YAML functions.
 !--------------------------------------------------------------------------------------------------
-subroutine selfTest()
+subroutine YAML_parse_selfTest()

   if (indentDepth(' a') /= 1) error stop 'indentDepth'
   if (indentDepth('a') /= 0)  error stop 'indentDepth'

@@ -1031,7 +1031,7 @@ subroutine selfTest()

   end block parse

-end subroutine selfTest
+end subroutine YAML_parse_selfTest
 #endif

 end module YAML_parse
@@ -150,6 +150,7 @@ module YAML_types

   public :: &
     YAML_types_init, &
+    YAML_types_selfTest, &
 #ifdef __GFORTRAN__
     output_as1dStr, & !ToDo: Hack for GNU. Remove later
 #endif

@@ -164,7 +165,7 @@ subroutine YAML_types_init

   print'(/,1x,a)', '<<<+- YAML_types init -+>>>'

-  call selfTest()
+  call YAML_types_selfTest()

 end subroutine YAML_types_init


@@ -172,7 +173,7 @@ end subroutine YAML_types_init
 !--------------------------------------------------------------------------------------------------
 !> @brief Check correctness of some type bound procedures.
 !--------------------------------------------------------------------------------------------------
-subroutine selfTest()
+subroutine YAML_types_selfTest()

   scalar: block
     type(tScalar), target :: s

@@ -266,7 +267,7 @@ subroutine selfTest()

   end block dict

-end subroutine selfTest
+end subroutine YAML_types_selfTest


 !---------------------------------------------------------------------------------------------------
@@ -2223,7 +2223,7 @@ end function crystal_isotropic_mu
 !--------------------------------------------------------------------------------------------------
 !> @brief Check correctness of some crystal functions.
 !--------------------------------------------------------------------------------------------------
-subroutine crystal_selfTest
+subroutine crystal_selfTest()

   real(pREAL), dimension(:,:,:), allocatable :: CoSy
   real(pREAL), dimension(:,:),   allocatable :: system
@@ -74,6 +74,9 @@ module mesh_mechanical_FEM
   external :: &                                                                 ! ToDo: write interfaces
 #if defined(PETSC_USE_64BIT_INDICES) || PETSC_VERSION_MINOR < 17
     ISDestroy, &
 #endif
+#if PETSC_VERSION_MINOR > 18
+    DMAddField, &
+#endif
     PetscSectionGetNumFields, &
     PetscFESetQuadrature, &
@@ -12,6 +12,7 @@ program DAMASK_test
   use test_crystal
   use test_rotations
   use test_IO
+  use test_YAML_types
   use test_HDF5_utilities

   external :: quit

@@ -57,6 +58,10 @@ program DAMASK_test
   call test_IO_run()
   write(IO_STDOUT,fmt='(a)') ok

+  write(IO_STDOUT,fmt=fmt, advance='no') 'YAML_types','...'
+  call test_YAML_types_run()
+  write(IO_STDOUT,fmt='(a)') ok
+
   write(IO_STDOUT,fmt=fmt, advance='no') 'HDF5_utilities','...'
   call test_HDF5_utilities_run()
   write(IO_STDOUT,fmt='(a)') ok
@@ -0,0 +1,17 @@
+module test_YAML_types
+  use YAML_types
+
+  implicit none(type,external)
+
+  private
+  public :: test_YAML_types_run
+
+  contains
+
+subroutine test_YAML_types_run()
+
+  call YAML_types_selfTest()
+
+end subroutine test_YAML_types_run
+
+end module test_YAML_types