Merge remote-tracking branch 'origin/development' into polishing

Martin Diehl 2022-06-14 07:42:54 +02:00
commit 8da04f197c
7 changed files with 161 additions and 174 deletions

View File

@@ -1 +1 @@
-3.0.0-alpha6-442-g49e40923e
+3.0.0-alpha6-458-g132823519

View File

@@ -6,15 +6,18 @@ import glob
 import argparse
 import shutil
 from pathlib import Path
+import subprocess
+import shlex
-import damask
+sys.path.append(str(Path(__file__).parents[2]/'python/damask'))
+import solver

 def copy_and_patch(patch,orig,editor):
     try:
         shutil.copyfile(orig,orig.parent/patch.stem)
     except shutil.SameFileError:
         pass
-    damask.util.run(f'patch {orig.parent/patch.stem} {patch} --backup --forward')
+    subprocess.run(shlex.split(f'patch {orig.parent/patch.stem} {patch} --backup --forward'))
     with open(orig.parent/patch.stem) as f_in:
         content = f_in.read()
     with open(orig.parent/patch.stem,'w') as f_out:
@@ -28,15 +31,16 @@ parser = argparse.ArgumentParser(
 parser.add_argument('--editor', dest='editor', metavar='string', default='vi',
                     help='Name of the editor (executable) used by Marc Mentat')
 parser.add_argument('--marc-root', dest='marc_root', metavar='string',
-                    default=damask.solver._marc._marc_root,
+                    default=solver._marc._marc_root,
                     help='Marc root directory')
 parser.add_argument('--marc-version', dest='marc_version', metavar='string',
-                    default=damask.solver._marc._marc_version,
+                    default=solver._marc._marc_version,
                     help='Marc version')
 parser.add_argument('--damask-root', dest='damask_root', metavar = 'string',
-                    default=damask.solver._marc._damask_root,
+                    default=solver._marc._damask_root,
                     help='DAMASK root directory')
 args = parser.parse_args()

 marc_root = Path(args.marc_root).expanduser()
 damask_root = Path(args.damask_root).expanduser()
@@ -52,7 +56,7 @@ matches = {'Marc_tools': [['comp_user','comp_damask_*mp'],
 for cmd in ['patch','xvfb-run']:
     try:
-        damask.util.run(f'{cmd} --help')
+        subprocess.run(shlex.split(f'{cmd} --help'))
     except FileNotFoundError:
         print(f'"{cmd}" not found, please install')
         sys.exit()
@@ -71,7 +75,7 @@ print('compiling Mentat menu binaries...')
 executable = marc_root/f'mentat{marc_version}/bin/mentat'
 menu_file = marc_root/f'mentat{marc_version}/menus/linux64/main.msb'
-damask.util.run(f'xvfb-run -a {executable} -compile {menu_file}')
+subprocess.run(shlex.split(f'xvfb-run -a {executable} -compile {menu_file}'))

 print('setting file access rights...')
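
Note: the script now shells out via subprocess/shlex instead of damask.util.run, so the damask package no longer has to be importable at install time. A minimal sketch of that pattern (the helper name require and the tool list are illustrative only, not part of the script):

    import shlex
    import subprocess
    import sys

    def require(cmd: str) -> None:
        """Exit with a hint if an external tool cannot be executed (hypothetical helper)."""
        try:
            # shlex.split keeps quoted arguments intact and avoids shell=True
            subprocess.run(shlex.split(f'{cmd} --help'),
                           stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        except FileNotFoundError:
            sys.exit(f'"{cmd}" not found, please install')

    for tool in ['patch', 'xvfb-run']:
        require(tool)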

View File

@@ -448,9 +448,12 @@ class Orientation(Rotation,Crystal):
         elif self.family == 'orthorhombic':
             return (np.prod(1. >= rho_abs,axis=-1)).astype(bool)
         elif self.family == 'monoclinic':
-            return (1. >= rho_abs[...,1]).astype(bool)
+            return np.logical_or( 1. >= rho_abs[...,1],
+                                  np.isnan(rho_abs[...,1]))
+        elif self.family == 'triclinic':
+            return np.ones(rho_abs.shape[:-1]).astype(bool)
         else:
-            return np.all(np.isfinite(rho_abs),axis=-1)
+            raise TypeError(f'unknown symmetry "{self.family}"')

     @property
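
Note: the monoclinic branch now also accepts NaN components, which can occur in the compact Rodrigues–Frank vector for 180° rotations (0·inf products) and which a plain comparison would silently reject. A small NumPy sketch with made-up values:

    import numpy as np

    rho_abs = np.array([[0.2, 0.5,    0.1],   # ordinary rotation
                        [0.3, np.nan, 0.7]])  # 180° case: a component becomes 0*inf = NaN
    print(1. >= rho_abs[..., 1])              # [ True False] -- NaN compares as False
    print(np.logical_or(1. >= rho_abs[..., 1],
                        np.isnan(rho_abs[..., 1])))   # [ True  True] -- NaN-tolerant check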

View File

@@ -1372,7 +1372,7 @@ class Rotation:
             w[np.isclose(w[...,0],1.0+0.0j),1:] = 0.
             w[np.isclose(w[...,1],1.0+0.0j),2:] = 0.
             vr = np.swapaxes(vr,-1,-2)
-            ax = np.where(np.abs(diag_delta)<1e-12,
+            ax = np.where(np.abs(diag_delta)<1e-13,
                           np.real(vr[np.isclose(w,1.0+0.0j)]).reshape(om.shape[:-2]+(3,)),
                           np.abs(np.real(vr[np.isclose(w,1.0+0.0j)]).reshape(om.shape[:-2]+(3,))) \
                           *np.sign(diag_delta))
@@ -1581,14 +1581,13 @@ class Rotation:
     @staticmethod
     def _ho2ax(ho: np.ndarray) -> np.ndarray:
         """Homochoric vector to axis–angle pair."""
-        tfit = np.array([+1.0000000000018852, -0.5000000002194847,
-                         -0.024999992127593126, -0.003928701544781374,
-                         -0.0008152701535450438, -0.0002009500426119712,
-                         -0.00002397986776071756, -0.00008202868926605841,
-                         +0.00012448715042090092, -0.0001749114214822577,
-                         +0.0001703481934140054, -0.00012062065004116828,
-                         +0.000059719705868660826, -0.00001980756723965647,
-                         +0.000003953714684212874, -0.00000036555001439719544])
+        tfit = np.array([+0.9999999999999968, -0.49999999999986866, -0.025000000000632055,
+                         -0.003928571496460683, -0.0008164666077062752, -0.00019411896443261646,
+                         -0.00004985822229871769, -0.000014164962366386031, -1.9000248160936107e-6,
+                         -5.72184549898506e-6, +7.772149920658778e-6, -0.00001053483452909705,
+                         +9.528014229335313e-6, -5.660288876265125e-6, +1.2844901692764126e-6,
+                         +1.1255185726258763e-6, -1.3834391419956455e-6, +7.513691751164847e-7,
+                         -2.401996891720091e-7, +4.386887017466388e-8, -3.5917775353564864e-9])
         hmag_squared = np.sum(ho**2.,axis=-1,keepdims=True)
         s = np.sum(tfit*hmag_squared**np.arange(len(tfit)),axis=-1,keepdims=True)
         with np.errstate(invalid='ignore'):
@@ -1679,7 +1678,7 @@ class Rotation:
         """
         with np.errstate(invalid='ignore',divide='ignore'):
-            # get pyramide and scale by grid parameter ratio
+            # get pyramid and scale by grid parameter ratio
             XYZ = np.take_along_axis(cu,Rotation._get_pyramid_order(cu,'forward'),-1) * _sc
             order = np.abs(XYZ[...,1:2]) <= np.abs(XYZ[...,0:1])
             q = np.pi/12.0 * np.where(order,XYZ[...,1:2],XYZ[...,0:1]) \
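
Note: both the old 16-term and the new 21-term tfit arrays are coefficients of a truncated series in the squared homochoric magnitude that approximates cos(ω/2). A sketch of how such a fit recovers the rotation angle, reusing the new coefficients (the 2·arccos step is shown for illustration and assumed from the usual form of this inversion, not taken from the diff):

    import numpy as np

    tfit = np.array([+0.9999999999999968, -0.49999999999986866, -0.025000000000632055,
                     -0.003928571496460683, -0.0008164666077062752, -0.00019411896443261646,
                     -0.00004985822229871769, -0.000014164962366386031, -1.9000248160936107e-6,
                     -5.72184549898506e-6, +7.772149920658778e-6, -0.00001053483452909705,
                     +9.528014229335313e-6, -5.660288876265125e-6, +1.2844901692764126e-6,
                     +1.1255185726258763e-6, -1.3834391419956455e-6, +7.513691751164847e-7,
                     -2.401996891720091e-7, +4.386887017466388e-8, -3.5917775353564864e-9])

    omega = np.radians(30.)                                # reference rotation angle
    hmag  = (0.75*(omega - np.sin(omega)))**(1./3.)        # homochoric magnitude for that angle
    s     = np.sum(tfit * hmag**(2*np.arange(len(tfit))))  # series in |h|**2, approximates cos(omega/2)
    print(2.*np.arccos(np.clip(s, -1., 1.)), omega)        # recovered angle vs. reference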

View File

@@ -1,43 +1,25 @@
 """Miscellaneous helper functionality."""

-import sys
+import sys as _sys
-import datetime
+import datetime as _datetime
-import os
+import os as _os
-import subprocess
+import subprocess as _subprocess
-import shlex
+import shlex as _shlex
-import re
+import re as _re
-import signal
+import signal as _signal
-import fractions
+import fractions as _fractions
-from collections import abc
+from collections import abc as _abc
-from functools import reduce, partial
+from functools import reduce as _reduce, partial as _partial
-from typing import Callable, Union, Iterable, Sequence, Dict, List, Tuple, Literal, Any, Collection, TextIO
-from pathlib import Path
+from typing import Callable as _Callable, Union as _Union, Iterable as _Iterable, Sequence as _Sequence, Dict as _Dict, \
+                   List as _List, Tuple as _Tuple, Literal as _Literal, Any as _Any, Collection as _Collection, TextIO as _TextIO
+from pathlib import Path as _Path

-import numpy as np
+import numpy as _np
-import h5py
+import h5py as _h5py

-from . import version
+from . import version as _version
-from ._typehints import FloatSequence, NumpyRngSeed, IntCollection, FileHandle
+from ._typehints import FloatSequence as _FloatSequence, NumpyRngSeed as _NumpyRngSeed, IntCollection as _IntCollection, \
+                        FileHandle as _FileHandle

-# limit visibility
-__all__=[
-        'srepr',
-        'emph', 'deemph', 'warn', 'strikeout',
-        'run',
-        'open_text',
-        'natural_sort',
-        'show_progress',
-        'scale_to_coprime',
-        'project_equal_angle', 'project_equal_area',
-        'hybrid_IA',
-        'execution_stamp',
-        'shapeshifter', 'shapeblender',
-        'extend_docstring', 'extended_docstring',
-        'Bravais_to_Miller', 'Miller_to_Bravais',
-        'DREAM3D_base_group', 'DREAM3D_cell_data_group',
-        'dict_prune', 'dict_flatten',
-        'tail_repack',
-       ]

 # https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py
 # https://stackoverflow.com/questions/287871
@@ -154,8 +136,8 @@ def strikeout(msg) -> str:
 def run(cmd: str,
         wd: str = './',
-        env: Dict[str, str] = None,
+        env: _Dict[str, str] = None,
-        timeout: int = None) -> Tuple[str, str]:
+        timeout: int = None) -> _Tuple[str, str]:
     """
     Run a command.
@@ -178,26 +160,26 @@ def run(cmd: str,
     """
     def pass_signal(sig,_,proc,default):
         proc.send_signal(sig)
-        signal.signal(sig,default)
+        _signal.signal(sig,default)
-        signal.raise_signal(sig)
+        _signal.raise_signal(sig)

-    signals = [signal.SIGINT,signal.SIGTERM]
+    signals = [_signal.SIGINT,_signal.SIGTERM]

     print(f"running '{cmd}' in '{wd}'")
-    process = subprocess.Popen(shlex.split(cmd),
+    process = _subprocess.Popen(_shlex.split(cmd),
-                               stdout = subprocess.PIPE,
+                                stdout = _subprocess.PIPE,
-                               stderr = subprocess.PIPE,
+                                stderr = _subprocess.PIPE,
-                               env = os.environ if env is None else env,
+                                env = _os.environ if env is None else env,
                                 cwd = wd,
                                 encoding = 'utf-8')
     # ensure that process is terminated (https://stackoverflow.com/questions/22916783)
-    sig_states = [signal.signal(sig,partial(pass_signal,proc=process,default=signal.getsignal(sig))) for sig in signals]
+    sig_states = [_signal.signal(sig,_partial(pass_signal,proc=process,default=_signal.getsignal(sig))) for sig in signals]
     try:
         stdout,stderr = process.communicate(timeout=timeout)
     finally:
         for sig,state in zip(signals,sig_states):
-            signal.signal(sig,state)
+            _signal.signal(sig,state)

     if process.returncode != 0:
         print(stdout)
@@ -207,8 +189,8 @@ def run(cmd: str,
     return stdout, stderr


-def open_text(fname: FileHandle,
+def open_text(fname: _FileHandle,
-              mode: Literal['r','w'] = 'r') -> TextIO:
+              mode: _Literal['r','w'] = 'r') -> _TextIO: # noqa
     """
     Open a text file.
@@ -224,11 +206,11 @@ def open_text(fname: FileHandle,
     f : file handle

     """
-    return fname if not isinstance(fname, (str,Path)) else \
+    return fname if not isinstance(fname, (str,_Path)) else \
-           open(Path(fname).expanduser(),mode,newline=('\n' if mode == 'w' else None))
+           open(_Path(fname).expanduser(),mode,newline=('\n' if mode == 'w' else None))


-def natural_sort(key: str) -> List[Union[int, str]]:
+def natural_sort(key: str) -> _List[_Union[int, str]]:
     """
     Natural sort.
@@ -240,13 +222,13 @@ def natural_sort(key: str) -> List[Union[int, str]]:
     """
     convert = lambda text: int(text) if text.isdigit() else text
-    return [ convert(c) for c in re.split('([0-9]+)', key) ]
+    return [ convert(c) for c in _re.split('([0-9]+)', key) ]


-def show_progress(iterable: Iterable,
+def show_progress(iterable: _Iterable,
                   N_iter: int = None,
                   prefix: str = '',
-                  bar_length: int = 50) -> Any:
+                  bar_length: int = 50) -> _Any:
     """
     Decorate a loop with a progress bar.
@@ -264,7 +246,7 @@ def show_progress(iterable: Iterable,
         Length of progress bar in characters. Defaults to 50.

     """
-    if isinstance(iterable,abc.Sequence):
+    if isinstance(iterable,_abc.Sequence):
         if N_iter is None:
             N = len(iterable)
         else:
@@ -285,7 +267,7 @@ def show_progress(iterable: Iterable,
             status.update(i)


-def scale_to_coprime(v: FloatSequence) -> np.ndarray:
+def scale_to_coprime(v: _FloatSequence) -> _np.ndarray:
     """
     Scale vector to co-prime (relatively prime) integers.
@@ -304,30 +286,30 @@ def scale_to_coprime(v: FloatSequence) -> np.ndarray:
     def get_square_denominator(x):
         """Denominator of the square of a number."""
-        return fractions.Fraction(x ** 2).limit_denominator(MAX_DENOMINATOR).denominator
+        return _fractions.Fraction(x ** 2).limit_denominator(MAX_DENOMINATOR).denominator

     def lcm(a,b):
         """Least common multiple."""
         try:
-            return np.lcm(a,b)   # numpy > 1.18
+            return _np.lcm(a,b)  # numpy > 1.18
         except AttributeError:
-            return a * b // np.gcd(a, b)
+            return a * b // _np.gcd(a, b)

-    v_ = np.array(v)
+    v_ = _np.array(v)
-    m = (v_ * reduce(lcm, map(lambda x: int(get_square_denominator(x)),v_))**0.5).astype(np.int64)
+    m = (v_ * _reduce(lcm, map(lambda x: int(get_square_denominator(x)),v_))**0.5).astype(_np.int64)
-    m = m//reduce(np.gcd,m)
+    m = m//_reduce(_np.gcd,m)

-    with np.errstate(invalid='ignore'):
+    with _np.errstate(invalid='ignore'):
-        if not np.allclose(np.ma.masked_invalid(v_/m),v_[np.argmax(abs(v_))]/m[np.argmax(abs(v_))]):
+        if not _np.allclose(_np.ma.masked_invalid(v_/m),v_[_np.argmax(abs(v_))]/m[_np.argmax(abs(v_))]):
             raise ValueError(f'invalid result "{m}" for input "{v_}"')

     return m


-def project_equal_angle(vector: np.ndarray,
+def project_equal_angle(vector: _np.ndarray,
-                        direction: Literal['x', 'y', 'z'] = 'z',
+                        direction: _Literal['x', 'y', 'z'] = 'z', # noqa
                         normalize: bool = True,
-                        keepdims: bool = False) -> np.ndarray:
+                        keepdims: bool = False) -> _np.ndarray:
     """
     Apply equal-angle projection to vector.
@@ -367,15 +349,15 @@ def project_equal_angle(vector: np.ndarray,
     """
     shift = 'zyx'.index(direction)
-    v = np.roll(vector/np.linalg.norm(vector,axis=-1,keepdims=True) if normalize else vector,
+    v = _np.roll(vector/_np.linalg.norm(vector,axis=-1,keepdims=True) if normalize else vector,
                 shift,axis=-1)
-    return np.roll(np.block([v[...,:2]/(1.0+np.abs(v[...,2:3])),np.zeros_like(v[...,2:3])]),
+    return _np.roll(_np.block([v[...,:2]/(1.0+_np.abs(v[...,2:3])),_np.zeros_like(v[...,2:3])]),
                    -shift if keepdims else 0,axis=-1)[...,:3 if keepdims else 2]


-def project_equal_area(vector: np.ndarray,
+def project_equal_area(vector: _np.ndarray,
-                       direction: Literal['x', 'y', 'z'] = 'z',
+                       direction: _Literal['x', 'y', 'z'] = 'z', # noqa
                        normalize: bool = True,
-                       keepdims: bool = False) -> np.ndarray:
+                       keepdims: bool = False) -> _np.ndarray:
     """
     Apply equal-area projection to vector.
@@ -416,22 +398,22 @@ def project_equal_area(vector: np.ndarray,
     """
     shift = 'zyx'.index(direction)
-    v = np.roll(vector/np.linalg.norm(vector,axis=-1,keepdims=True) if normalize else vector,
+    v = _np.roll(vector/_np.linalg.norm(vector,axis=-1,keepdims=True) if normalize else vector,
                 shift,axis=-1)
-    return np.roll(np.block([v[...,:2]/np.sqrt(1.0+np.abs(v[...,2:3])),np.zeros_like(v[...,2:3])]),
+    return _np.roll(_np.block([v[...,:2]/_np.sqrt(1.0+_np.abs(v[...,2:3])),_np.zeros_like(v[...,2:3])]),
                    -shift if keepdims else 0,axis=-1)[...,:3 if keepdims else 2]


 def execution_stamp(class_name: str,
                     function_name: str = None) -> str:
     """Timestamp the execution of a (function within a) class."""
-    now = datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S%z')
+    now = _datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S%z')
     _function_name = '' if function_name is None else f'.{function_name}'
-    return f'damask.{class_name}{_function_name} v{version} ({now})'
+    return f'damask.{class_name}{_function_name} v{_version} ({now})'


-def hybrid_IA(dist: np.ndarray,
+def hybrid_IA(dist: _np.ndarray,
               N: int,
-              rng_seed: NumpyRngSeed = None) -> np.ndarray:
+              rng_seed: _NumpyRngSeed = None) -> _np.ndarray:
     """
     Hybrid integer approximation.
@@ -446,23 +428,23 @@ def hybrid_IA(dist: np.ndarray,
         If None, then fresh, unpredictable entropy will be pulled from the OS.

     """
-    N_opt_samples,N_inv_samples = (max(np.count_nonzero(dist),N),0)   # random subsampling if too little samples requested
+    N_opt_samples,N_inv_samples = (max(_np.count_nonzero(dist),N),0)  # random subsampling if too little samples requested
     scale_,scale,inc_factor = (0.0,float(N_opt_samples),1.0)
-    while (not np.isclose(scale, scale_)) and (N_inv_samples != N_opt_samples):
+    while (not _np.isclose(scale, scale_)) and (N_inv_samples != N_opt_samples):
-        repeats = np.rint(scale*dist).astype(np.int64)
+        repeats = _np.rint(scale*dist).astype(_np.int64)
-        N_inv_samples = np.sum(repeats)
+        N_inv_samples = _np.sum(repeats)
         scale_,scale,inc_factor = (scale,scale+inc_factor*0.5*(scale - scale_), inc_factor*2.0) \
                                   if N_inv_samples < N_opt_samples else \
                                   (scale_,0.5*(scale_ + scale), 1.0)

-    return np.repeat(np.arange(len(dist)),repeats)[np.random.default_rng(rng_seed).permutation(N_inv_samples)[:N]]
+    return _np.repeat(_np.arange(len(dist)),repeats)[_np.random.default_rng(rng_seed).permutation(N_inv_samples)[:N]]


-def shapeshifter(fro: Tuple[int, ...],
+def shapeshifter(fro: _Tuple[int, ...],
-                 to: Tuple[int, ...],
+                 to: _Tuple[int, ...],
-                 mode: Literal['left','right'] = 'left',
+                 mode: _Literal['left','right'] = 'left', # noqa
-                 keep_ones: bool = False) -> Tuple[int, ...]:
+                 keep_ones: bool = False) -> _Tuple[int, ...]:
     """
     Return dimensions that reshape 'fro' to become broadcastable to 'to'.
@@ -509,7 +491,7 @@ def shapeshifter(fro: Tuple[int, ...],
     fro = (1,) if len(fro) == 0 else fro
     to = (1,) if len(to) == 0 else to
     try:
-        match = re.match(beg[mode]
+        match = _re.match(beg[mode]
                          +f',{sep[mode]}'.join(map(lambda x: f'{x}'
                                                    if x>1 or (keep_ones and len(fro)>1) else
                                                    '\\d+',fro))
@@ -518,14 +500,14 @@
         grp = match.groups()
     except AssertionError:
         raise ValueError(f'shapes cannot be shifted {fro} --> {to}')
-    fill: Any = ()
+    fill: _Any = ()
     for g,d in zip(grp,fro+(None,)):
         fill += (1,)*g.count(',')+(d,)
     return fill[:-1]


-def shapeblender(a: Tuple[int, ...],
+def shapeblender(a: _Tuple[int, ...],
-                 b: Tuple[int, ...]) -> Tuple[int, ...]:
+                 b: _Tuple[int, ...]) -> _Tuple[int, ...]:
     """
     Return a shape that overlaps the rightmost entries of 'a' with the leftmost of 'b'.
@@ -553,7 +535,7 @@ def shapeblender(a: Tuple[int, ...],
     return a + b[i:]


-def extend_docstring(extra_docstring: str) -> Callable:
+def extend_docstring(extra_docstring: str) -> _Callable:
     """
     Decorator: Append to function's docstring.
@@ -569,8 +551,8 @@ def extend_docstring(extra_docstring: str) -> Callable:
     return _decorator


-def extended_docstring(f: Callable,
+def extended_docstring(f: _Callable,
-                       extra_docstring: str) -> Callable:
+                       extra_docstring: str) -> _Callable:
     """
     Decorator: Combine another function's docstring with a given docstring.
@@ -588,7 +570,7 @@ def extended_docstring(f: Callable,
     return _decorator


-def DREAM3D_base_group(fname: Union[str, Path]) -> str:
+def DREAM3D_base_group(fname: _Union[str, _Path]) -> str:
     """
     Determine the base group of a DREAM.3D file.
@@ -606,7 +588,7 @@ def DREAM3D_base_group(fname: Union[str, Path]) -> str:
         Path to the base group.

     """
-    with h5py.File(Path(fname).expanduser(),'r') as f:
+    with _h5py.File(_Path(fname).expanduser(),'r') as f:
         base_group = f.visit(lambda path: path.rsplit('/',2)[0] if '_SIMPL_GEOMETRY/SPACING' in path else None)

     if base_group is None:
@@ -614,7 +596,7 @@ def DREAM3D_base_group(fname: Union[str, Path]) -> str:
     return base_group


-def DREAM3D_cell_data_group(fname: Union[str, Path]) -> str:
+def DREAM3D_cell_data_group(fname: _Union[str, _Path]) -> str:
     """
     Determine the cell data group of a DREAM.3D file.
@@ -634,10 +616,10 @@ def DREAM3D_cell_data_group(fname: Union[str, Path]) -> str:
     """
     base_group = DREAM3D_base_group(fname)
-    with h5py.File(Path(fname).expanduser(),'r') as f:
+    with _h5py.File(_Path(fname).expanduser(),'r') as f:
         cells = tuple(f['/'.join([base_group,'_SIMPL_GEOMETRY','DIMENSIONS'])][()][::-1])
         cell_data_group = f[base_group].visititems(lambda path,obj: path.split('/')[0] \
-                                                   if isinstance(obj,h5py._hl.dataset.Dataset) and np.shape(obj)[:-1] == cells \
+                                                   if isinstance(obj,_h5py._hl.dataset.Dataset) and _np.shape(obj)[:-1] == cells \
                                                    else None)

     if cell_data_group is None:
@@ -647,8 +629,8 @@ def DREAM3D_cell_data_group(fname: Union[str, Path]) -> str:
 def Bravais_to_Miller(*,
-                      uvtw: np.ndarray = None,
+                      uvtw: _np.ndarray = None,
-                      hkil: np.ndarray = None) -> np.ndarray:
+                      hkil: _np.ndarray = None) -> _np.ndarray:
     """
     Transform 4 Miller–Bravais indices to 3 Miller indices of crystal direction [uvw] or plane normal (hkl).
@@ -665,19 +647,19 @@ def Bravais_to_Miller(*,
     """
     if (uvtw is not None) ^ (hkil is None):
         raise KeyError('specify either "uvtw" or "hkil"')
-    axis,basis = (np.array(uvtw),np.array([[1,0,-1,0],
+    axis,basis = (_np.array(uvtw),_np.array([[1,0,-1,0],
                                             [0,1,-1,0],
                                             [0,0, 0,1]])) \
                  if hkil is None else \
-                 (np.array(hkil),np.array([[1,0,0,0],
+                 (_np.array(hkil),_np.array([[1,0,0,0],
                                             [0,1,0,0],
                                             [0,0,0,1]]))
-    return np.einsum('il,...l',basis,axis)
+    return _np.einsum('il,...l',basis,axis)


 def Miller_to_Bravais(*,
-                      uvw: np.ndarray = None,
+                      uvw: _np.ndarray = None,
-                      hkl: np.ndarray = None) -> np.ndarray:
+                      hkl: _np.ndarray = None) -> _np.ndarray:
     """
     Transform 3 Miller indices to 4 Miller–Bravais indices of crystal direction [uvtw] or plane normal (hkil).
@@ -694,19 +676,19 @@ def Miller_to_Bravais(*,
     """
     if (uvw is not None) ^ (hkl is None):
         raise KeyError('specify either "uvw" or "hkl"')
-    axis,basis = (np.array(uvw),np.array([[ 2,-1, 0],
+    axis,basis = (_np.array(uvw),_np.array([[ 2,-1, 0],
                                            [-1, 2, 0],
                                            [-1,-1, 0],
                                            [ 0, 0, 3]])/3) \
                  if hkl is None else \
-                 (np.array(hkl),np.array([[ 1, 0, 0],
+                 (_np.array(hkl),_np.array([[ 1, 0, 0],
                                            [ 0, 1, 0],
                                            [-1,-1, 0],
                                            [ 0, 0, 1]]))
-    return np.einsum('il,...l',basis,axis)
+    return _np.einsum('il,...l',basis,axis)


-def dict_prune(d: Dict) -> Dict:
+def dict_prune(d: _Dict) -> _Dict:
     """
     Recursively remove empty dictionaries.
@@ -732,7 +714,7 @@ def dict_prune(d: Dict) -> Dict:
     return new


-def dict_flatten(d: Dict) -> Dict:
+def dict_flatten(d: _Dict) -> _Dict:
     """
     Recursively remove keys of single-entry dictionaries.
@@ -756,8 +738,8 @@ def dict_flatten(d: Dict) -> Dict:
     return new


-def tail_repack(extended: Union[str, Sequence[str]],
+def tail_repack(extended: _Union[str, _Sequence[str]],
-                existing: List[str] = []) -> List[str]:
+                existing: _List[str] = []) -> _List[str]:
     """
     Repack tailing characters into single string if all are new.
@@ -782,11 +764,11 @@ def tail_repack(extended: Union[str, Sequence[str]],
     """
     return [extended] if isinstance(extended,str) else existing + \
-           ([''.join(extended[len(existing):])] if np.prod([len(i) for i in extended[len(existing):]]) == 1 else
+           ([''.join(extended[len(existing):])] if _np.prod([len(i) for i in extended[len(existing):]]) == 1 else
            list(extended[len(existing):]))


-def aslist(arg: Union[IntCollection,int,None]) -> List:
+def aslist(arg: _Union[_IntCollection, int, None]) -> _List:
     """
     Transform argument to list.
@@ -801,7 +783,7 @@ def aslist(arg: Union[IntCollection,int,None]) -> List:
         Entity transformed into list.

     """
-    return [] if arg is None else list(arg) if isinstance(arg,(np.ndarray,Collection)) else [arg]
+    return [] if arg is None else list(arg) if isinstance(arg,(_np.ndarray,_Collection)) else [arg]


 ####################################################################################################
@@ -834,11 +816,11 @@ class ProgressBar:
         self.total = total
         self.prefix = prefix
         self.bar_length = bar_length
-        self.time_start = self.time_last_update = datetime.datetime.now()
+        self.time_start = self.time_last_update = _datetime.datetime.now()
         self.fraction_last = 0.0
-        sys.stderr.write(f"{self.prefix} {'░'*self.bar_length} 0% ETA n/a")
+        _sys.stderr.write(f"{self.prefix} {'░'*self.bar_length} 0% ETA n/a")
-        sys.stderr.flush()
+        _sys.stderr.flush()

     def update(self,
                iteration: int) -> None:
@@ -846,17 +828,17 @@
         fraction = (iteration+1) / self.total

         if (filled_length := int(self.bar_length * fraction)) > int(self.bar_length * self.fraction_last) or \
-           datetime.datetime.now() - self.time_last_update > datetime.timedelta(seconds=10):
+           _datetime.datetime.now() - self.time_last_update > _datetime.timedelta(seconds=10):
-            self.time_last_update = datetime.datetime.now()
+            self.time_last_update = _datetime.datetime.now()
             bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
-            remaining_time = (datetime.datetime.now() - self.time_start) \
+            remaining_time = (_datetime.datetime.now() - self.time_start) \
                              * (self.total - (iteration+1)) / (iteration+1)
-            remaining_time -= datetime.timedelta(microseconds=remaining_time.microseconds)  # remove μs
+            remaining_time -= _datetime.timedelta(microseconds=remaining_time.microseconds)  # remove μs
-            sys.stderr.write(f'\r{self.prefix} {bar} {fraction:>4.0%} ETA {remaining_time}')
+            _sys.stderr.write(f'\r{self.prefix} {bar} {fraction:>4.0%} ETA {remaining_time}')
-            sys.stderr.flush()
+            _sys.stderr.flush()
             self.fraction_last = fraction

         if iteration == self.total - 1:
-            sys.stderr.write('\n')
+            _sys.stderr.write('\n')
-            sys.stderr.flush()
+            _sys.stderr.flush()
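
Note: the __all__ list is replaced by underscore-aliased imports. __all__ only governs "from damask.util import *"; aliasing imports as _np, _re, ... additionally keeps them out of the module's apparent public namespace (dir() filtering, tab completion), so the explicit list becomes redundant. A toy illustration of the pattern (hypothetical module, not part of DAMASK):

    # mini_util.py -- only hypot2 reads as public; the math import stays an implementation detail
    import math as _math

    def hypot2(a: float, b: float) -> float:
        """Example public helper."""
        return _math.sqrt(a*a + b*b)

    # >>> [name for name in dir(__import__('mini_util')) if not name.startswith('_')]
    # ['hypot2']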

View File

@@ -224,11 +224,11 @@ class TestOrientation:
     @pytest.mark.parametrize('family',crystal_families)
     def test_reduced_corner_cases(self,family):
-        # test whether there is always a sym-eq rotation that falls into the FZ
+        # test whether there is always exactly one sym-eq rotation that falls into the FZ
         N = np.random.randint(10,40)
         size = np.ones(3)*np.pi**(2./3.)
         grid = grid_filters.coordinates0_node([N+1,N+1,N+1],size,-size*.5)
-        evenly_distributed = Orientation.from_cubochoric(x=grid[:-2,:-2,:-2],family=family)
+        evenly_distributed = Orientation.from_cubochoric(x=grid,family=family)
         assert evenly_distributed.shape == evenly_distributed.reduced.shape

     @pytest.mark.parametrize('family',crystal_families)
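
Note: the test now feeds the complete node grid to from_cubochoric, including points on the surface of the cubochoric cube (edge length pi**(2/3)), which map to 180° rotations; these are exactly the corner cases the NaN-tolerant in_FZ check above targets. A stand-alone sketch of such a grid (not the test itself):

    import numpy as np

    a = np.pi**(2./3.)                                    # edge length of the cubochoric cube
    x = np.linspace(-a/2, a/2, 11)                        # node positions, cube surface included
    grid = np.stack(np.meshgrid(x, x, x, indexing='ij'), axis=-1)
    on_surface = np.isclose(np.abs(grid).max(axis=-1), a/2)
    print(f'{on_surface.sum()} of {on_surface.size} nodes lie on the cube surface (180° rotations)')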

View File

@@ -301,14 +301,13 @@ def ro2ho(ro):
 #---------- Homochoric vector----------
 def ho2ax(ho):
     """Homochoric vector to axis angle pair."""
-    tfit = np.array([+1.0000000000018852, -0.5000000002194847,
-                     -0.024999992127593126, -0.003928701544781374,
-                     -0.0008152701535450438, -0.0002009500426119712,
-                     -0.00002397986776071756, -0.00008202868926605841,
-                     +0.00012448715042090092, -0.0001749114214822577,
-                     +0.0001703481934140054, -0.00012062065004116828,
-                     +0.000059719705868660826, -0.00001980756723965647,
-                     +0.000003953714684212874, -0.00000036555001439719544])
+    tfit = np.array([+0.9999999999999968, -0.49999999999986866, -0.025000000000632055,
+                     -0.003928571496460683, -0.0008164666077062752, -0.00019411896443261646,
+                     -0.00004985822229871769, -0.000014164962366386031, -1.9000248160936107e-6,
+                     -5.72184549898506e-6, +7.772149920658778e-6, -0.00001053483452909705,
+                     +9.528014229335313e-6, -5.660288876265125e-6, +1.2844901692764126e-6,
+                     +1.1255185726258763e-6, -1.3834391419956455e-6, +7.513691751164847e-7,
+                     -2.401996891720091e-7, +4.386887017466388e-8, -3.5917775353564864e-9])
     # normalize h and store the magnitude
     hmag_squared = np.sum(ho**2.)
     if iszero(hmag_squared):