Merge branch '238-tail_repack-behavior-and-use' into 'development'

simplifications of Python code

Closes #238

See merge request damask/DAMASK!684
commit 5d2d611898
@@ -399,7 +399,7 @@ class Colormap(mpl.colors.ListedColormap):
 """
 labels = {'RGBA':4} if self.colors.shape[1] == 4 else {'RGB': 3}
-t = Table(labels,self.colors,f'Creator: {util.execution_stamp("Colormap")}')
+t = Table(labels,self.colors,[f'Creator: {util.execution_stamp("Colormap")}'])
 t.save(self._get_file_handle(fname,'.txt'))
@@ -4,7 +4,7 @@ import warnings
 import multiprocessing as mp
 from functools import partial
 import typing
-from typing import Optional, Union, TextIO, List, Sequence, Dict
+from typing import Optional, Union, TextIO, Sequence, Dict
 from pathlib import Path

 import numpy as np
@@ -18,7 +18,7 @@ from . import grid_filters
 from . import Rotation
 from . import Table
 from . import Colormap
-from ._typehints import FloatSequence, IntSequence, IntCollection, NumpyRngSeed
+from ._typehints import FloatSequence, IntSequence, NumpyRngSeed
 try:
     import numba as nb   # type: ignore
 except ImportError:
@@ -66,8 +66,9 @@ class Grid:
 self.size = size   # type: ignore
 self.origin = origin   # type: ignore
 self.initial_conditions = {} if initial_conditions is None else initial_conditions
-comments_ = [comments] if isinstance(comments,str) else comments
-self.comments = [] if comments_ is None else [str(c) for c in comments_]
+self.comments = [] if comments is None else \
+                [comments] if isinstance(comments,str) else \
+                [str(c) for c in comments]

 def __repr__(self) -> str:
     """
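
The inlined conditional keeps the previous normalization of the comments argument (None, single string, or other sequence); a minimal standalone sketch of that behaviour (the function name is illustrative only, not part of the DAMASK API):

    from typing import List, Sequence, Union

    def normalize_comments(comments: Union[str, Sequence, None]) -> List[str]:
        # None -> [], single str -> one-element list, any other sequence -> list of str
        return [] if comments is None else \
               [comments] if isinstance(comments, str) else \
               [str(c) for c in comments]

    assert normalize_comments(None) == []
    assert normalize_comments('history') == ['history']
    assert normalize_comments(['a', 1]) == ['a', '1']
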
@@ -183,17 +184,6 @@ class Grid:

 self._ic = ic

-@property
-def comments(self) -> List[str]:
-    """Comments, e.g. history of operations."""
-    return self._comments
-
-@comments.setter
-def comments(self,
-             comments: Union[str, Sequence[str]]):
-    self._comments = [str(c) for c in comments] if isinstance(comments,Sequence) else [str(comments)]
-
-
 @property
 def cells(self) -> np.ndarray:
     """Number of cells in x,y,z direction."""
@@ -226,14 +216,13 @@ class Grid:
 v = VTK.load(fname if str(fname).endswith('.vti') else str(fname)+'.vti')
 cells = np.array(v.vtk_data.GetDimensions())-1
 bbox = np.array(v.vtk_data.GetBounds()).reshape(3,2).T
-comments = v.comments
 ic = {label:v.get(label).reshape(cells,order='F') for label in set(v.labels['Cell Data']) - {'material'}}

 return Grid(material = v.get('material').reshape(cells,order='F'),
             size = bbox[1] - bbox[0],
             origin = bbox[0],
             initial_conditions = ic,
-            comments = comments,
+            comments = v.comments,
             )
@@ -1103,7 +1092,7 @@ class Grid:

 def clean(self,
           distance: float = np.sqrt(3),
-          selection: Optional[IntCollection] = None,
+          selection: Optional[IntSequence] = None,
           invert_selection: bool = False,
           periodic: bool = True,
           rng_seed: Optional[NumpyRngSeed] = None) -> 'Grid':
@@ -1115,7 +1104,7 @@ class Grid:
 distance : float, optional
     Voxel distance checked for presence of other materials.
     Defaults to sqrt(3).
-selection : (collection of) int, optional
+selection : (sequence of) int, optional
     Material IDs to consider. Defaults to all.
 invert_selection : bool, optional
     Consider all material IDs except those in selection. Defaults to False.
@@ -1136,7 +1125,7 @@ class Grid:

 """
 def most_frequent(stencil: np.ndarray,
-                  selection: Union[None,set],
+                  selection: Union[None,np.ndarray],
                   rng: np.random.Generator):
     me = stencil[stencil.size//2]
     if selection is None or me in selection:
@@ -1152,8 +1141,8 @@ class Grid:
 xx,yy,zz = np.meshgrid(ext,ext,ext)
 footprint = xx**2+yy**2+zz**2 <= distance**2+distance*1e-8
 selection_ = None if selection is None else \
-             set(self.material.flatten()) - set(util.aslist(selection)) if invert_selection else \
-             set(self.material.flatten()) & set(util.aslist(selection))
+             np.setdiff1d(self.material,selection) if invert_selection else \
+             np.intersect1d(self.material,selection)
 material = ndimage.generic_filter(
     self.material,
     most_frequent,
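
The Python set arithmetic on the flattened material array gives way to NumPy's set routines, which take the array and a plain sequence directly and return sorted arrays of unique IDs; a short sketch of the equivalence on illustrative data:

    import numpy as np

    material  = np.array([[0, 1, 1],
                          [2, 3, 3]])          # illustrative material IDs
    selection = [1, 3]

    # previous idiom: Python sets on the flattened array
    old_diff  = set(material.flatten()) - set(selection)
    old_inter = set(material.flatten()) & set(selection)

    # new idiom: NumPy set routines, no flatten()/aslist() needed
    new_diff  = np.setdiff1d(material, selection)     # IDs not in selection -> [0 2]
    new_inter = np.intersect1d(material, selection)   # IDs also in selection -> [1 3]

    assert set(new_diff)  == old_diff
    assert set(new_inter) == old_inter
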
@@ -1265,7 +1254,7 @@ class Grid:
 def vicinity_offset(self,
                     distance: float = np.sqrt(3),
                     offset: Optional[int] = None,
-                    selection: Optional[IntCollection] = None,
+                    selection: Optional[IntSequence] = None,
                     invert_selection: bool = False,
                     periodic: bool = True) -> 'Grid':
     """
@@ -1282,7 +1271,7 @@ class Grid:
 offset : int, optional
     Offset (positive or negative) to tag material IDs.
     Defaults to material.max()+1.
-selection : (collection of) int, optional
+selection : (sequence of) int, optional
     Material IDs that trigger an offset.
     Defaults to any other than own material ID.
 invert_selection : bool, optional
@@ -1315,8 +1304,8 @@ class Grid:
 footprint = xx**2+yy**2+zz**2 <= distance**2+distance*1e-8
 offset_ = np.nanmax(self.material)+1 if offset is None else offset
 selection_ = None if selection is None else \
-             np.array(list(set(self.material.flatten()) - set(util.aslist(selection)))) if invert_selection else \
-             np.array(list(set(self.material.flatten()) & set(util.aslist(selection))))
+             np.setdiff1d(self.material,selection) if invert_selection else \
+             np.intersect1d(self.material,selection)

 mask = ndimage.generic_filter(self.material,
                               tainted_neighborhood,
@@ -1874,7 +1874,7 @@ class Result:
 if self.version_minor >= 13:
     creator = f.attrs['creator'] if h5py3 else f.attrs['creator'].decode()
     created = f.attrs['created'] if h5py3 else f.attrs['created'].decode()
-    v.comments += f'{creator} ({created})'
+    v.comments += [f'{creator} ({created})']

 for inc in util.show_progress(self.visible['increments']):
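
Wrapping the comment in a list matters because the comments are stored as a list of strings: augmenting a list with a bare string extends it character by character, whereas augmenting with a one-element list appends the whole entry; this character splitting is what util.tail_repack (removed further down) used to repair after the fact. A plain-Python illustration:

    comments = ['existing entry']

    bad = list(comments)
    bad += 'creator (2023)'            # list += str iterates the string character by character
    assert bad[1:4] == ['c', 'r', 'e']

    good = list(comments)
    good += ['creator (2023)']         # list += [str] appends the whole comment
    assert good == ['existing entry', 'creator (2023)']
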
@@ -29,8 +29,9 @@ class Table:
     Additional, human-readable information.

 """
-comments_ = [comments] if isinstance(comments,str) else comments
-self.comments = [] if comments_ is None else [str(c) for c in comments_]
+self.comments = [] if comments is None else \
+                [comments] if isinstance(comments,str) else \
+                [str(c) for c in comments]
 self.shapes = { k:(v,) if isinstance(v,(np.int64,np.int32,int)) else v for k,v in shapes.items() }
 self.data = pd.DataFrame(data=data)
 self._relabel('uniform')
@@ -185,16 +186,6 @@ class Table:
 self.data.columns = self._label(self.shapes,how)   # type: ignore


-def _add_comment(self,
-                 label: str,
-                 shape: Tuple[int, ...],
-                 info: Optional[str] = None):
-    if info is not None:
-        specific = f'{label}{" "+str(shape) if np.prod(shape,dtype=np.int64) > 1 else ""}: {info}'
-        general = util.execution_stamp('Table')
-        self.comments.append(f'{specific} / {general}')
-
-
 def isclose(self,
             other: 'Table',
             rtol: float = 1e-5,
@@ -402,13 +393,15 @@ class Table:
     Updated table.

 """
-dup = self.copy()
-dup._add_comment(label, data.shape[1:], info)
+def add_comment(label: str, shape: Tuple[int, ...],info: str) -> List[str]:
+    specific = f'{label}{" "+str(shape) if np.prod(shape,dtype=np.int64) > 1 else ""}: {info}'
+    general = util.execution_stamp('Table')
+    return [f'{specific} / {general}']

-if m := re.match(r'(.*)\[((\d+,)*(\d+))\]',label):
-    key = m.group(1)
-else:
-    key = label
+dup = self.copy()
+if info is not None: self.comments += add_comment(label,data.shape[1:],info)
+
+key = m.group(1) if (m := re.match(r'(.*)\[((\d+,)*(\d+))\]',label)) else label

 if key in dup.shapes:
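
The former four-line if/else collapses into one conditional expression using an assignment expression; the regex strips a shape suffix such as '[3,3]' from the column label. A standalone sketch of that parsing step:

    import re

    def split_label(label: str) -> str:
        # 'F[3,3]' -> 'F', 'v[2]' -> 'v', plain labels pass through unchanged
        return m.group(1) if (m := re.match(r'(.*)\[((\d+,)*(\d+))\]', label)) else label

    assert split_label('F[3,3]') == 'F'
    assert split_label('v[2]') == 'v'
    assert split_label('label') == 'label'
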
@@ -1,6 +1,6 @@
 """Functionality for typehints."""

-from typing import Sequence, Union, Literal, TextIO, Collection
+from typing import Sequence, Union, Literal, TextIO
 from pathlib import Path

 import numpy as np
@@ -8,7 +8,6 @@ import numpy as np

 FloatSequence = Union[np.ndarray,Sequence[float]]
 IntSequence = Union[np.ndarray,Sequence[int]]
-IntCollection = Union[np.ndarray,Collection[int]]
 FileHandle = Union[TextIO, str, Path]
 CrystalFamily = Union[None,Literal['triclinic', 'monoclinic', 'orthorhombic', 'tetragonal', 'hexagonal', 'cubic']]
 CrystalLattice = Union[None,Literal['aP', 'mP', 'mS', 'oP', 'oS', 'oI', 'oF', 'tP', 'tI', 'hP', 'cP', 'cI', 'cF']]
@@ -104,22 +104,19 @@ class VTK:

 @comments.setter
 def comments(self,
-             comments: Union[str, Sequence[str]]):
+             comments: Sequence[str]):
     """
     Set comments.

     Parameters
     ----------
-    comments : (sequence of) str
+    comments : sequence of str
         Comments.

     """
     s = vtk.vtkStringArray()
     s.SetName('comments')
-    comments_ = util.tail_repack(comments,self.comments) if comments[:len(self.comments)] == self.comments else \
-                [comments] if isinstance(comments,str) else \
-                comments
-    for c in comments_:
+    for c in comments:
         s.InsertNextValue(c)
     self.vtk_data.GetFieldData().AddArray(s)
@@ -478,13 +475,13 @@ class VTK:
     _add_array(dup.vtk_data,
                label,
                np.where(data.mask,data.fill_value,data) if isinstance(data,np.ma.MaskedArray) else data)
-    if info is not None: dup.comments += f'{label}: {info}'
+    if info is not None: dup.comments += [f'{label}: {info}']
 else:
     raise ValueError('no label defined for data')
 elif isinstance(table,Table):
     for l in table.labels:
         _add_array(dup.vtk_data,l,table.get(l))
-        if info is not None: dup.comments += f'{l}: {info}'
+        if info is not None: dup.comments += [f'{l}: {info}']
 else:
     raise TypeError
@@ -6,7 +6,7 @@ from scipy import spatial as _spatial
 import numpy as _np

 from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence, \
-                        NumpyRngSeed as _NumpyRngSeed, IntCollection as _IntCollection
+                        NumpyRngSeed as _NumpyRngSeed
 from . import util as _util
 from . import grid_filters as _grid_filters
@@ -106,7 +106,7 @@ def from_Poisson_disc(size: _FloatSequence,


 def from_grid(grid,
-              selection: _Optional[_IntCollection] = None,
+              selection: _Optional[_IntSequence] = None,
               invert_selection: bool = False,
               average: bool = False,
               periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:
@@ -117,7 +117,7 @@ def from_grid(grid,
 ----------
 grid : damask.Grid
     Grid from which the material IDs are used as seeds.
-selection : (collection of) int, optional
+selection : (sequence of) int, optional
     Material IDs to consider.
 invert_selection : bool, optional
     Consider all material IDs except those in selection. Defaults to False.
@@ -134,7 +134,7 @@ def from_grid(grid,
 """
 material = grid.material.reshape((-1,1),order='F')
 mask = _np.full(grid.cells.prod(),True,dtype=bool) if selection is None else \
-       _np.isin(material,_util.aslist(selection),invert=invert_selection).flatten()
+       _np.isin(material,selection,invert=invert_selection).flatten()
 coords = _grid_filters.coordinates0_point(grid.cells,grid.size).reshape(-1,3,order='F')

 if not average:
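
With IntCollection and util.aslist gone, the selection is handed to np.isin as-is; NumPy accepts any array-like and the invert flag covers the inverted selection. A small sketch of the mask construction on illustrative data:

    import numpy as np

    material  = np.array([0, 1, 1, 2, 3]).reshape((-1, 1))   # column vector, as in from_grid
    selection = [1, 3]

    keep    = np.isin(material, selection).flatten()                 # True where ID is selected
    exclude = np.isin(material, selection, invert=True).flatten()    # True where ID is not selected

    assert keep.tolist()    == [False, True, True, False, True]
    assert exclude.tolist() == [True, False, False, True, False]
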
@@ -11,16 +11,15 @@ import fractions as _fractions
 from collections import abc as _abc
 from functools import reduce as _reduce, partial as _partial
 from typing import Optional as _Optional, Callable as _Callable, Union as _Union, Iterable as _Iterable, \
-                   Sequence as _Sequence, Dict as _Dict, List as _List, Tuple as _Tuple, Literal as _Literal, \
-                   Any as _Any, Collection as _Collection, TextIO as _TextIO
+                   Dict as _Dict, List as _List, Tuple as _Tuple, Literal as _Literal, \
+                   Any as _Any, TextIO as _TextIO
 from pathlib import Path as _Path

 import numpy as _np
 import h5py as _h5py

 from . import version as _version
-from ._typehints import FloatSequence as _FloatSequence, NumpyRngSeed as _NumpyRngSeed, IntCollection as _IntCollection, \
-                        FileHandle as _FileHandle
+from ._typehints import FloatSequence as _FloatSequence, NumpyRngSeed as _NumpyRngSeed, FileHandle as _FileHandle

 # https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py
 # https://stackoverflow.com/questions/287871
@@ -786,54 +785,6 @@ def dict_flatten(d: _Dict) -> _Dict:
 return new


-def tail_repack(extended: _Union[str, _Sequence[str]],
-                existing: _List[str] = []) -> _List[str]:
-    """
-    Repack tailing characters into single string if all are new.
-
-    Parameters
-    ----------
-    extended : (sequence of) str
-        Extended string list with potentially autosplitted tailing string relative to `existing`.
-    existing : list of str
-        Base string list.
-
-    Returns
-    -------
-    repacked : list of str
-        Repacked version of `extended`.
-
-    Examples
-    --------
-    >>> tail_repack(['a','new','e','n','t','r','y'],['a','new'])
-    ['a','new','entry']
-    >>> tail_repack(['a','new','shiny','e','n','t','r','y'],['a','new'])
-    ['a','new','shiny','e','n','t','r','y']
-
-    """
-    new = extended[len(existing):]
-    return [extended] if isinstance(extended,str) else \
-           existing + list([''.join(new)] if _np.prod([len(i) for i in new]) == 1 else new)
-
-
-def aslist(arg: _Union[_IntCollection, int, None]) -> _List:
-    """
-    Transform argument to list.
-
-    Parameters
-    ----------
-    arg : (collection of) int or None
-        Entity to transform into list.
-
-    Returns
-    -------
-    transformed : list
-        Entity transformed into list.
-
-    """
-    return [] if arg is None else list(arg) if isinstance(arg,(_np.ndarray,_Collection)) else [arg]
-
-
 ####################################################################################################
 # Classes
 ####################################################################################################
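
For reference, the removed tail_repack was a workaround for exactly the character splitting illustrated earlier: it re-joined a tail of single characters into one string, but only if every tail element was a single character. A sketch reconstructed from the removed code and its docstring examples (the all(len == 1) test stands in for the original length-product check):

    def tail_repack_sketch(extended, existing=[]):
        # re-join a trailing run of single characters into one string
        new = extended[len(existing):]
        return [extended] if isinstance(extended, str) else \
               existing + list([''.join(new)] if all(len(i) == 1 for i in new) else new)

    assert tail_repack_sketch(['a','new','e','n','t','r','y'], ['a','new']) == ['a','new','entry']
    assert tail_repack_sketch(['a','new','shiny','e','n','t','r','y'], ['a','new']) == \
           ['a','new','shiny','e','n','t','r','y']
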
@@ -17,11 +17,13 @@ from damask import grid_filters
 @pytest.fixture
 def default():
     """Simple geometry."""
-    x=np.concatenate((np.ones(40,dtype=int),
-                      np.arange(2,42),
-                      np.ones(40,dtype=int)*2,
-                      np.arange(1,41))).reshape(8,5,4,order='F')
-    return Grid(x,[8e-6,5e-6,4e-6])
+    g = np.array([8,5,4])
+    l = np.prod(g[:2])
+    return Grid(np.concatenate((np.ones (l,dtype=int),
+                                np.arange(l,dtype=int)+2,
+                                np.ones (l,dtype=int)*2,
+                                np.arange(l,dtype=int)+1)).reshape(g,order='F'),
+                g*1e-6)

 @pytest.fixture
 def random():
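
The rewritten fixture derives everything from the cell count g: l = np.prod(g[:2]) is 8*5 = 40, so the four concatenated blocks of length l fill all 8*5*4 = 160 cells before being reshaped to g. A quick check of that arithmetic:

    import numpy as np

    g = np.array([8, 5, 4])
    l = np.prod(g[:2])                 # 40 entries per block
    x = np.concatenate((np.ones(l, dtype=int),
                        np.arange(l, dtype=int) + 2,
                        np.ones(l, dtype=int) * 2,
                        np.arange(l, dtype=int) + 1))

    assert l == 40
    assert x.size == np.prod(g) == 160
    assert x.reshape(g, order='F').shape == (8, 5, 4)
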
@@ -166,15 +168,15 @@ class TestGrid:
 @pytest.mark.parametrize('periodic',[True,False])
 def test_clean_reference(self,default,update,ref_path,distance,selection,periodic):
     current = default.clean(distance,selection,periodic=periodic,rng_seed=0)
-    reference = ref_path/f'clean_{distance}_{"+".join(map(str,util.aslist(selection)))}_{periodic}.vti'
+    reference = ref_path/f'clean_{distance}_{util.srepr(selection,"+")}_{periodic}.vti'
     if update:
         current.save(reference)
     assert Grid.load(reference) == current

-@pytest.mark.parametrize('selection',[list(np.random.randint(1,20,6)),set(np.random.randint(1,20,6)),np.random.randint(1,20,6)])
+@pytest.mark.parametrize('selection',[list(np.random.randint(1,20,6)),np.random.randint(1,20,6)])
 @pytest.mark.parametrize('invert',[True,False])
 def test_clean_invert(self,default,selection,invert):
-    selection_inverse = set(default.material.flatten()) - set(selection)
+    selection_inverse = np.setdiff1d(default.material,selection)
     assert default.clean(selection=selection,invert_selection=invert,rng_seed=0) == \
            default.clean(selection=selection_inverse,invert_selection=not invert,rng_seed=0)
@@ -351,10 +353,10 @@ class TestGrid:

     assert np.all(m2==grid.material)

-@pytest.mark.parametrize('selection',[list(np.random.randint(1,20,6)),set(np.random.randint(1,20,6)),np.random.randint(1,20,6)])
+@pytest.mark.parametrize('selection',[list(np.random.randint(1,20,6)),np.random.randint(1,20,6)])
 @pytest.mark.parametrize('invert',[True,False])
 def test_vicinity_offset_invert(self,random,selection,invert):
-    selection_inverse = set(random.material.flatten()) - set(selection)
+    selection_inverse = np.setdiff1d(random.material,selection)
     assert random.vicinity_offset(selection=selection,invert_selection=not invert) == \
            random.vicinity_offset(selection=selection_inverse,invert_selection=invert)
@@ -393,7 +393,7 @@ class TestResult:
 result.export_VTK(output,target_dir=tmp_path,parallel=False)
 fname = fname.split('.')[0]+f'_inc{(inc if type(inc) == int else inc[0]):0>2}.vti'
 v = VTK.load(tmp_path/fname)
-v.comments = 'n/a'
+v.comments = ['n/a']
 v.save(tmp_path/fname,parallel=False)
 with open(tmp_path/fname) as f:
     cur = hashlib.md5(f.read().encode()).hexdigest()
@@ -6,7 +6,6 @@ from damask import Rotation
 from damask import Table
 from damask import _rotation
 from damask import grid_filters
-from damask import util
 from damask import tensor

 n = 1000
@@ -1151,19 +1150,22 @@ class TestRotation:
 @pytest.mark.parametrize('sigma',[5,10,15,20])
 @pytest.mark.parametrize('shape',[1000,10000,100000,(10,100)])
 def test_from_fiber_component(self,sigma,shape):

+    def astuple(a):
+        return tuple(a) if hasattr(a,'__len__') else (a,)
+
     p = []
-    for run in range(5):
+    for run in range(9):
         alpha = np.arccos(np.random.random()),np.random.random()*2*np.pi
         beta = np.arccos(np.random.random()),np.random.random()*2*np.pi

         f_in_C = np.array([np.sin(alpha[0])*np.cos(alpha[1]), np.sin(alpha[0])*np.sin(alpha[1]), np.cos(alpha[0])])
         f_in_S = np.array([np.sin( beta[0])*np.cos( beta[1]), np.sin( beta[0])*np.sin( beta[1]), np.cos( beta[0])])
         ax = np.append(np.cross(f_in_C,f_in_S), - np.arccos(np.dot(f_in_C,f_in_S)))
-        n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else ax*-1.0 ,normalize=True)  # rotation to align fiber axis in crystal and sample system
+        n = Rotation.from_axis_angle(ax if ax[3] > 0.0 else -ax,normalize=True)       # rotation to align fiber axis in crystal and sample system

         o = Rotation.from_fiber_component(alpha,beta,np.radians(sigma),shape,False)
-        angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,tuple(util.aslist(shape))+(3,)),n@f_in_S),-1,1))
-        dist = np.array(angles) * (np.random.randint(0,2,util.aslist(shape))*2-1)
+        angles = np.arccos(np.clip(np.dot(o@np.broadcast_to(f_in_S,astuple(shape)+(3,)),n@f_in_S),-1,1))
+        dist = np.array(angles) * (np.random.randint(0,2,shape)*2-1)

         p.append(stats.normaltest(dist)[1])
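
The local astuple helper stands in for tuple(util.aslist(shape)) for the two shape flavours the test parametrizes over, a bare int and a tuple; its behaviour in isolation:

    def astuple(a):
        # bare scalars become a 1-tuple, anything with a length passes through as a tuple
        return tuple(a) if hasattr(a, '__len__') else (a,)

    assert astuple(1000) == (1000,)
    assert astuple((10, 100)) == (10, 100)
    assert astuple(1000) + (3,) == (1000, 3)   # usable directly as a broadcast shape
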
@@ -217,7 +217,7 @@ class TestVTK:

 def test_comments(self,tmp_path,default):
-    default.comments += 'this is a comment'
+    default.comments += ['this is a comment']
     default.save(tmp_path/'with_comments',parallel=False)
     new = VTK.load(tmp_path/'with_comments.vti')
     assert new.comments == ['this is a comment']
@@ -25,6 +25,17 @@ class TestUtil:
 with pytest.raises(RuntimeError):
     util.run('false')

+@pytest.mark.parametrize('input,glue,quote,output',
+                         [
+                          (None,'',False,'None'),
+                          ([None,None],'\n',False,'None\nNone'),
+                          ([-0.5,0.5],'=',False,'-0.5=0.5'),
+                          ([1,2,3],'_',False,'1_2_3'),
+                          ([1,2,3],'/',True,'"1"/"2"/"3"'),
+                         ])
+def test_srepr(self,input,glue,quote,output):
+    assert output == util.srepr(input,glue,quote)
+
 @pytest.mark.parametrize('input,output',
                          [
                           ([0,-2],[0,-1]),
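
The new parametrized test pins down the joining behaviour that remaining call sites (e.g. test_clean_reference above) now rely on. A hypothetical minimal re-implementation that satisfies exactly these cases; the real util.srepr lives in damask.util and may handle more:

    def srepr_sketch(obj, glue='\n', quote=False):
        # join the items of obj (or obj itself, if scalar) into one string
        fmt = '"{}"' if quote else '{}'
        seq = obj if hasattr(obj, '__iter__') and not isinstance(obj, str) else [obj]
        return glue.join(fmt.format(x) for x in seq)

    assert srepr_sketch(None, '') == 'None'
    assert srepr_sketch([None, None]) == 'None\nNone'
    assert srepr_sketch([-0.5, 0.5], '=') == '-0.5=0.5'
    assert srepr_sketch([1, 2, 3], '_') == '1_2_3'
    assert srepr_sketch([1, 2, 3], '/', True) == '"1"/"2"/"3"'
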
@@ -32,7 +43,6 @@ class TestUtil:
 ([1./2.,1./3.],[3,2]),
 ([2./3.,1./2.,1./3.],[4,3,2]),
 ])
-
 def test_scale2coprime(self,input,output):
     assert np.allclose(util.scale_to_coprime(np.array(input)),
                        np.array(output).astype(int))
@@ -134,10 +144,6 @@ class TestUtil:
 def test_decorate(self,style):
     assert 'DAMASK' in style('DAMASK')

-@pytest.mark.parametrize('lst',[1,[1,2],set([1,2,3]),np.arange(4)])
-def test_aslist(self,lst):
-    assert len(util.aslist(lst)) > 0
-
 @pytest.mark.parametrize('complete',[True,False])
 def test_D3D_base_group(self,tmp_path,complete):
     base_group = ''.join(random.choices('DAMASK', k=10))