Merge branch '151_typehints-readability' into 'development'

Better readability for Python

See merge request damask/DAMASK!511
Sharan Roongta 2022-01-31 17:03:24 +00:00
commit 225a5d9086
11 changed files with 318 additions and 192 deletions
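
The changes below follow a handful of recurring readability patterns: signatures are split to one annotated parameter per line, assignment expressions replace separate assign-then-test statements, and redundant else branches after raise or return are flattened. A minimal illustrative sketch of the signature style, using a made-up function that is not part of this merge request:

    from typing import Sequence

    # hypothetical example -- name and logic are not taken from the diff
    def resample(values: Sequence[float],
                 factor: int = 2,
                 reverse: bool = False) -> list:
        """One annotated parameter per line keeps long signatures scannable."""
        out = list(values) * factor
        return out[::-1] if reverse else out

    print(resample([1.0, 2.0], factor=2, reverse=True))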

View File

@ -47,27 +47,32 @@ class Colormap(mpl.colors.ListedColormap):
"""
def __eq__(self, other: object) -> bool:
def __eq__(self,
other: object) -> bool:
"""Test equality of colormaps."""
if not isinstance(other, Colormap):
return NotImplemented
return len(self.colors) == len(other.colors) \
and bool(np.all(self.colors == other.colors))
def __add__(self, other: 'Colormap') -> 'Colormap':
def __add__(self,
other: 'Colormap') -> 'Colormap':
"""Concatenate."""
return Colormap(np.vstack((self.colors,other.colors)),
f'{self.name}+{other.name}')
def __iadd__(self, other: 'Colormap') -> 'Colormap':
def __iadd__(self,
other: 'Colormap') -> 'Colormap':
"""Concatenate (in-place)."""
return self.__add__(other)
def __mul__(self, factor: int) -> 'Colormap':
def __mul__(self,
factor: int) -> 'Colormap':
"""Repeat."""
return Colormap(np.vstack([self.colors]*factor),f'{self.name}*{factor}')
def __imul__(self, factor: int) -> 'Colormap':
def __imul__(self,
factor: int) -> 'Colormap':
"""Repeat (in-place)."""
return self.__mul__(factor)
@ -161,7 +166,8 @@ class Colormap(mpl.colors.ListedColormap):
@staticmethod
def from_predefined(name: str, N: int = 256) -> 'Colormap':
def from_predefined(name: str,
N: int = 256) -> 'Colormap':
"""
Select from a set of predefined colormaps.
@ -260,10 +266,8 @@ class Colormap(mpl.colors.ListedColormap):
l,r = (field[mask].min(),field[mask].max()) if bounds is None else \
(bounds[0],bounds[1])
delta,avg = r-l,0.5*abs(r+l)
if abs(delta) * 1e8 <= avg: # delta is similar to numerical noise
l,r = l-0.5*avg*np.sign(delta),r+0.5*avg*np.sign(delta), # extend range to have actual data centered within
if abs(delta := r-l) * 1e8 <= (avg := 0.5*abs(r+l)): # delta is similar to numerical noise
l,r = (l-0.5*avg*np.sign(delta),r+0.5*avg*np.sign(delta)) # extend range to have actual data centered within
return Image.fromarray(
(np.dstack((
@ -275,7 +279,8 @@ class Colormap(mpl.colors.ListedColormap):
mode='RGBA')
def reversed(self, name: str = None) -> 'Colormap':
def reversed(self,
name: str = None) -> 'Colormap':
"""
Reverse.
@ -296,7 +301,7 @@ class Colormap(mpl.colors.ListedColormap):
>>> damask.Colormap.from_predefined('stress').reversed()
"""
rev = super(Colormap,self).reversed(name)
rev = super().reversed(name)
return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name)
@ -328,7 +333,8 @@ class Colormap(mpl.colors.ListedColormap):
return fname
def save_paraview(self, fname: FileHandle = None):
def save_paraview(self,
fname: FileHandle = None):
"""
Save as JSON file for use in Paraview.
@ -355,7 +361,8 @@ class Colormap(mpl.colors.ListedColormap):
fhandle.write('\n')
def save_ASCII(self, fname: FileHandle = None):
def save_ASCII(self,
fname: FileHandle = None):
"""
Save as ASCII file.
@ -390,7 +397,8 @@ class Colormap(mpl.colors.ListedColormap):
self._get_file_handle(fname,'.legend').write(GOM_str)
def save_gmsh(self, fname: FileHandle = None):
def save_gmsh(self,
fname: FileHandle = None):
"""
Save as ASCII file for use in gmsh.
@ -428,11 +436,11 @@ class Colormap(mpl.colors.ListedColormap):
def adjust_hue(msh_sat, msh_unsat):
"""If saturation of one of the two colors is much less than the other, hue of the less."""
if msh_sat[0] >= msh_unsat[0]:
return msh_sat[2]
else:
hSpin = msh_sat[1]/np.sin(msh_sat[1])*np.sqrt(msh_unsat[0]**2.0-msh_sat[0]**2)/msh_sat[0]
if msh_sat[2] < - np.pi/3.0: hSpin *= -1.0
return msh_sat[2] + hSpin
return msh_sat[2]
hSpin = msh_sat[1]/np.sin(msh_sat[1])*np.sqrt(msh_unsat[0]**2.0-msh_sat[0]**2)/msh_sat[0]
if msh_sat[2] < - np.pi/3.0: hSpin *= -1.0
return msh_sat[2] + hSpin
lo = np.array(low)
hi = np.array(high)
@ -445,9 +453,9 @@ class Colormap(mpl.colors.ListedColormap):
else:
lo = np.array([M_mid,0.0,0.0])
frac = 2.0*frac - 1.0
if lo[1] < 0.05 and hi[1] > 0.05:
if lo[1] < 0.05 < hi[1]:
lo[2] = adjust_hue(hi,lo)
elif lo[1] > 0.05 and hi[1] < 0.05:
elif hi[1] < 0.05 < lo[1]:
hi[2] = adjust_hue(lo,hi)
return (1.0 - frac) * lo + frac * hi
@ -476,7 +484,7 @@ class Colormap(mpl.colors.ListedColormap):
'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg',
'gist_rainbow', 'rainbow', 'jet', 'nipy_spectral', 'gist_ncar']}
_predefined_DAMASK = {'orientation': {'low': [0.933334,0.878432,0.878431],
_predefined_DAMASK = {'orientation': {'low': [0.933334,0.878432,0.878431], # noqa
'high': [0.250980,0.007843,0.000000]},
'strain': {'low': [0.941177,0.941177,0.870588],
'high': [0.266667,0.266667,0.000000]},
@ -621,7 +629,8 @@ class Colormap(mpl.colors.ListedColormap):
@staticmethod
def _lab2xyz(lab: np.ndarray, ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
def _lab2xyz(lab: np.ndarray,
ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
"""
CIE Lab to CIE Xyz.
@ -652,7 +661,8 @@ class Colormap(mpl.colors.ListedColormap):
])*ref_white
@staticmethod
def _xyz2lab(xyz: np.ndarray, ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
def _xyz2lab(xyz: np.ndarray,
ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
"""
CIE Xyz to CIE Lab.
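
Two Python 3.8 idioms recur in the Colormap changes: assignment expressions (the walrus operator) fold an assignment into the condition that consumes it, and chained comparisons replace pairs of and-ed tests. A self-contained sketch of both, with illustrative data rather than code from the file:

    import numpy as np

    field = np.array([1.0, 1.0 + 1e-12])              # nearly constant data
    l, r = field.min(), field.max()

    # walrus: compute delta and avg inside the test that uses them
    if abs(delta := r - l) * 1e8 <= (avg := 0.5 * abs(r + l)):
        l, r = l - 0.5 * avg, r + 0.5 * avg           # widen a numerically degenerate range

    # chained comparison: 'lo < 0.05 < hi' means 'lo < 0.05 and 0.05 < hi'
    lo, hi = 0.01, 0.2
    print(lo < 0.05 < hi)                             # True
    print(l, r)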

View File

@ -33,9 +33,9 @@ class Crystal():
def __init__(self,*,
family = None,
lattice = None,
a = None,b = None,c = None,
alpha = None,beta = None,gamma = None,
degrees = False):
a: float = None, b: float = None, c: float = None,
alpha: float = None, beta: float = None, gamma: float = None,
degrees: bool = False):
"""
Representation of crystal in terms of crystal family or Bravais lattice.
@ -62,7 +62,7 @@ class Crystal():
Angles are given in degrees. Defaults to False.
"""
if family not in [None] + list(lattice_symmetries.values()):
if family is not None and family not in list(lattice_symmetries.values()):
raise KeyError(f'invalid crystal family "{family}"')
if lattice is not None and family is not None and family != lattice_symmetries[lattice]:
raise KeyError(f'incompatible family "{family}" for lattice "{lattice}"')
@ -107,9 +107,6 @@ class Crystal():
if np.any([np.roll([self.alpha,self.beta,self.gamma],r)[0]
>= np.sum(np.roll([self.alpha,self.beta,self.gamma],r)[1:]) for r in range(3)]):
raise ValueError ('each lattice angle must be less than sum of others')
else:
self.a = self.b = self.c = None
self.alpha = self.beta = self.gamma = None
def __repr__(self):
@ -122,7 +119,8 @@ class Crystal():
'α={:.5g}°, β={:.5g}°, γ={:.5g}°'.format(*np.degrees(self.parameters[3:]))])
def __eq__(self,other):
def __eq__(self,
other: object) -> bool:
"""
Equal to other.
@ -132,6 +130,8 @@ class Crystal():
Crystal to check for equality.
"""
if not isinstance(other, Crystal):
return NotImplemented
return self.lattice == other.lattice and \
self.parameters == other.parameters and \
self.family == other.family
@ -139,8 +139,7 @@ class Crystal():
@property
def parameters(self):
"""Return lattice parameters a, b, c, alpha, beta, gamma."""
return (self.a,self.b,self.c,self.alpha,self.beta,self.gamma)
if hasattr(self,'a'): return (self.a,self.b,self.c,self.alpha,self.beta,self.gamma)
@property
def immutable(self):
@ -269,7 +268,7 @@ class Crystal():
https://doi.org/10.1063/1.1661333
"""
if None in self.parameters:
if self.parameters is None:
raise KeyError('missing crystal lattice parameters')
return np.array([
[1,0,0],
@ -315,7 +314,9 @@ class Crystal():
+ _lattice_points.get(self.lattice if self.lattice == 'hP' else \
self.lattice[-1],None),dtype=float)
def to_lattice(self, *, direction: np.ndarray = None, plane: np.ndarray = None) -> np.ndarray:
def to_lattice(self, *,
direction: np.ndarray = None,
plane: np.ndarray = None) -> np.ndarray:
"""
Calculate lattice vector corresponding to crystal frame direction or plane normal.
@ -339,7 +340,9 @@ class Crystal():
return np.einsum('il,...l',basis,axis)
def to_frame(self, *, uvw: np.ndarray = None, hkl: np.ndarray = None) -> np.ndarray:
def to_frame(self, *,
uvw: np.ndarray = None,
hkl: np.ndarray = None) -> np.ndarray:
"""
Calculate crystal frame vector along lattice direction [uvw] or plane normal (hkl).
@ -362,7 +365,8 @@ class Crystal():
return np.einsum('il,...l',basis,axis)
def kinematics(self, mode: str) -> Dict[str, List[np.ndarray]]:
def kinematics(self,
mode: str) -> Dict[str, List[np.ndarray]]:
"""
Return crystal kinematics systems.
@ -621,7 +625,8 @@ class Crystal():
'plane': [m[:,3:6] for m in master]}
def relation_operations(self, model: str) -> Tuple[str, Rotation]:
def relation_operations(self,
model: str) -> Tuple[str, Rotation]:
"""
Crystallographic orientation relationships for phase transformations.
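
Crystal.__eq__ now returns NotImplemented for objects of a foreign type instead of attempting the comparison, which lets Python defer to the other operand or fall back to identity. A minimal sketch of that protocol with a made-up class:

    class Lattice:
        """Hypothetical stand-in, not the damask.Crystal class."""

        def __init__(self, name: str):
            self.name = name

        def __eq__(self, other: object) -> bool:
            if not isinstance(other, Lattice):
                return NotImplemented              # signal 'cannot compare', not 'unequal'
            return self.name == other.name

    print(Lattice('cF') == Lattice('cF'))          # True
    print(Lattice('cF') == 'cF')                   # False, via fallback comparison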

View File

@ -4,7 +4,7 @@ import warnings
import multiprocessing as mp
from functools import partial
import typing
from typing import Union, Optional, TextIO, List, Sequence, Literal
from typing import Union, Optional, TextIO, List, Sequence
from pathlib import Path
import numpy as np
@ -33,7 +33,7 @@ class Grid:
material: np.ndarray,
size: FloatSequence,
origin: FloatSequence = np.zeros(3),
comments: Union[str, Sequence[str]] = []):
comments: Union[str, Sequence[str]] = None):
"""
New geometry definition for grid solvers.
@ -53,7 +53,7 @@ class Grid:
self.material = material
self.size = size # type: ignore
self.origin = origin # type: ignore
self.comments = comments # type: ignore
self.comments = [] if comments is None else comments # type: ignore
def __repr__(self) -> str:
@ -77,7 +77,8 @@ class Grid:
copy = __copy__
def __eq__(self, other: object) -> bool:
def __eq__(self,
other: object) -> bool:
"""
Test equality of other.
@ -101,17 +102,18 @@ class Grid:
return self._material
@material.setter
def material(self, material: np.ndarray):
def material(self,
material: np.ndarray):
if len(material.shape) != 3:
raise ValueError(f'invalid material shape {material.shape}')
elif material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
if material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
raise TypeError(f'invalid material data type {material.dtype}')
else:
self._material = np.copy(material)
if self.material.dtype in np.sctypes['float'] and \
np.all(self.material == self.material.astype(int).astype(float)):
self._material = self.material.astype(int)
self._material = np.copy(material)
if self.material.dtype in np.sctypes['float'] and \
np.all(self.material == self.material.astype(int).astype(float)):
self._material = self.material.astype(int)
@property
@ -120,11 +122,12 @@ class Grid:
return self._size
@size.setter
def size(self, size: FloatSequence):
def size(self,
size: FloatSequence):
if len(size) != 3 or any(np.array(size) < 0):
raise ValueError(f'invalid size {size}')
else:
self._size = np.array(size)
self._size = np.array(size)
@property
def origin(self) -> np.ndarray:
@ -132,11 +135,12 @@ class Grid:
return self._origin
@origin.setter
def origin(self, origin: FloatSequence):
def origin(self,
origin: FloatSequence):
if len(origin) != 3:
raise ValueError(f'invalid origin {origin}')
else:
self._origin = np.array(origin)
self._origin = np.array(origin)
@property
def comments(self) -> List[str]:
@ -144,7 +148,8 @@ class Grid:
return self._comments
@comments.setter
def comments(self, comments: Union[str, Sequence[str]]):
def comments(self,
comments: Union[str, Sequence[str]]):
self._comments = [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
@ -229,8 +234,7 @@ class Grid:
content = f.readlines()
for i,line in enumerate(content[:header_length]):
items = line.split('#')[0].lower().strip().split()
key = items[0] if items else ''
if key == 'grid':
if (key := items[0] if items else '') == 'grid':
cells = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
elif key == 'size':
size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
@ -242,8 +246,7 @@ class Grid:
material = np.empty(int(cells.prod())) # initialize as flat array
i = 0
for line in content[header_length:]:
items = line.split('#')[0].split()
if len(items) == 3:
if len(items := line.split('#')[0].split()) == 3:
if items[1].lower() == 'of':
material_entry = np.ones(int(items[0]))*float(items[2])
elif items[1].lower() == 'to':
@ -387,7 +390,9 @@ class Grid:
@staticmethod
def _find_closest_seed(seeds: np.ndarray, weights: np.ndarray, point: np.ndarray) -> np.integer:
def _find_closest_seed(seeds: np.ndarray,
weights: np.ndarray,
point: np.ndarray) -> np.integer:
return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
@staticmethod
@ -624,7 +629,9 @@ class Grid:
)
def save(self, fname: Union[str, Path], compress: bool = True):
def save(self,
fname: Union[str, Path],
compress: bool = True):
"""
Save as VTK image data file.
@ -643,7 +650,8 @@ class Grid:
v.save(fname,parallel=False,compress=compress)
def save_ASCII(self, fname: Union[str, TextIO]):
def save_ASCII(self,
fname: Union[str, TextIO]):
"""
Save as geom file.
@ -770,15 +778,16 @@ class Grid:
)
def mirror(self, directions: Sequence[str], reflect: bool = False) -> 'Grid':
def mirror(self,
directions: Sequence[str],
reflect: bool = False) -> 'Grid':
"""
Mirror grid along given directions.
Parameters
----------
directions : (sequence of) str
directions : (sequence of) {'x', 'y', 'z'}
Direction(s) along which the grid is mirrored.
Valid entries are 'x', 'y', 'z'.
reflect : bool, optional
Reflect (include) outermost layers. Defaults to False.
@ -801,8 +810,7 @@ class Grid:
# materials: 1
"""
valid = ['x','y','z']
if not set(directions).issubset(valid):
if not set(directions).issubset(valid := ['x', 'y', 'z']):
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
limits: Sequence[Optional[int]] = [None,None] if reflect else [-2,0]
@ -822,15 +830,15 @@ class Grid:
)
def flip(self, directions: Union[Literal['x', 'y', 'z'], Sequence[Literal['x', 'y', 'z']]]) -> 'Grid':
def flip(self,
directions: Sequence[str]) -> 'Grid':
"""
Flip grid along given directions.
Parameters
----------
directions : (sequence of) str
directions : (sequence of) {'x', 'y', 'z'}
Direction(s) along which the grid is flipped.
Valid entries are 'x', 'y', 'z'.
Returns
-------
@ -838,8 +846,7 @@ class Grid:
Updated grid-based geometry.
"""
valid = ['x','y','z']
if not set(directions).issubset(valid):
if not set(directions).issubset(valid := ['x', 'y', 'z']):
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
@ -852,7 +859,9 @@ class Grid:
)
def scale(self, cells: IntSequence, periodic: bool = True) -> 'Grid':
def scale(self,
cells: IntSequence,
periodic: bool = True) -> 'Grid':
"""
Scale grid to new cells.
@ -958,7 +967,9 @@ class Grid:
)
def rotate(self, R: Rotation, fill: int = None) -> 'Grid':
def rotate(self,
R: Rotation,
fill: int = None) -> 'Grid':
"""
Rotate grid (pad if required).
@ -1049,7 +1060,9 @@ class Grid:
)
def substitute(self, from_material: IntSequence, to_material: IntSequence) -> 'Grid':
def substitute(self,
from_material: IntSequence,
to_material: IntSequence) -> 'Grid':
"""
Substitute material indices.
@ -1150,7 +1163,9 @@ class Grid:
)
def get_grain_boundaries(self, periodic: bool = True, directions: Sequence[str] = 'xyz'):
def get_grain_boundaries(self,
periodic: bool = True,
directions: Sequence[str] = 'xyz') -> VTK:
"""
Create VTK unstructured grid containing grain boundaries.
@ -1158,9 +1173,9 @@ class Grid:
----------
periodic : bool, optional
Assume grid to be periodic. Defaults to True.
directions : (sequence of) string, optional
directions : (sequence of) {'x', 'y', 'z'}, optional
Direction(s) along which the boundaries are determined.
Valid entries are 'x', 'y', 'z'. Defaults to 'xyz'.
Defaults to 'xyz'.
Returns
-------
@ -1168,8 +1183,7 @@ class Grid:
VTK-based geometry of grain boundary network.
"""
valid = ['x','y','z']
if not set(directions).issubset(valid):
if not set(directions).issubset(valid := ['x', 'y', 'z']):
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],
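
Grid.__init__ now defaults comments to None and builds the empty list inside the body. The sketch below shows why a literal list default is avoided: the default object is created once and shared across calls. Function names are illustrative:

    from typing import List, Optional

    def log_risky(entry: str, log: list = []) -> list:
        log.append(entry)                          # same list object on every call
        return log

    def log_safe(entry: str, log: Optional[List[str]] = None) -> List[str]:
        log = [] if log is None else log           # fresh list unless one is passed in
        log.append(entry)
        return log

    print(log_risky('a'), log_risky('b'))          # ['a', 'b'] ['a', 'b']
    print(log_safe('a'), log_safe('b'))            # ['a'] ['b']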

View File

@ -1,7 +1,7 @@
import re
import copy
from pathlib import Path
from typing import Union, Optional, Tuple, List
from typing import Union, Tuple, List
import pandas as pd
import numpy as np
@ -12,7 +12,10 @@ from . import util
class Table:
"""Manipulate multi-dimensional spreadsheet-like data."""
def __init__(self, data: np.ndarray, shapes: dict, comments: Optional[Union[str, list]] = None):
def __init__(self,
data: np.ndarray,
shapes: dict,
comments: Union[str, list] = None):
"""
New spreadsheet.
@ -41,7 +44,8 @@ class Table:
return '\n'.join(['# '+c for c in self.comments])+'\n'+data_repr
def __getitem__(self, item: Union[slice, Tuple[slice, ...]]) -> 'Table':
def __getitem__(self,
item: Union[slice, Tuple[slice, ...]]) -> 'Table':
"""
Slice the Table according to item.
@ -100,7 +104,9 @@ class Table:
copy = __copy__
def _label(self, what: Union[str, List[str]], how: str) -> List[str]:
def _label(self,
what: Union[str, List[str]],
how: str) -> List[str]:
"""
Expand labels according to data shape.
@ -131,7 +137,8 @@ class Table:
return labels
def _relabel(self, how: str):
def _relabel(self,
how: str):
"""
Modify labeling of data in-place.
@ -147,7 +154,10 @@ class Table:
self.data.columns = self._label(self.shapes,how) #type: ignore
def _add_comment(self, label: str, shape: Tuple[int, ...], info: Optional[str]):
def _add_comment(self,
label: str,
shape: Tuple[int, ...],
info: str = None):
if info is not None:
specific = f'{label}{" "+str(shape) if np.prod(shape,dtype=int) > 1 else ""}: {info}'
general = util.execution_stamp('Table')
@ -309,8 +319,7 @@ class Table:
data = np.loadtxt(content)
shapes = {'eu':3, 'pos':2, 'IQ':1, 'CI':1, 'ID':1, 'intensity':1, 'fit':1}
remainder = data.shape[1]-sum(shapes.values())
if remainder > 0: # 3.8 can do: if (remainder := data.shape[1]-sum(shapes.values())) > 0
if (remainder := data.shape[1]-sum(shapes.values())) > 0:
shapes['unknown'] = remainder
return Table(data,shapes,comments)
@ -321,7 +330,8 @@ class Table:
return list(self.shapes)
def get(self, label: str) -> np.ndarray:
def get(self,
label: str) -> np.ndarray:
"""
Get column data.
@ -341,7 +351,10 @@ class Table:
return data.astype(type(data.flatten()[0]))
def set(self, label: str, data: np.ndarray, info: str = None) -> 'Table':
def set(self,
label: str,
data: np.ndarray,
info: str = None) -> 'Table':
"""
Set column data.
@ -362,8 +375,7 @@ class Table:
"""
dup = self.copy()
dup._add_comment(label, data.shape[1:], info)
m = re.match(r'(.*)\[((\d+,)*(\d+))\]',label)
if m:
if m := re.match(r'(.*)\[((\d+,)*(\d+))\]',label):
key = m.group(1)
idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(","))),
self.shapes[key])
@ -374,7 +386,10 @@ class Table:
return dup
def add(self, label: str, data: np.ndarray, info: str = None) -> 'Table':
def add(self,
label: str,
data: np.ndarray,
info: str = None) -> 'Table':
"""
Add column data.
@ -406,7 +421,8 @@ class Table:
return dup
def delete(self, label: str) -> 'Table':
def delete(self,
label: str) -> 'Table':
"""
Delete column data.
@ -427,7 +443,10 @@ class Table:
return dup
def rename(self, old: Union[str, List[str]], new: Union[str, List[str]], info: str = None) -> 'Table':
def rename(self,
old: Union[str, List[str]],
new: Union[str, List[str]],
info: str = None) -> 'Table':
"""
Rename column data.
@ -453,7 +472,9 @@ class Table:
return dup
def sort_by(self, labels: Union[str, List[str]], ascending: Union[bool, List[bool]] = True) -> 'Table':
def sort_by(self,
labels: Union[str, List[str]],
ascending: Union[bool, List[bool]] = True) -> 'Table':
"""
Sort table by values of given labels.
@ -472,8 +493,7 @@ class Table:
"""
labels_ = [labels] if isinstance(labels,str) else labels.copy()
for i,l in enumerate(labels_):
m = re.match(r'(.*)\[((\d+,)*(\d+))\]',l)
if m:
if m := re.match(r'(.*)\[((\d+,)*(\d+))\]',l):
idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(','))),
self.shapes[m.group(1)])
labels_[i] = f'{1+idx}_{m.group(1)}'
@ -486,7 +506,8 @@ class Table:
return dup
def append(self, other: 'Table') -> 'Table':
def append(self,
other: 'Table') -> 'Table':
"""
Append other table vertically (similar to numpy.vstack).
@ -505,13 +526,14 @@ class Table:
"""
if self.shapes != other.shapes or not self.data.columns.equals(other.data.columns):
raise KeyError('Labels or shapes or order do not match')
else:
dup = self.copy()
dup.data = dup.data.append(other.data,ignore_index=True)
return dup
dup = self.copy()
dup.data = dup.data.append(other.data,ignore_index=True)
return dup
def join(self, other: 'Table') -> 'Table':
def join(self,
other: 'Table') -> 'Table':
"""
Append other table horizontally (similar to numpy.hstack).
@ -530,15 +552,16 @@ class Table:
"""
if set(self.shapes) & set(other.shapes) or self.data.shape[0] != other.data.shape[0]:
raise KeyError('Duplicated keys or row count mismatch')
else:
dup = self.copy()
dup.data = dup.data.join(other.data)
for key in other.shapes:
dup.shapes[key] = other.shapes[key]
return dup
dup = self.copy()
dup.data = dup.data.join(other.data)
for key in other.shapes:
dup.shapes[key] = other.shapes[key]
return dup
def save(self, fname: FileHandle):
def save(self,
fname: FileHandle):
"""
Save as plain text file.
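
Table.set and Table.sort_by now bind the regular-expression match inside the if via an assignment expression, so the match object only exists in the branch that uses it. A self-contained sketch of the pattern; the '[1,1]' label suffix mirrors the format handled here, but the snippet itself is illustrative:

    import re

    label = 'F[1,1]'

    # walrus keeps 'm' scoped to the branch that consumes it
    if m := re.match(r'(.*)\[((\d+,)*(\d+))\]', label):
        key = m.group(1)
        idx = tuple(map(int, m.group(2).split(',')))
        print(key, idx)                            # F (1, 1)
    else:
        print('no index suffix in', label)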

View File

@ -22,7 +22,8 @@ class VTK:
High-level interface to VTK.
"""
def __init__(self, vtk_data: vtk.vtkDataSet):
def __init__(self,
vtk_data: vtk.vtkDataSet):
"""
New spatial visualization.
@ -38,7 +39,9 @@ class VTK:
@staticmethod
def from_image_data(cells: IntSequence, size: FloatSequence, origin: FloatSequence = np.zeros(3)) -> 'VTK':
def from_image_data(cells: IntSequence,
size: FloatSequence,
origin: FloatSequence = np.zeros(3)) -> 'VTK':
"""
Create VTK of type vtk.vtkImageData.
@ -68,7 +71,9 @@ class VTK:
@staticmethod
def from_rectilinear_grid(grid: np.ndarray, size: FloatSequence, origin: FloatSequence = np.zeros(3)) -> 'VTK':
def from_rectilinear_grid(grid: np.ndarray,
size: FloatSequence,
origin: FloatSequence = np.zeros(3)) -> 'VTK':
"""
Create VTK of type vtk.vtkRectilinearGrid.
@ -100,7 +105,9 @@ class VTK:
@staticmethod
def from_unstructured_grid(nodes: np.ndarray, connectivity: np.ndarray, cell_type: str) -> 'VTK':
def from_unstructured_grid(nodes: np.ndarray,
connectivity: np.ndarray,
cell_type: str) -> 'VTK':
"""
Create VTK of type vtk.vtkUnstructuredGrid.
@ -195,8 +202,7 @@ class VTK:
"""
if not os.path.isfile(fname): # vtk has a strange error handling
raise FileNotFoundError(f'No such file: {fname}')
ext = Path(fname).suffix
if ext == '.vtk' or dataset_type is not None:
if (ext := Path(fname).suffix) == '.vtk' or dataset_type is not None:
reader = vtk.vtkGenericDataObjectReader()
reader.SetFileName(str(fname))
if dataset_type is None:
@ -238,7 +244,11 @@ class VTK:
def _write(writer):
"""Wrapper for parallel writing."""
writer.Write()
def save(self, fname: Union[str, Path], parallel: bool = True, compress: bool = True):
def save(self,
fname: Union[str, Path],
parallel: bool = True,
compress: bool = True):
"""
Save as VTK file.
@ -284,7 +294,9 @@ class VTK:
# Check https://blog.kitware.com/ghost-and-blanking-visibility-changes/ for missing data
# Needs support for damask.Table
def add(self, data: Union[np.ndarray, np.ma.MaskedArray], label: str = None):
def add(self,
data: Union[np.ndarray, np.ma.MaskedArray],
label: str = None):
"""
Add data to either cells or points.
@ -331,7 +343,8 @@ class VTK:
raise TypeError
def get(self, label: str) -> np.ndarray:
def get(self,
label: str) -> np.ndarray:
"""
Get either cell or point data.
@ -383,7 +396,8 @@ class VTK:
return []
def set_comments(self, comments: Union[str, List[str]]):
def set_comments(self,
comments: Union[str, List[str]]):
"""
Set comments.
@ -400,7 +414,8 @@ class VTK:
self.vtk_data.GetFieldData().AddArray(s)
def add_comments(self, comments: Union[str, List[str]]):
def add_comments(self,
comments: Union[str, List[str]]):
"""
Add comments.
@ -440,7 +455,7 @@ class VTK:
width = tk.winfo_screenwidth()
height = tk.winfo_screenheight()
tk.destroy()
except Exception as e:
except Exception:
width = 1024
height = 768

View File

@ -20,7 +20,9 @@ import numpy as _np
from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence
def _ks(size: _FloatSequence, cells: _IntSequence, first_order: bool = False) -> _np.ndarray:
def _ks(size: _FloatSequence,
cells: _IntSequence,
first_order: bool = False) -> _np.ndarray:
"""
Get wave numbers operator.
@ -47,7 +49,8 @@ def _ks(size: _FloatSequence, cells: _IntSequence, first_order: bool = False) ->
return _np.stack(_np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij'), axis=-1)
def curl(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
def curl(size: _FloatSequence,
f: _np.ndarray) -> _np.ndarray:
u"""
Calculate curl of a vector or tensor field in Fourier space.
@ -78,7 +81,8 @@ def curl(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
return _np.fft.irfftn(curl_,axes=(0,1,2),s=f.shape[:3])
def divergence(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
def divergence(size: _FloatSequence,
f: _np.ndarray) -> _np.ndarray:
u"""
Calculate divergence of a vector or tensor field in Fourier space.
@ -105,7 +109,8 @@ def divergence(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
return _np.fft.irfftn(div_,axes=(0,1,2),s=f.shape[:3])
def gradient(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
def gradient(size: _FloatSequence,
f: _np.ndarray) -> _np.ndarray:
u"""
Calculate gradient of a scalar or vector field in Fourier space.
@ -163,7 +168,8 @@ def coordinates0_point(cells: _IntSequence,
axis = -1)
def displacement_fluct_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_fluct_point(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Cell center displacement field from fluctuation part of the deformation gradient field.
@ -195,7 +201,8 @@ def displacement_fluct_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarra
return _np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3])
def displacement_avg_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_avg_point(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Cell center displacement field from average part of the deformation gradient field.
@ -216,7 +223,8 @@ def displacement_avg_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_point(F.shape[:3],size))
def displacement_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_point(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Cell center displacement field from deformation gradient field.
@ -236,7 +244,9 @@ def displacement_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
return displacement_avg_point(size,F) + displacement_fluct_point(size,F)
def coordinates_point(size: _FloatSequence, F: _np.ndarray, origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
def coordinates_point(size: _FloatSequence,
F: _np.ndarray,
origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
"""
Cell center positions.
@ -335,7 +345,8 @@ def coordinates0_node(cells: _IntSequence,
axis = -1)
def displacement_fluct_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_fluct_node(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Nodal displacement field from fluctuation part of the deformation gradient field.
@ -355,7 +366,8 @@ def displacement_fluct_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray
return point_to_node(displacement_fluct_point(size,F))
def displacement_avg_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_avg_node(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Nodal displacement field from average part of the deformation gradient field.
@ -376,7 +388,8 @@ def displacement_avg_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_node(F.shape[:3],size))
def displacement_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_node(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Nodal displacement field from deformation gradient field.
@ -396,7 +409,9 @@ def displacement_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
return displacement_avg_node(size,F) + displacement_fluct_node(size,F)
def coordinates_node(size: _FloatSequence, F: _np.ndarray, origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
def coordinates_node(size: _FloatSequence,
F: _np.ndarray,
origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
"""
Nodal positions.
@ -526,7 +541,9 @@ def coordinates0_valid(coordinates0: _np.ndarray) -> bool:
return False
def regrid(size: _FloatSequence, F: _np.ndarray, cells: _IntSequence) -> _np.ndarray:
def regrid(size: _FloatSequence,
F: _np.ndarray,
cells: _IntSequence) -> _np.ndarray:
"""
Return mapping from coordinates in deformed configuration to a regular grid.

View File

@ -122,7 +122,9 @@ def rotation(T: _np.ndarray) -> _rotation.Rotation:
return _rotation.Rotation.from_matrix(_polar_decomposition(T,'R')[0])
def strain(F: _np.ndarray, t: str, m: float) -> _np.ndarray:
def strain(F: _np.ndarray,
t: str,
m: float) -> _np.ndarray:
"""
Calculate strain tensor (Seth-Hill family).
@ -162,7 +164,8 @@ def strain(F: _np.ndarray, t: str, m: float) -> _np.ndarray:
return eps
def stress_Cauchy(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
def stress_Cauchy(P: _np.ndarray,
F: _np.ndarray) -> _np.ndarray:
"""
Calculate the Cauchy stress (true stress).
@ -184,7 +187,8 @@ def stress_Cauchy(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
return _tensor.symmetric(_np.einsum('...,...ij,...kj',1.0/_np.linalg.det(F),P,F))
def stress_second_Piola_Kirchhoff(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
def stress_second_Piola_Kirchhoff(P: _np.ndarray,
F: _np.ndarray) -> _np.ndarray:
"""
Calculate the second Piola-Kirchhoff stress.
@ -243,7 +247,8 @@ def stretch_right(T: _np.ndarray) -> _np.ndarray:
return _polar_decomposition(T,'U')[0]
def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
def _polar_decomposition(T: _np.ndarray,
requested: _Sequence[str]) -> tuple:
"""
Perform singular value decomposition.
@ -251,7 +256,7 @@ def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
----------
T : numpy.ndarray, shape (...,3,3)
Tensor of which the singular values are computed.
requested : iterable of str
requested : sequence of {'R', 'U', 'V'}
Requested outputs: R for the rotation tensor,
V for left stretch tensor and U for right stretch tensor.
@ -273,7 +278,8 @@ def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
return tuple(output)
def _equivalent_Mises(T_sym: _np.ndarray, s: float) -> _np.ndarray:
def _equivalent_Mises(T_sym: _np.ndarray,
s: float) -> _np.ndarray:
"""
Base equation for Mises equivalent of a stress or strain tensor.

View File

@ -10,7 +10,9 @@ from . import util as _util
from . import grid_filters as _grid_filters
def from_random(size: _FloatSequence, N_seeds: int, cells: _IntSequence = None,
def from_random(size: _FloatSequence,
N_seeds: int,
cells: _IntSequence = None,
rng_seed=None) -> _np.ndarray:
"""
Place seeds randomly in space.
@ -46,8 +48,12 @@ def from_random(size: _FloatSequence, N_seeds: int, cells: _IntSequence = None,
return coords
def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, distance: float,
periodic: bool = True, rng_seed=None) -> _np.ndarray:
def from_Poisson_disc(size: _FloatSequence,
N_seeds: int,
N_candidates: int,
distance: float,
periodic: bool = True,
rng_seed=None) -> _np.ndarray:
"""
Place seeds according to a Poisson disc distribution.
@ -86,10 +92,9 @@ def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, dis
tree = _spatial.cKDTree(coords[:s],boxsize=size) if periodic else \
_spatial.cKDTree(coords[:s])
distances = tree.query(candidates)[0]
best = distances.argmax()
if distances[best] > distance: # require minimum separation
if distances.max() > distance: # require minimum separation
i = 0
coords[s] = candidates[best] # maximum separation to existing point cloud
coords[s] = candidates[distances.argmax()] # maximum separation to existing point cloud
s += 1
progress.update(s)
@ -99,8 +104,11 @@ def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, dis
return coords
def from_grid(grid, selection: _IntSequence = None, invert_selection: bool = False,
average: bool = False, periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:
def from_grid(grid,
selection: _IntSequence = None,
invert_selection: bool = False,
average: bool = False,
periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:
"""
Create seeds from grid description.

View File

@ -45,7 +45,8 @@ def eigenvalues(T_sym: _np.ndarray) -> _np.ndarray:
return _np.linalg.eigvalsh(symmetric(T_sym))
def eigenvectors(T_sym: _np.ndarray, RHS: bool = False) -> _np.ndarray:
def eigenvectors(T_sym: _np.ndarray,
RHS: bool = False) -> _np.ndarray:
"""
Eigenvectors of a symmetric tensor.
@ -63,14 +64,14 @@ def eigenvectors(T_sym: _np.ndarray, RHS: bool = False) -> _np.ndarray:
associated eigenvalues.
"""
(u,v) = _np.linalg.eigh(symmetric(T_sym))
_,v = _np.linalg.eigh(symmetric(T_sym))
if RHS:
v[_np.linalg.det(v) < 0.0,:,2] *= -1.0
if RHS: v[_np.linalg.det(v) < 0.0,:,2] *= -1.0
return v
def spherical(T: _np.ndarray, tensor: bool = True) -> _np.ndarray:
def spherical(T: _np.ndarray,
tensor: bool = True) -> _np.ndarray:
"""
Calculate spherical part of a tensor.

View File

@ -7,7 +7,7 @@ import subprocess
import shlex
import re
import fractions
import collections.abc as abc
from collections import abc
from functools import reduce
from typing import Union, Tuple, Iterable, Callable, Dict, List, Any, Literal, Optional
from pathlib import Path
@ -16,7 +16,7 @@ import numpy as np
import h5py
from . import version
from ._typehints import FloatSequence
from ._typehints import IntSequence, FloatSequence
# limit visibility
__all__=[
@ -54,7 +54,8 @@ _colors = {
####################################################################################################
# Functions
####################################################################################################
def srepr(msg, glue: str = '\n') -> str:
def srepr(msg,
glue: str = '\n') -> str:
r"""
Join items with glue string.
@ -76,7 +77,7 @@ def srepr(msg, glue: str = '\n') -> str:
hasattr(msg, '__iter__'))):
return glue.join(str(x) for x in msg)
else:
return msg if isinstance(msg,str) else repr(msg)
return msg if isinstance(msg,str) else repr(msg)
def emph(msg) -> str:
@ -148,7 +149,10 @@ def strikeout(msg) -> str:
return _colors['crossout']+srepr(msg)+_colors['end_color']
def run(cmd: str, wd: str = './', env: Dict[str, str] = None, timeout: int = None) -> Tuple[str, str]:
def run(cmd: str,
wd: str = './',
env: Dict[str, str] = None,
timeout: int = None) -> Tuple[str, str]:
"""
Run a command.
@ -226,15 +230,15 @@ def show_progress(iterable: Iterable,
"""
if isinstance(iterable,abc.Sequence):
if N_iter is None:
N = len(iterable)
else:
raise ValueError('N_iter given for sequence')
if N_iter is None:
N = len(iterable)
else:
raise ValueError('N_iter given for sequence')
else:
if N_iter is None:
raise ValueError('N_iter not given')
else:
N = N_iter
if N_iter is None:
raise ValueError('N_iter not given')
N = N_iter
if N <= 1:
for item in iterable:
@ -301,16 +305,20 @@ def project_equal_angle(vector: np.ndarray,
normalize : bool
Ensure unit length of input vector. Defaults to True.
keepdims : bool
Maintain three-dimensional output coordinates. Defaults to False.
Two-dimensional output uses right-handed frame spanned by
the next and next-next axis relative to the projection direction,
e.g. x-y when projecting along z and z-x when projecting along y.
Maintain three-dimensional output coordinates.
Defaults to False.
Returns
-------
coordinates : numpy.ndarray, shape (...,2 | 3)
Projected coordinates.
Notes
-----
Two-dimensional output uses right-handed frame spanned by
the next and next-next axis relative to the projection direction,
e.g. x-y when projecting along z and z-x when projecting along y.
Examples
--------
>>> import damask
@ -345,16 +353,21 @@ def project_equal_area(vector: np.ndarray,
normalize : bool
Ensure unit length of input vector. Defaults to True.
keepdims : bool
Maintain three-dimensional output coordinates. Defaults to False.
Two-dimensional output uses right-handed frame spanned by
the next and next-next axis relative to the projection direction,
e.g. x-y when projecting along z and z-x when projecting along y.
Maintain three-dimensional output coordinates.
Defaults to False.
Returns
-------
coordinates : numpy.ndarray, shape (...,2 | 3)
Projected coordinates.
Notes
-----
Two-dimensional output uses right-handed frame spanned by
the next and next-next axis relative to the projection direction,
e.g. x-y when projecting along z and z-x when projecting along y.
Examples
--------
>>> import damask
@ -373,14 +386,17 @@ def project_equal_area(vector: np.ndarray,
return np.roll(np.block([v[...,:2]/np.sqrt(1.0+np.abs(v[...,2:3])),np.zeros_like(v[...,2:3])]),
-shift if keepdims else 0,axis=-1)[...,:3 if keepdims else 2]
def execution_stamp(class_name: str, function_name: str = None) -> str:
def execution_stamp(class_name: str,
function_name: str = None) -> str:
"""Timestamp the execution of a (function within a) class."""
now = datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S%z')
_function_name = '' if function_name is None else f'.{function_name}'
return f'damask.{class_name}{_function_name} v{version} ({now})'
def hybrid_IA(dist: np.ndarray, N: int, rng_seed = None) -> np.ndarray:
def hybrid_IA(dist: np.ndarray,
N: int,
rng_seed: Union[int, IntSequence] = None) -> np.ndarray:
"""
Hybrid integer approximation.
@ -447,7 +463,7 @@ def shapeshifter(fro: Tuple[int, ...],
"""
if not len(fro) and not len(to): return ()
if len(fro) == 0 and len(to) == 0: return ()
beg = dict(left ='(^.*\\b)',
right='(^.*?\\b)')
@ -455,8 +471,8 @@ def shapeshifter(fro: Tuple[int, ...],
right='(.*?\\b)')
end = dict(left ='(.*?$)',
right='(.*$)')
fro = (1,) if not len(fro) else fro
to = (1,) if not len(to) else to
fro = (1,) if len(fro) == 0 else fro
to = (1,) if len(to) == 0 else to
try:
match = re.match(beg[mode]
+f',{sep[mode]}'.join(map(lambda x: f'{x}'
@ -473,7 +489,8 @@ def shapeshifter(fro: Tuple[int, ...],
return fill[:-1]
def shapeblender(a: Tuple[int, ...], b: Tuple[int, ...]) -> Tuple[int, ...]:
def shapeblender(a: Tuple[int, ...],
b: Tuple[int, ...]) -> Tuple[int, ...]:
"""
Return a shape that overlaps the rightmost entries of 'a' with the leftmost of 'b'.
@ -517,7 +534,8 @@ def extend_docstring(extra_docstring: str) -> Callable:
return _decorator
def extended_docstring(f: Callable, extra_docstring: str) -> Callable:
def extended_docstring(f: Callable,
extra_docstring: str) -> Callable:
"""
Decorator: Combine another function's docstring with a given docstring.
@ -593,7 +611,9 @@ def DREAM3D_cell_data_group(fname: Union[str, Path]) -> str:
return cell_data_group
def Bravais_to_Miller(*, uvtw: np.ndarray = None, hkil: np.ndarray = None) -> np.ndarray:
def Bravais_to_Miller(*,
uvtw: np.ndarray = None,
hkil: np.ndarray = None) -> np.ndarray:
"""
Transform 4 Miller-Bravais indices to 3 Miller indices of crystal direction [uvw] or plane normal (hkl).
@ -620,7 +640,9 @@ def Bravais_to_Miller(*, uvtw: np.ndarray = None, hkil: np.ndarray = None) -> np
return np.einsum('il,...l',basis,axis)
def Miller_to_Bravais(*, uvw: np.ndarray = None, hkl: np.ndarray = None) -> np.ndarray:
def Miller_to_Bravais(*,
uvw: np.ndarray = None,
hkl: np.ndarray = None) -> np.ndarray:
"""
Transform 3 Miller indices to 4 Miller-Bravais indices of crystal direction [uvtw] or plane normal (hkil).
@ -710,7 +732,10 @@ class ProgressBar:
Works for 0-based loops, ETA is estimated by linear extrapolation.
"""
def __init__(self, total: int, prefix: str, bar_length: int):
def __init__(self,
total: int,
prefix: str,
bar_length: int):
"""
Set current time as basis for ETA estimation.
@ -733,12 +758,12 @@ class ProgressBar:
sys.stderr.write(f"{self.prefix} {'░'*self.bar_length} 0% ETA n/a")
sys.stderr.flush()
def update(self, iteration: int) -> None:
def update(self,
iteration: int) -> None:
fraction = (iteration+1) / self.total
filled_length = int(self.bar_length * fraction)
if filled_length > int(self.bar_length * self.fraction_last) or \
if filled_length := int(self.bar_length * fraction) > int(self.bar_length * self.fraction_last) or \
datetime.datetime.now() - self.time_last_update > datetime.timedelta(seconds=10):
self.time_last_update = datetime.datetime.now()
bar = '█' * filled_length + '░' * (self.bar_length - filled_length)

View File

@ -1,3 +1,5 @@
[mypy]
warn_redundant_casts = True
[mypy-scipy.*]
ignore_missing_imports = True
[mypy-h5py.*]