Merge branch 'development' of git.damask.mpie.de:damask/DAMASK into typehints_orientation_rotation

This commit is contained in:
Daniel Otto de Mentock 2022-02-01 11:56:01 +01:00
commit ef051ceef9
22 changed files with 608 additions and 292 deletions

View File

@ -1 +1 @@
v3.0.0-alpha5-518-g4fa97b9a3
v3.0.0-alpha5-545-gad74f5dbe

View File

@ -47,27 +47,32 @@ class Colormap(mpl.colors.ListedColormap):
"""
def __eq__(self, other: object) -> bool:
def __eq__(self,
other: object) -> bool:
"""Test equality of colormaps."""
if not isinstance(other, Colormap):
return NotImplemented
return len(self.colors) == len(other.colors) \
and bool(np.all(self.colors == other.colors))
def __add__(self, other: 'Colormap') -> 'Colormap':
def __add__(self,
other: 'Colormap') -> 'Colormap':
"""Concatenate."""
return Colormap(np.vstack((self.colors,other.colors)),
f'{self.name}+{other.name}')
def __iadd__(self, other: 'Colormap') -> 'Colormap':
def __iadd__(self,
other: 'Colormap') -> 'Colormap':
"""Concatenate (in-place)."""
return self.__add__(other)
def __mul__(self, factor: int) -> 'Colormap':
def __mul__(self,
factor: int) -> 'Colormap':
"""Repeat."""
return Colormap(np.vstack([self.colors]*factor),f'{self.name}*{factor}')
def __imul__(self, factor: int) -> 'Colormap':
def __imul__(self,
factor: int) -> 'Colormap':
"""Repeat (in-place)."""
return self.__mul__(factor)
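A minimal usage sketch of the operators above (not part of the diff; assumes the damask Python package at this revision and the predefined 'strain' colormap):

import damask

c = damask.Colormap.from_predefined('strain', N=16)
combined = c + c.reversed()                      # __add__ concatenates the color tables
tiled    = c * 3                                 # __mul__ repeats the color table
print(len(combined.colors), len(tiled.colors))   # 32 48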
@ -161,7 +166,8 @@ class Colormap(mpl.colors.ListedColormap):
@staticmethod
def from_predefined(name: str, N: int = 256) -> 'Colormap':
def from_predefined(name: str,
N: int = 256) -> 'Colormap':
"""
Select from a set of predefined colormaps.
@ -260,10 +266,8 @@ class Colormap(mpl.colors.ListedColormap):
l,r = (field[mask].min(),field[mask].max()) if bounds is None else \
(bounds[0],bounds[1])
delta,avg = r-l,0.5*abs(r+l)
if abs(delta) * 1e8 <= avg: # delta is similar to numerical noise
l,r = l-0.5*avg*np.sign(delta),r+0.5*avg*np.sign(delta), # extend range to have actual data centered within
if abs(delta := r-l) * 1e8 <= (avg := 0.5*abs(r+l)): # delta is similar to numerical noise
l,r = (l-0.5*avg*np.sign(delta),r+0.5*avg*np.sign(delta)) # extend range to have actual data centered within
return Image.fromarray(
(np.dstack((
@ -275,7 +279,8 @@ class Colormap(mpl.colors.ListedColormap):
mode='RGBA')
def reversed(self, name: str = None) -> 'Colormap':
def reversed(self,
name: str = None) -> 'Colormap':
"""
Reverse.
@ -296,7 +301,7 @@ class Colormap(mpl.colors.ListedColormap):
>>> damask.Colormap.from_predefined('stress').reversed()
"""
rev = super(Colormap,self).reversed(name)
rev = super().reversed(name)
return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name)
@ -328,7 +333,8 @@ class Colormap(mpl.colors.ListedColormap):
return fname
def save_paraview(self, fname: FileHandle = None):
def save_paraview(self,
fname: FileHandle = None):
"""
Save as JSON file for use in Paraview.
@ -355,7 +361,8 @@ class Colormap(mpl.colors.ListedColormap):
fhandle.write('\n')
def save_ASCII(self, fname: FileHandle = None):
def save_ASCII(self,
fname: FileHandle = None):
"""
Save as ASCII file.
@ -390,7 +397,8 @@ class Colormap(mpl.colors.ListedColormap):
self._get_file_handle(fname,'.legend').write(GOM_str)
def save_gmsh(self, fname: FileHandle = None):
def save_gmsh(self,
fname: FileHandle = None):
"""
Save as ASCII file for use in gmsh.
@ -428,11 +436,11 @@ class Colormap(mpl.colors.ListedColormap):
def adjust_hue(msh_sat, msh_unsat):
"""If saturation of one of the two colors is much less than the other, hue of the less."""
if msh_sat[0] >= msh_unsat[0]:
return msh_sat[2]
else:
hSpin = msh_sat[1]/np.sin(msh_sat[1])*np.sqrt(msh_unsat[0]**2.0-msh_sat[0]**2)/msh_sat[0]
if msh_sat[2] < - np.pi/3.0: hSpin *= -1.0
return msh_sat[2] + hSpin
return msh_sat[2]
hSpin = msh_sat[1]/np.sin(msh_sat[1])*np.sqrt(msh_unsat[0]**2.0-msh_sat[0]**2)/msh_sat[0]
if msh_sat[2] < - np.pi/3.0: hSpin *= -1.0
return msh_sat[2] + hSpin
lo = np.array(low)
hi = np.array(high)
@ -445,9 +453,9 @@ class Colormap(mpl.colors.ListedColormap):
else:
lo = np.array([M_mid,0.0,0.0])
frac = 2.0*frac - 1.0
if lo[1] < 0.05 and hi[1] > 0.05:
if lo[1] < 0.05 < hi[1]:
lo[2] = adjust_hue(hi,lo)
elif lo[1] > 0.05 and hi[1] < 0.05:
elif hi[1] < 0.05 < lo[1]:
hi[2] = adjust_hue(lo,hi)
return (1.0 - frac) * lo + frac * hi
@ -476,7 +484,7 @@ class Colormap(mpl.colors.ListedColormap):
'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg',
'gist_rainbow', 'rainbow', 'jet', 'nipy_spectral', 'gist_ncar']}
_predefined_DAMASK = {'orientation': {'low': [0.933334,0.878432,0.878431],
_predefined_DAMASK = {'orientation': {'low': [0.933334,0.878432,0.878431], # noqa
'high': [0.250980,0.007843,0.000000]},
'strain': {'low': [0.941177,0.941177,0.870588],
'high': [0.266667,0.266667,0.000000]},
@ -621,7 +629,8 @@ class Colormap(mpl.colors.ListedColormap):
@staticmethod
def _lab2xyz(lab: np.ndarray, ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
def _lab2xyz(lab: np.ndarray,
ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
"""
CIE Lab to CIE Xyz.
@ -652,7 +661,8 @@ class Colormap(mpl.colors.ListedColormap):
])*ref_white
@staticmethod
def _xyz2lab(xyz: np.ndarray, ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
def _xyz2lab(xyz: np.ndarray,
ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
"""
CIE Xyz to CIE Lab.

View File

@ -2,25 +2,34 @@ import copy
from io import StringIO
from collections.abc import Iterable
import abc
from pathlib import Path
from typing import Union, Dict, Any, Type, TypeVar
import numpy as np
import yaml
from ._typehints import FileHandle
from . import Rotation
MyType = TypeVar('MyType', bound='Config')
class NiceDumper(yaml.SafeDumper):
"""Make YAML readable for humans."""
def write_line_break(self, data=None):
def write_line_break(self,
data: str = None):
super().write_line_break(data)
if len(self.indents) == 1:
super().write_line_break()
def increase_indent(self, flow=False, indentless=False):
def increase_indent(self,
flow: bool = False,
indentless: bool = False):
return super().increase_indent(flow, False)
def represent_data(self, data):
def represent_data(self,
data: Any):
"""Cast Config objects and its subclasses to dict."""
if isinstance(data, dict) and type(data) != dict:
return self.represent_data(dict(data))
@ -31,14 +40,17 @@ class NiceDumper(yaml.SafeDumper):
else:
return super().represent_data(data)
def ignore_aliases(self, data):
def ignore_aliases(self,
data: Any) -> bool:
"""Do not use references to existing objects."""
return True
class Config(dict):
"""YAML-based configuration."""
def __init__(self,yml=None,**kwargs):
def __init__(self,
yml: Union[str, Dict[str, Any]] = None,
**kwargs):
"""Initialize from YAML, dict, or key=value pairs."""
if isinstance(yml,str):
kwargs.update(yaml.safe_load(yml))
@ -47,7 +59,7 @@ class Config(dict):
super().__init__(**kwargs)
def __repr__(self):
def __repr__(self) -> str:
"""Show as in file."""
output = StringIO()
self.save(output)
@ -55,14 +67,15 @@ class Config(dict):
return ''.join(output.readlines())
def __copy__(self):
def __copy__(self: MyType) -> MyType:
"""Create deep copy."""
return copy.deepcopy(self)
copy = __copy__
def __or__(self,other):
def __or__(self: MyType,
other) -> MyType:
"""
Update configuration with contents of other.
@ -76,18 +89,24 @@ class Config(dict):
updated : damask.Config
Updated configuration.
Note
----
This functionality is a backport for Python 3.8, where dict does not yet support the | operator.
"""
duplicate = self.copy()
duplicate.update(other)
return duplicate
def __ior__(self,other):
def __ior__(self: MyType,
other) -> MyType:
"""Update configuration with contents of other."""
return self.__or__(other)
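A minimal sketch of the merge semantics that __or__/__ior__ backport for Python 3.8 (not part of the diff; the keys are arbitrary examples):

import damask

base    = damask.Config(solver='spectral', N_iter=40)
updated = base | {'N_iter': 80}     # __or__ returns an updated copy
base   |= {'N_staggered': 10}       # __ior__ updates via the same mechanism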
def delete(self,keys):
def delete(self: MyType,
keys: Union[Iterable, str]) -> MyType:
"""
Remove configuration keys.
@ -109,7 +128,8 @@ class Config(dict):
@classmethod
def load(cls,fname):
def load(cls: Type[MyType],
fname: FileHandle) -> MyType:
"""
Load from yaml file.
@ -124,14 +144,15 @@ class Config(dict):
Configuration from file.
"""
try:
if isinstance(fname, (str, Path)):
fhandle = open(fname)
except TypeError:
else:
fhandle = fname
return cls(yaml.safe_load(fhandle))
def save(self,fname,**kwargs):
def save(self,
fname: FileHandle,
**kwargs):
"""
Save to yaml file.
@ -143,9 +164,9 @@ class Config(dict):
Keyword arguments parsed to yaml.dump.
"""
try:
if isinstance(fname, (str, Path)):
fhandle = open(fname,'w',newline='\n')
except TypeError:
else:
fhandle = fname
if 'width' not in kwargs:
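A sketch of what the FileHandle handling above means in practice (not part of the diff): path-like arguments are opened internally, already-open streams are used as-is.

from io import StringIO
import damask

cfg = damask.Config(a=1, b=[1, 2, 3])
cfg.save('example.yaml')    # str/Path branch: opened internally
cfg.save(StringIO())        # stream branch: used as-is
print(damask.Config.load('example.yaml'))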

View File

@ -1,10 +1,14 @@
import numpy as np
import h5py
from typing import Sequence, Dict, Any, Collection
from ._typehints import FileHandle
from . import Config
from . import Rotation
from . import Orientation
from . import util
from . import Table
class ConfigMaterial(Config):
"""
@ -17,7 +21,9 @@ class ConfigMaterial(Config):
"""
def __init__(self,d=None,**kwargs):
def __init__(self,
d: Dict[str, Any] = None,
**kwargs):
"""
New material configuration.
@ -30,14 +36,17 @@ class ConfigMaterial(Config):
Initial content specified as pairs of key=value.
"""
default: Collection
if d is None:
for section,default in {'material':[],'homogenization':{},'phase':{}}.items():
for section, default in {'material':[],'homogenization':{},'phase':{}}.items():
if section not in kwargs: kwargs.update({section:default})
super().__init__(d,**kwargs)
def save(self,fname='material.yaml',**kwargs):
def save(self,
fname: FileHandle = 'material.yaml',
**kwargs):
"""
Save to yaml file.
@ -53,7 +62,8 @@ class ConfigMaterial(Config):
@classmethod
def load(cls,fname='material.yaml'):
def load(cls,
fname: FileHandle = 'material.yaml') -> 'ConfigMaterial':
"""
Load from yaml file.
@ -72,10 +82,14 @@ class ConfigMaterial(Config):
@staticmethod
def load_DREAM3D(fname,
grain_data=None,cell_data=None,cell_ensemble_data='CellEnsembleData',
phases='Phases',Euler_angles='EulerAngles',phase_names='PhaseName',
base_group=None):
def load_DREAM3D(fname: str,
grain_data: str = None,
cell_data: str = None,
cell_ensemble_data: str = 'CellEnsembleData',
phases: str = 'Phases',
Euler_angles: str = 'EulerAngles',
phase_names: str = 'PhaseName',
base_group: str = None) -> 'ConfigMaterial':
"""
Load DREAM.3D (HDF5) file.
@ -154,7 +168,8 @@ class ConfigMaterial(Config):
@staticmethod
def from_table(table,**kwargs):
def from_table(table: Table,
**kwargs) -> 'ConfigMaterial':
"""
Generate from an ASCII table.
@ -207,7 +222,7 @@ class ConfigMaterial(Config):
@property
def is_complete(self):
def is_complete(self) -> bool:
"""
Check for completeness.
@ -267,12 +282,11 @@ class ConfigMaterial(Config):
if homogenization - set(self['homogenization']):
print(f'Homogenization(s) {homogenization-set(self["homogenization"])} missing')
ok = False
return ok
@property
def is_valid(self):
def is_valid(self) -> bool:
"""
Check for valid content.
@ -316,7 +330,10 @@ class ConfigMaterial(Config):
return ok
def material_rename_phase(self,mapping,ID=None,constituent=None):
def material_rename_phase(self,
mapping: Dict[str, str],
ID: Sequence[int] = None,
constituent: Sequence[int] = None) -> 'ConfigMaterial':
"""
Change phase name in material.
@ -347,7 +364,9 @@ class ConfigMaterial(Config):
return dup
def material_rename_homogenization(self,mapping,ID=None):
def material_rename_homogenization(self,
mapping: Dict[str, str],
ID: Sequence[int] = None) -> 'ConfigMaterial':
"""
Change homogenization name in material.
@ -374,7 +393,8 @@ class ConfigMaterial(Config):
return dup
def material_add(self,**kwargs):
def material_add(self,
**kwargs: Any) -> 'ConfigMaterial':
"""
Add material entries.
@ -453,7 +473,7 @@ class ConfigMaterial(Config):
N = max(N,s[0]) if len(s)>0 else N
n = max(n,s[1]) if len(s)>1 else n
mat = [{'constituents':[{} for _ in range(n)]} for _ in range(N)]
mat: Sequence[dict] = [{'constituents':[{} for _ in range(n)]} for _ in range(N)]
if 'v' not in kwargs:
shaped['v'] = np.broadcast_to(1/n,(N,n))
@ -461,7 +481,7 @@ class ConfigMaterial(Config):
map_shape = {'O':(N,n,4),'F_i':(N,n,3,3)}
for k,v in shaped.items():
target = map_shape.get(k,(N,n))
obj = np.broadcast_to(v.reshape(util.shapeshifter(v.shape,target,mode='right')),target)
obj = np.broadcast_to(v.reshape(util.shapeshifter(v.shape, target, mode = 'right')), target)
for i in range(N):
if k in ['phase','O','v','F_i']:
for j in range(n):
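A hedged usage sketch of material_add and the broadcasting above (not part of the diff; the phase and homogenization names are placeholders):

import damask

m = damask.ConfigMaterial()
m = m.material_add(phase='Aluminum',
                   O=damask.Rotation.from_random(shape=3).as_quaternion(),
                   homogenization='SX')
print(len(m['material']))   # 3 materials, one constituent each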

View File

@ -33,9 +33,9 @@ class Crystal():
def __init__(self,*,
family = None,
lattice = None,
a = None,b = None,c = None,
alpha = None,beta = None,gamma = None,
degrees = False):
a: float = None, b: float = None, c: float = None,
alpha: float = None, beta: float = None, gamma: float = None,
degrees: bool = False):
"""
Representation of crystal in terms of crystal family or Bravais lattice.
@ -62,7 +62,7 @@ class Crystal():
Angles are given in degrees. Defaults to False.
"""
if family not in [None] + list(lattice_symmetries.values()):
if family is not None and family not in list(lattice_symmetries.values()):
raise KeyError(f'invalid crystal family "{family}"')
if lattice is not None and family is not None and family != lattice_symmetries[lattice]:
raise KeyError(f'incompatible family "{family}" for lattice "{lattice}"')
@ -107,9 +107,6 @@ class Crystal():
if np.any([np.roll([self.alpha,self.beta,self.gamma],r)[0]
>= np.sum(np.roll([self.alpha,self.beta,self.gamma],r)[1:]) for r in range(3)]):
raise ValueError ('each lattice angle must be less than sum of others')
else:
self.a = self.b = self.c = None
self.alpha = self.beta = self.gamma = None
def __repr__(self):
@ -122,7 +119,8 @@ class Crystal():
'α={:.5g}°, β={:.5g}°, γ={:.5g}°'.format(*np.degrees(self.parameters[3:]))])
def __eq__(self,other):
def __eq__(self,
other: object) -> bool:
"""
Equal to other.
@ -132,6 +130,8 @@ class Crystal():
Crystal to check for equality.
"""
if not isinstance(other, Crystal):
return NotImplemented
return self.lattice == other.lattice and \
self.parameters == other.parameters and \
self.family == other.family
@ -139,8 +139,7 @@ class Crystal():
@property
def parameters(self):
"""Return lattice parameters a, b, c, alpha, beta, gamma."""
return (self.a,self.b,self.c,self.alpha,self.beta,self.gamma)
if hasattr(self,'a'): return (self.a,self.b,self.c,self.alpha,self.beta,self.gamma)
@property
def immutable(self):
@ -269,7 +268,7 @@ class Crystal():
https://doi.org/10.1063/1.1661333
"""
if None in self.parameters:
if self.parameters is None:
raise KeyError('missing crystal lattice parameters')
return np.array([
[1,0,0],
@ -315,7 +314,9 @@ class Crystal():
+ _lattice_points.get(self.lattice if self.lattice == 'hP' else \
self.lattice[-1],None),dtype=float)
def to_lattice(self, *, direction: np.ndarray = None, plane: np.ndarray = None) -> np.ndarray:
def to_lattice(self, *,
direction: np.ndarray = None,
plane: np.ndarray = None) -> np.ndarray:
"""
Calculate lattice vector corresponding to crystal frame direction or plane normal.
@ -339,7 +340,9 @@ class Crystal():
return np.einsum('il,...l',basis,axis)
def to_frame(self, *, uvw: np.ndarray = None, hkl: np.ndarray = None) -> np.ndarray:
def to_frame(self, *,
uvw: np.ndarray = None,
hkl: np.ndarray = None) -> np.ndarray:
"""
Calculate crystal frame vector along lattice direction [uvw] or plane normal (hkl).
@ -362,7 +365,8 @@ class Crystal():
return np.einsum('il,...l',basis,axis)
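A minimal sketch of the keyword-only conversions above (not part of the diff; the hexagonal lattice parameters are illustrative):

import damask

c = damask.Crystal(lattice='hP', a=0.32e-9, c=0.52e-9)
print(c.to_frame(uvw=[1, 0, 0]))       # lattice direction -> crystal frame
print(c.to_lattice(plane=[0, 0, 1]))   # crystal-frame plane normal -> lattice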
def kinematics(self, mode: str) -> Dict[str, List[np.ndarray]]:
def kinematics(self,
mode: str) -> Dict[str, List[np.ndarray]]:
"""
Return crystal kinematics systems.
@ -621,7 +625,8 @@ class Crystal():
'plane': [m[:,3:6] for m in master]}
def relation_operations(self, model: str) -> Tuple[str, Rotation]:
def relation_operations(self,
model: str) -> Tuple[str, Rotation]:
"""
Crystallographic orientation relationships for phase transformations.

View File

@ -4,7 +4,7 @@ import warnings
import multiprocessing as mp
from functools import partial
import typing
from typing import Union, Optional, TextIO, List, Sequence, Literal
from typing import Union, Optional, TextIO, List, Sequence
from pathlib import Path
import numpy as np
@ -33,7 +33,7 @@ class Grid:
material: np.ndarray,
size: FloatSequence,
origin: FloatSequence = np.zeros(3),
comments: Union[str, Sequence[str]] = []):
comments: Union[str, Sequence[str]] = None):
"""
New geometry definition for grid solvers.
@ -53,7 +53,7 @@ class Grid:
self.material = material
self.size = size # type: ignore
self.origin = origin # type: ignore
self.comments = comments # type: ignore
self.comments = [] if comments is None else comments # type: ignore
def __repr__(self) -> str:
@ -77,7 +77,8 @@ class Grid:
copy = __copy__
def __eq__(self, other: object) -> bool:
def __eq__(self,
other: object) -> bool:
"""
Test equality of other.
@ -101,17 +102,18 @@ class Grid:
return self._material
@material.setter
def material(self, material: np.ndarray):
def material(self,
material: np.ndarray):
if len(material.shape) != 3:
raise ValueError(f'invalid material shape {material.shape}')
elif material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
if material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
raise TypeError(f'invalid material data type {material.dtype}')
else:
self._material = np.copy(material)
if self.material.dtype in np.sctypes['float'] and \
np.all(self.material == self.material.astype(int).astype(float)):
self._material = self.material.astype(int)
self._material = np.copy(material)
if self.material.dtype in np.sctypes['float'] and \
np.all(self.material == self.material.astype(int).astype(float)):
self._material = self.material.astype(int)
@property
@ -120,11 +122,12 @@ class Grid:
return self._size
@size.setter
def size(self, size: FloatSequence):
def size(self,
size: FloatSequence):
if len(size) != 3 or any(np.array(size) < 0):
raise ValueError(f'invalid size {size}')
else:
self._size = np.array(size)
self._size = np.array(size)
@property
def origin(self) -> np.ndarray:
@ -132,11 +135,12 @@ class Grid:
return self._origin
@origin.setter
def origin(self, origin: FloatSequence):
def origin(self,
origin: FloatSequence):
if len(origin) != 3:
raise ValueError(f'invalid origin {origin}')
else:
self._origin = np.array(origin)
self._origin = np.array(origin)
@property
def comments(self) -> List[str]:
@ -144,7 +148,8 @@ class Grid:
return self._comments
@comments.setter
def comments(self, comments: Union[str, Sequence[str]]):
def comments(self,
comments: Union[str, Sequence[str]]):
self._comments = [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
@ -229,8 +234,7 @@ class Grid:
content = f.readlines()
for i,line in enumerate(content[:header_length]):
items = line.split('#')[0].lower().strip().split()
key = items[0] if items else ''
if key == 'grid':
if (key := items[0] if items else '') == 'grid':
cells = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
elif key == 'size':
size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
@ -242,8 +246,7 @@ class Grid:
material = np.empty(int(cells.prod())) # initialize as flat array
i = 0
for line in content[header_length:]:
items = line.split('#')[0].split()
if len(items) == 3:
if len(items := line.split('#')[0].split()) == 3:
if items[1].lower() == 'of':
material_entry = np.ones(int(items[0]))*float(items[2])
elif items[1].lower() == 'to':
@ -387,7 +390,9 @@ class Grid:
@staticmethod
def _find_closest_seed(seeds: np.ndarray, weights: np.ndarray, point: np.ndarray) -> np.integer:
def _find_closest_seed(seeds: np.ndarray,
weights: np.ndarray,
point: np.ndarray) -> np.integer:
return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
@staticmethod
@ -624,7 +629,9 @@ class Grid:
)
def save(self, fname: Union[str, Path], compress: bool = True):
def save(self,
fname: Union[str, Path],
compress: bool = True):
"""
Save as VTK image data file.
@ -643,7 +650,8 @@ class Grid:
v.save(fname,parallel=False,compress=compress)
def save_ASCII(self, fname: Union[str, TextIO]):
def save_ASCII(self,
fname: Union[str, TextIO]):
"""
Save as geom file.
@ -770,15 +778,16 @@ class Grid:
)
def mirror(self, directions: Sequence[str], reflect: bool = False) -> 'Grid':
def mirror(self,
directions: Sequence[str],
reflect: bool = False) -> 'Grid':
"""
Mirror grid along given directions.
Parameters
----------
directions : (sequence of) str
directions : (sequence of) {'x', 'y', 'z'}
Direction(s) along which the grid is mirrored.
Valid entries are 'x', 'y', 'z'.
reflect : bool, optional
Reflect (include) outermost layers. Defaults to False.
@ -801,8 +810,7 @@ class Grid:
# materials: 1
"""
valid = ['x','y','z']
if not set(directions).issubset(valid):
if not set(directions).issubset(valid := ['x', 'y', 'z']):
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
limits: Sequence[Optional[int]] = [None,None] if reflect else [-2,0]
@ -822,15 +830,15 @@ class Grid:
)
def flip(self, directions: Union[Literal['x', 'y', 'z'], Sequence[Literal['x', 'y', 'z']]]) -> 'Grid':
def flip(self,
directions: Sequence[str]) -> 'Grid':
"""
Flip grid along given directions.
Parameters
----------
directions : (sequence of) str
directions : (sequence of) {'x', 'y', 'z'}
Direction(s) along which the grid is flipped.
Valid entries are 'x', 'y', 'z'.
Returns
-------
@ -838,8 +846,7 @@ class Grid:
Updated grid-based geometry.
"""
valid = ['x','y','z']
if not set(directions).issubset(valid):
if not set(directions).issubset(valid := ['x', 'y', 'z']):
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
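A minimal sketch of mirror/flip with the direction sets validated above (not part of the diff):

import numpy as np
import damask

g = damask.Grid(material=np.zeros((4, 4, 4), dtype=int),
                size=[1.0, 1.0, 1.0])
print(g.mirror('xy', reflect=True))    # 'xy' is treated as the sequence ('x','y')
print(g.flip(['x', 'y']))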
@ -852,7 +859,9 @@ class Grid:
)
def scale(self, cells: IntSequence, periodic: bool = True) -> 'Grid':
def scale(self,
cells: IntSequence,
periodic: bool = True) -> 'Grid':
"""
Scale grid to new cells.
@ -958,7 +967,9 @@ class Grid:
)
def rotate(self, R: Rotation, fill: int = None) -> 'Grid':
def rotate(self,
R: Rotation,
fill: int = None) -> 'Grid':
"""
Rotate grid (pad if required).
@ -1049,7 +1060,9 @@ class Grid:
)
def substitute(self, from_material: IntSequence, to_material: IntSequence) -> 'Grid':
def substitute(self,
from_material: IntSequence,
to_material: IntSequence) -> 'Grid':
"""
Substitute material indices.
@ -1150,7 +1163,9 @@ class Grid:
)
def get_grain_boundaries(self, periodic: bool = True, directions: Sequence[str] = 'xyz'):
def get_grain_boundaries(self,
periodic: bool = True,
directions: Sequence[str] = 'xyz') -> VTK:
"""
Create VTK unstructured grid containing grain boundaries.
@ -1158,9 +1173,9 @@ class Grid:
----------
periodic : bool, optional
Assume grid to be periodic. Defaults to True.
directions : (sequence of) string, optional
directions : (sequence of) {'x', 'y', 'z'}, optional
Direction(s) along which the boundaries are determined.
Valid entries are 'x', 'y', 'z'. Defaults to 'xyz'.
Defaults to 'xyz'.
Returns
-------
@ -1168,8 +1183,7 @@ class Grid:
VTK-based geometry of grain boundary network.
"""
valid = ['x','y','z']
if not set(directions).issubset(valid):
if not set(directions).issubset(valid := ['x', 'y', 'z']):
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],

View File

@ -1,7 +1,7 @@
import re
import copy
from pathlib import Path
from typing import Union, Optional, Tuple, List
from typing import Union, Tuple, List
import pandas as pd
import numpy as np
@ -12,7 +12,10 @@ from . import util
class Table:
"""Manipulate multi-dimensional spreadsheet-like data."""
def __init__(self, data: np.ndarray, shapes: dict, comments: Optional[Union[str, list]] = None):
def __init__(self,
data: np.ndarray,
shapes: dict,
comments: Union[str, list] = None):
"""
New spreadsheet.
@ -41,7 +44,8 @@ class Table:
return '\n'.join(['# '+c for c in self.comments])+'\n'+data_repr
def __getitem__(self, item: Union[slice, Tuple[slice, ...]]) -> 'Table':
def __getitem__(self,
item: Union[slice, Tuple[slice, ...]]) -> 'Table':
"""
Slice the Table according to item.
@ -100,7 +104,9 @@ class Table:
copy = __copy__
def _label(self, what: Union[str, List[str]], how: str) -> List[str]:
def _label(self,
what: Union[str, List[str]],
how: str) -> List[str]:
"""
Expand labels according to data shape.
@ -131,7 +137,8 @@ class Table:
return labels
def _relabel(self, how: str):
def _relabel(self,
how: str):
"""
Modify labeling of data in-place.
@ -147,7 +154,10 @@ class Table:
self.data.columns = self._label(self.shapes,how) #type: ignore
def _add_comment(self, label: str, shape: Tuple[int, ...], info: Optional[str]):
def _add_comment(self,
label: str,
shape: Tuple[int, ...],
info: str = None):
if info is not None:
specific = f'{label}{" "+str(shape) if np.prod(shape,dtype=int) > 1 else ""}: {info}'
general = util.execution_stamp('Table')
@ -309,8 +319,7 @@ class Table:
data = np.loadtxt(content)
shapes = {'eu':3, 'pos':2, 'IQ':1, 'CI':1, 'ID':1, 'intensity':1, 'fit':1}
remainder = data.shape[1]-sum(shapes.values())
if remainder > 0: # 3.8 can do: if (remainder := data.shape[1]-sum(shapes.values())) > 0
if (remainder := data.shape[1]-sum(shapes.values())) > 0:
shapes['unknown'] = remainder
return Table(data,shapes,comments)
@ -321,7 +330,8 @@ class Table:
return list(self.shapes)
def get(self, label: str) -> np.ndarray:
def get(self,
label: str) -> np.ndarray:
"""
Get column data.
@ -341,7 +351,10 @@ class Table:
return data.astype(type(data.flatten()[0]))
def set(self, label: str, data: np.ndarray, info: str = None) -> 'Table':
def set(self,
label: str,
data: np.ndarray,
info: str = None) -> 'Table':
"""
Set column data.
@ -362,8 +375,7 @@ class Table:
"""
dup = self.copy()
dup._add_comment(label, data.shape[1:], info)
m = re.match(r'(.*)\[((\d+,)*(\d+))\]',label)
if m:
if m := re.match(r'(.*)\[((\d+,)*(\d+))\]',label):
key = m.group(1)
idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(","))),
self.shapes[key])
@ -374,7 +386,10 @@ class Table:
return dup
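A hedged sketch of the bracket-indexed labels matched by the regular expression above (not part of the diff); 'F[0,0]' addresses a single component of a (3,3)-shaped column:

import numpy as np
import damask

t = damask.Table(np.broadcast_to(np.eye(3).flatten(), (4, 9)).copy(),
                 {'F': (3, 3)})
t = t.set('F[0,0]', np.full(4, 2.0), info='stretched in x')
print(t.get('F')[0])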
def add(self, label: str, data: np.ndarray, info: str = None) -> 'Table':
def add(self,
label: str,
data: np.ndarray,
info: str = None) -> 'Table':
"""
Add column data.
@ -406,7 +421,8 @@ class Table:
return dup
def delete(self, label: str) -> 'Table':
def delete(self,
label: str) -> 'Table':
"""
Delete column data.
@ -427,7 +443,10 @@ class Table:
return dup
def rename(self, old: Union[str, List[str]], new: Union[str, List[str]], info: str = None) -> 'Table':
def rename(self,
old: Union[str, List[str]],
new: Union[str, List[str]],
info: str = None) -> 'Table':
"""
Rename column data.
@ -453,7 +472,9 @@ class Table:
return dup
def sort_by(self, labels: Union[str, List[str]], ascending: Union[bool, List[bool]] = True) -> 'Table':
def sort_by(self,
labels: Union[str, List[str]],
ascending: Union[bool, List[bool]] = True) -> 'Table':
"""
Sort table by values of given labels.
@ -472,8 +493,7 @@ class Table:
"""
labels_ = [labels] if isinstance(labels,str) else labels.copy()
for i,l in enumerate(labels_):
m = re.match(r'(.*)\[((\d+,)*(\d+))\]',l)
if m:
if m := re.match(r'(.*)\[((\d+,)*(\d+))\]',l):
idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(','))),
self.shapes[m.group(1)])
labels_[i] = f'{1+idx}_{m.group(1)}'
@ -486,7 +506,8 @@ class Table:
return dup
def append(self, other: 'Table') -> 'Table':
def append(self,
other: 'Table') -> 'Table':
"""
Append other table vertically (similar to numpy.vstack).
@ -505,13 +526,14 @@ class Table:
"""
if self.shapes != other.shapes or not self.data.columns.equals(other.data.columns):
raise KeyError('Labels or shapes or order do not match')
else:
dup = self.copy()
dup.data = dup.data.append(other.data,ignore_index=True)
return dup
dup = self.copy()
dup.data = dup.data.append(other.data,ignore_index=True)
return dup
def join(self, other: 'Table') -> 'Table':
def join(self,
other: 'Table') -> 'Table':
"""
Append other table horizontally (similar to numpy.hstack).
@ -530,15 +552,16 @@ class Table:
"""
if set(self.shapes) & set(other.shapes) or self.data.shape[0] != other.data.shape[0]:
raise KeyError('Duplicated keys or row count mismatch')
else:
dup = self.copy()
dup.data = dup.data.join(other.data)
for key in other.shapes:
dup.shapes[key] = other.shapes[key]
return dup
dup = self.copy()
dup.data = dup.data.join(other.data)
for key in other.shapes:
dup.shapes[key] = other.shapes[key]
return dup
def save(self, fname: FileHandle):
def save(self,
fname: FileHandle):
"""
Save as plain text file.

View File

@ -22,7 +22,8 @@ class VTK:
High-level interface to VTK.
"""
def __init__(self, vtk_data: vtk.vtkDataSet):
def __init__(self,
vtk_data: vtk.vtkDataSet):
"""
New spatial visualization.
@ -38,7 +39,9 @@ class VTK:
@staticmethod
def from_image_data(cells: IntSequence, size: FloatSequence, origin: FloatSequence = np.zeros(3)) -> 'VTK':
def from_image_data(cells: IntSequence,
size: FloatSequence,
origin: FloatSequence = np.zeros(3)) -> 'VTK':
"""
Create VTK of type vtk.vtkImageData.
@ -68,7 +71,9 @@ class VTK:
@staticmethod
def from_rectilinear_grid(grid: np.ndarray, size: FloatSequence, origin: FloatSequence = np.zeros(3)) -> 'VTK':
def from_rectilinear_grid(grid: np.ndarray,
size: FloatSequence,
origin: FloatSequence = np.zeros(3)) -> 'VTK':
"""
Create VTK of type vtk.vtkRectilinearGrid.
@ -100,7 +105,9 @@ class VTK:
@staticmethod
def from_unstructured_grid(nodes: np.ndarray, connectivity: np.ndarray, cell_type: str) -> 'VTK':
def from_unstructured_grid(nodes: np.ndarray,
connectivity: np.ndarray,
cell_type: str) -> 'VTK':
"""
Create VTK of type vtk.vtkUnstructuredGrid.
@ -195,8 +202,7 @@ class VTK:
"""
if not os.path.isfile(fname): # vtk has a strange error handling
raise FileNotFoundError(f'No such file: {fname}')
ext = Path(fname).suffix
if ext == '.vtk' or dataset_type is not None:
if (ext := Path(fname).suffix) == '.vtk' or dataset_type is not None:
reader = vtk.vtkGenericDataObjectReader()
reader.SetFileName(str(fname))
if dataset_type is None:
@ -238,7 +244,11 @@ class VTK:
def _write(writer):
"""Wrapper for parallel writing."""
writer.Write()
def save(self, fname: Union[str, Path], parallel: bool = True, compress: bool = True):
def save(self,
fname: Union[str, Path],
parallel: bool = True,
compress: bool = True):
"""
Save as VTK file.
@ -284,7 +294,9 @@ class VTK:
# Check https://blog.kitware.com/ghost-and-blanking-visibility-changes/ for missing data
# Needs support for damask.Table
def add(self, data: Union[np.ndarray, np.ma.MaskedArray], label: str = None):
def add(self,
data: Union[np.ndarray, np.ma.MaskedArray],
label: str = None):
"""
Add data to either cells or points.
@ -331,7 +343,8 @@ class VTK:
raise TypeError
def get(self, label: str) -> np.ndarray:
def get(self,
label: str) -> np.ndarray:
"""
Get either cell or point data.
@ -383,7 +396,8 @@ class VTK:
return []
def set_comments(self, comments: Union[str, List[str]]):
def set_comments(self,
comments: Union[str, List[str]]):
"""
Set comments.
@ -400,7 +414,8 @@ class VTK:
self.vtk_data.GetFieldData().AddArray(s)
def add_comments(self, comments: Union[str, List[str]]):
def add_comments(self,
comments: Union[str, List[str]]):
"""
Add comments.
@ -440,7 +455,7 @@ class VTK:
width = tk.winfo_screenwidth()
height = tk.winfo_screenheight()
tk.destroy()
except Exception as e:
except Exception:
width = 1024
height = 768

View File

@ -20,7 +20,9 @@ import numpy as _np
from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence
def _ks(size: _FloatSequence, cells: _IntSequence, first_order: bool = False) -> _np.ndarray:
def _ks(size: _FloatSequence,
cells: _IntSequence,
first_order: bool = False) -> _np.ndarray:
"""
Get wave numbers operator.
@ -47,7 +49,8 @@ def _ks(size: _FloatSequence, cells: _IntSequence, first_order: bool = False) ->
return _np.stack(_np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij'), axis=-1)
def curl(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
def curl(size: _FloatSequence,
f: _np.ndarray) -> _np.ndarray:
u"""
Calculate curl of a vector or tensor field in Fourier space.
@ -78,7 +81,8 @@ def curl(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
return _np.fft.irfftn(curl_,axes=(0,1,2),s=f.shape[:3])
def divergence(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
def divergence(size: _FloatSequence,
f: _np.ndarray) -> _np.ndarray:
u"""
Calculate divergence of a vector or tensor field in Fourier space.
@ -105,7 +109,8 @@ def divergence(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
return _np.fft.irfftn(div_,axes=(0,1,2),s=f.shape[:3])
def gradient(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
def gradient(size: _FloatSequence,
f: _np.ndarray) -> _np.ndarray:
u"""
Calculate gradient of a scalar or vector field in Fourier space.
@ -163,7 +168,8 @@ def coordinates0_point(cells: _IntSequence,
axis = -1)
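A minimal sketch of the FFT-based differential operators above (not part of the diff): gradient of a periodic scalar field sampled at the cell centers of a unit-size 16x16x16 grid.

import numpy as np
from damask import grid_filters

size  = np.ones(3)
cells = (16, 16, 16)
x = grid_filters.coordinates0_point(cells, size)   # (16,16,16,3) cell-center coordinates
f = np.sin(2*np.pi*x[..., :1]/size[0])             # scalar field, shape (16,16,16,1)
grad_f = grid_filters.gradient(size, f)            # (16,16,16,3)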
def displacement_fluct_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_fluct_point(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Cell center displacement field from fluctuation part of the deformation gradient field.
@ -195,7 +201,8 @@ def displacement_fluct_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarra
return _np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3])
def displacement_avg_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_avg_point(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Cell center displacement field from average part of the deformation gradient field.
@ -216,7 +223,8 @@ def displacement_avg_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_point(F.shape[:3],size))
def displacement_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_point(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Cell center displacement field from deformation gradient field.
@ -236,7 +244,9 @@ def displacement_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
return displacement_avg_point(size,F) + displacement_fluct_point(size,F)
def coordinates_point(size: _FloatSequence, F: _np.ndarray, origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
def coordinates_point(size: _FloatSequence,
F: _np.ndarray,
origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
"""
Cell center positions.
@ -335,7 +345,8 @@ def coordinates0_node(cells: _IntSequence,
axis = -1)
def displacement_fluct_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_fluct_node(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Nodal displacement field from fluctuation part of the deformation gradient field.
@ -355,7 +366,8 @@ def displacement_fluct_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray
return point_to_node(displacement_fluct_point(size,F))
def displacement_avg_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_avg_node(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Nodal displacement field from average part of the deformation gradient field.
@ -376,7 +388,8 @@ def displacement_avg_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_node(F.shape[:3],size))
def displacement_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
def displacement_node(size: _FloatSequence,
F: _np.ndarray) -> _np.ndarray:
"""
Nodal displacement field from deformation gradient field.
@ -396,7 +409,9 @@ def displacement_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
return displacement_avg_node(size,F) + displacement_fluct_node(size,F)
def coordinates_node(size: _FloatSequence, F: _np.ndarray, origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
def coordinates_node(size: _FloatSequence,
F: _np.ndarray,
origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
"""
Nodal positions.
@ -526,7 +541,9 @@ def coordinates0_valid(coordinates0: _np.ndarray) -> bool:
return False
def regrid(size: _FloatSequence, F: _np.ndarray, cells: _IntSequence) -> _np.ndarray:
def regrid(size: _FloatSequence,
F: _np.ndarray,
cells: _IntSequence) -> _np.ndarray:
"""
Return mapping from coordinates in deformed configuration to a regular grid.

View File

@ -122,7 +122,9 @@ def rotation(T: _np.ndarray) -> _rotation.Rotation:
return _rotation.Rotation.from_matrix(_polar_decomposition(T,'R')[0])
def strain(F: _np.ndarray, t: str, m: float) -> _np.ndarray:
def strain(F: _np.ndarray,
t: str,
m: float) -> _np.ndarray:
"""
Calculate strain tensor (Seth-Hill family).
@ -162,7 +164,8 @@ def strain(F: _np.ndarray, t: str, m: float) -> _np.ndarray:
return eps
def stress_Cauchy(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
def stress_Cauchy(P: _np.ndarray,
F: _np.ndarray) -> _np.ndarray:
"""
Calculate the Cauchy stress (true stress).
@ -184,7 +187,8 @@ def stress_Cauchy(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
return _tensor.symmetric(_np.einsum('...,...ij,...kj',1.0/_np.linalg.det(F),P,F))
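A minimal sketch of strain and stress_Cauchy (not part of the diff; the deformation and stress values are illustrative):

import numpy as np
from damask import mechanics

F = np.diag([1.1, 1.0, 1.0])
P = np.diag([5.0e6, 0.0, 0.0])
eps   = mechanics.strain(F, 'V', 0.0)    # logarithmic strain, eps[0,0] ~ ln(1.1)
sigma = mechanics.stress_Cauchy(P, F)    # det(F)^-1 * P F^T, symmetrized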
def stress_second_Piola_Kirchhoff(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
def stress_second_Piola_Kirchhoff(P: _np.ndarray,
F: _np.ndarray) -> _np.ndarray:
"""
Calculate the second Piola-Kirchhoff stress.
@ -243,7 +247,8 @@ def stretch_right(T: _np.ndarray) -> _np.ndarray:
return _polar_decomposition(T,'U')[0]
def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
def _polar_decomposition(T: _np.ndarray,
requested: _Sequence[str]) -> tuple:
"""
Perform singular value decomposition.
@ -251,7 +256,7 @@ def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
----------
T : numpy.ndarray, shape (...,3,3)
Tensor of which the singular values are computed.
requested : iterable of str
requested : sequence of {'R', 'U', 'V'}
Requested outputs: R for the rotation tensor,
V for left stretch tensor and U for right stretch tensor.
@ -273,7 +278,8 @@ def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
return tuple(output)
def _equivalent_Mises(T_sym: _np.ndarray, s: float) -> _np.ndarray:
def _equivalent_Mises(T_sym: _np.ndarray,
s: float) -> _np.ndarray:
"""
Base equation for Mises equivalent of a stress or strain tensor.

View File

@ -10,7 +10,9 @@ from . import util as _util
from . import grid_filters as _grid_filters
def from_random(size: _FloatSequence, N_seeds: int, cells: _IntSequence = None,
def from_random(size: _FloatSequence,
N_seeds: int,
cells: _IntSequence = None,
rng_seed=None) -> _np.ndarray:
"""
Place seeds randomly in space.
@ -46,8 +48,12 @@ def from_random(size: _FloatSequence, N_seeds: int, cells: _IntSequence = None,
return coords
def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, distance: float,
periodic: bool = True, rng_seed=None) -> _np.ndarray:
def from_Poisson_disc(size: _FloatSequence,
N_seeds: int,
N_candidates: int,
distance: float,
periodic: bool = True,
rng_seed=None) -> _np.ndarray:
"""
Place seeds according to a Poisson disc distribution.
@ -86,10 +92,9 @@ def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, dis
tree = _spatial.cKDTree(coords[:s],boxsize=size) if periodic else \
_spatial.cKDTree(coords[:s])
distances = tree.query(candidates)[0]
best = distances.argmax()
if distances[best] > distance: # require minimum separation
if distances.max() > distance: # require minimum separation
i = 0
coords[s] = candidates[best] # maximum separation to existing point cloud
coords[s] = candidates[distances.argmax()] # maximum separation to existing point cloud
s += 1
progress.update(s)
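A minimal usage sketch of the seeding functions above (not part of the diff; box size and spacing are illustrative):

import numpy as np
from damask import seeds

size  = np.ones(3)*1e-5
s_pd  = seeds.from_Poisson_disc(size, N_seeds=20, N_candidates=100,
                                distance=1e-6, periodic=True)
s_rnd = seeds.from_random(size, N_seeds=20)
print(s_pd.shape, s_rnd.shape)    # (20, 3) (20, 3)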
@ -99,8 +104,11 @@ def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, dis
return coords
def from_grid(grid, selection: _IntSequence = None, invert_selection: bool = False,
average: bool = False, periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:
def from_grid(grid,
selection: _IntSequence = None,
invert_selection: bool = False,
average: bool = False,
periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:
"""
Create seeds from grid description.

View File

@ -45,7 +45,8 @@ def eigenvalues(T_sym: _np.ndarray) -> _np.ndarray:
return _np.linalg.eigvalsh(symmetric(T_sym))
def eigenvectors(T_sym: _np.ndarray, RHS: bool = False) -> _np.ndarray:
def eigenvectors(T_sym: _np.ndarray,
RHS: bool = False) -> _np.ndarray:
"""
Eigenvectors of a symmetric tensor.
@ -63,14 +64,14 @@ def eigenvectors(T_sym: _np.ndarray, RHS: bool = False) -> _np.ndarray:
associated eigenvalues.
"""
(u,v) = _np.linalg.eigh(symmetric(T_sym))
_,v = _np.linalg.eigh(symmetric(T_sym))
if RHS:
v[_np.linalg.det(v) < 0.0,:,2] *= -1.0
if RHS: v[_np.linalg.det(v) < 0.0,:,2] *= -1.0
return v
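A minimal sketch of the RHS option above (not part of the diff): with RHS=True the returned eigenvector triad is right-handed.

import numpy as np
from damask import tensor

T = np.diag([3.0, 1.0, 2.0])
v = tensor.eigenvectors(T, RHS=True)
print(np.linalg.det(v))    # ~ +1.0 for a right-handed basis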
def spherical(T: _np.ndarray, tensor: bool = True) -> _np.ndarray:
def spherical(T: _np.ndarray,
tensor: bool = True) -> _np.ndarray:
"""
Calculate spherical part of a tensor.

View File

@ -7,16 +7,16 @@ import subprocess
import shlex
import re
import fractions
import collections.abc as abc
from collections import abc
from functools import reduce
from typing import Union, Tuple, Iterable, Callable, Dict, List, Any, Literal, Optional
from typing import Union, Tuple, Iterable, Callable, Dict, List, Any, Literal, SupportsIndex, Sequence
from pathlib import Path
import numpy as np
import h5py
from . import version
from ._typehints import FloatSequence
from ._typehints import IntSequence, FloatSequence
# limit visibility
__all__=[
@ -54,7 +54,8 @@ _colors = {
####################################################################################################
# Functions
####################################################################################################
def srepr(msg, glue: str = '\n') -> str:
def srepr(msg,
glue: str = '\n') -> str:
r"""
Join items with glue string.
@ -76,7 +77,7 @@ def srepr(msg, glue: str = '\n') -> str:
hasattr(msg, '__iter__'))):
return glue.join(str(x) for x in msg)
else:
return msg if isinstance(msg,str) else repr(msg)
return msg if isinstance(msg,str) else repr(msg)
def emph(msg) -> str:
@ -148,7 +149,10 @@ def strikeout(msg) -> str:
return _colors['crossout']+srepr(msg)+_colors['end_color']
def run(cmd: str, wd: str = './', env: Dict[str, str] = None, timeout: int = None) -> Tuple[str, str]:
def run(cmd: str,
wd: str = './',
env: Dict[str, str] = None,
timeout: int = None) -> Tuple[str, str]:
"""
Run a command.
@ -226,15 +230,15 @@ def show_progress(iterable: Iterable,
"""
if isinstance(iterable,abc.Sequence):
if N_iter is None:
N = len(iterable)
else:
raise ValueError('N_iter given for sequence')
if N_iter is None:
N = len(iterable)
else:
raise ValueError('N_iter given for sequence')
else:
if N_iter is None:
raise ValueError('N_iter not given')
else:
N = N_iter
if N_iter is None:
raise ValueError('N_iter not given')
N = N_iter
if N <= 1:
for item in iterable:
@ -301,16 +305,20 @@ def project_equal_angle(vector: np.ndarray,
normalize : bool
Ensure unit length of input vector. Defaults to True.
keepdims : bool
Maintain three-dimensional output coordinates. Defaults to False.
Two-dimensional output uses right-handed frame spanned by
the next and next-next axis relative to the projection direction,
e.g. x-y when projecting along z and z-x when projecting along y.
Maintain three-dimensional output coordinates.
Defaults to False.
Returns
-------
coordinates : numpy.ndarray, shape (...,2 | 3)
Projected coordinates.
Notes
-----
Two-dimensional output uses right-handed frame spanned by
the next and next-next axis relative to the projection direction,
e.g. x-y when projecting along z and z-x when projecting along y.
Examples
--------
>>> import damask
@ -345,16 +353,21 @@ def project_equal_area(vector: np.ndarray,
normalize : bool
Ensure unit length of input vector. Defaults to True.
keepdims : bool
Maintain three-dimensional output coordinates. Defaults to False.
Two-dimensional output uses right-handed frame spanned by
the next and next-next axis relative to the projection direction,
e.g. x-y when projecting along z and z-x when projecting along y.
Maintain three-dimensional output coordinates.
Defaults to False.
Returns
-------
coordinates : numpy.ndarray, shape (...,2 | 3)
Projected coordinates.
Notes
-----
Two-dimensional output uses right-handed frame spanned by
the next and next-next axis relative to the projection direction,
e.g. x-y when projecting along z and z-x when projecting along y.
Examples
--------
>>> import damask
@ -373,14 +386,17 @@ def project_equal_area(vector: np.ndarray,
return np.roll(np.block([v[...,:2]/np.sqrt(1.0+np.abs(v[...,2:3])),np.zeros_like(v[...,2:3])]),
-shift if keepdims else 0,axis=-1)[...,:3 if keepdims else 2]
def execution_stamp(class_name: str, function_name: str = None) -> str:
def execution_stamp(class_name: str,
function_name: str = None) -> str:
"""Timestamp the execution of a (function within a) class."""
now = datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S%z')
_function_name = '' if function_name is None else f'.{function_name}'
return f'damask.{class_name}{_function_name} v{version} ({now})'
def hybrid_IA(dist: np.ndarray, N: int, rng_seed = None) -> np.ndarray:
def hybrid_IA(dist: np.ndarray,
N: int,
rng_seed: Union[int, IntSequence] = None) -> np.ndarray:
"""
Hybrid integer approximation.
@ -411,7 +427,7 @@ def hybrid_IA(dist: np.ndarray, N: int, rng_seed = None) -> np.ndarray:
def shapeshifter(fro: Tuple[int, ...],
to: Tuple[int, ...],
mode: Literal['left','right'] = 'left',
keep_ones: bool = False) -> Tuple[Optional[int], ...]:
keep_ones: bool = False) -> Sequence[SupportsIndex]:
"""
Return dimensions that reshape 'fro' to become broadcastable to 'to'.
@ -447,7 +463,7 @@ def shapeshifter(fro: Tuple[int, ...],
"""
if not len(fro) and not len(to): return ()
if len(fro) == 0 and len(to) == 0: return ()
beg = dict(left ='(^.*\\b)',
right='(^.*?\\b)')
@ -455,8 +471,8 @@ def shapeshifter(fro: Tuple[int, ...],
right='(.*?\\b)')
end = dict(left ='(.*?$)',
right='(.*$)')
fro = (1,) if not len(fro) else fro
to = (1,) if not len(to) else to
fro = (1,) if len(fro) == 0 else fro
to = (1,) if len(to) == 0 else to
try:
match = re.match(beg[mode]
+f',{sep[mode]}'.join(map(lambda x: f'{x}'
@ -467,13 +483,14 @@ def shapeshifter(fro: Tuple[int, ...],
grp = match.groups()
except AssertionError:
raise ValueError(f'Shapes can not be shifted {fro} --> {to}')
fill: Tuple[Optional[int], ...] = ()
fill: Any = ()
for g,d in zip(grp,fro+(None,)):
fill += (1,)*g.count(',')+(d,)
return fill[:-1]
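A hedged usage sketch of shapeshifter (not part of the diff), mirroring its use in ConfigMaterial.material_add: reshape a 1-D array so that it broadcasts against a (3,4,2) array.

import numpy as np
from damask import util

a  = np.ones((3, 4, 2))
b  = np.arange(4)
b_ = b.reshape(util.shapeshifter(b.shape, a.shape))
print((a * np.broadcast_to(b_, a.shape)).shape)    # (3, 4, 2)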
def shapeblender(a: Tuple[int, ...], b: Tuple[int, ...]) -> Tuple[int, ...]:
def shapeblender(a: Tuple[int, ...],
b: Tuple[int, ...]) -> Sequence[SupportsIndex]:
"""
Return a shape that overlaps the rightmost entries of 'a' with the leftmost of 'b'.
@ -517,7 +534,8 @@ def extend_docstring(extra_docstring: str) -> Callable:
return _decorator
def extended_docstring(f: Callable, extra_docstring: str) -> Callable:
def extended_docstring(f: Callable,
extra_docstring: str) -> Callable:
"""
Decorator: Combine another function's docstring with a given docstring.
@ -593,7 +611,9 @@ def DREAM3D_cell_data_group(fname: Union[str, Path]) -> str:
return cell_data_group
def Bravais_to_Miller(*, uvtw: np.ndarray = None, hkil: np.ndarray = None) -> np.ndarray:
def Bravais_to_Miller(*,
uvtw: np.ndarray = None,
hkil: np.ndarray = None) -> np.ndarray:
"""
Transform 4 Miller-Bravais indices to 3 Miller indices of crystal direction [uvw] or plane normal (hkl).
@ -620,7 +640,9 @@ def Bravais_to_Miller(*, uvtw: np.ndarray = None, hkil: np.ndarray = None) -> np
return np.einsum('il,...l',basis,axis)
def Miller_to_Bravais(*, uvw: np.ndarray = None, hkl: np.ndarray = None) -> np.ndarray:
def Miller_to_Bravais(*,
uvw: np.ndarray = None,
hkl: np.ndarray = None) -> np.ndarray:
"""
Transform 3 Miller indices to 4 Miller-Bravais indices of crystal direction [uvtw] or plane normal (hkil).
@ -710,7 +732,10 @@ class ProgressBar:
Works for 0-based loops, ETA is estimated by linear extrapolation.
"""
def __init__(self, total: int, prefix: str, bar_length: int):
def __init__(self,
total: int,
prefix: str,
bar_length: int):
"""
Set current time as basis for ETA estimation.
@ -733,12 +758,12 @@ class ProgressBar:
sys.stderr.write(f"{self.prefix} {''*self.bar_length} 0% ETA n/a")
sys.stderr.flush()
def update(self, iteration: int) -> None:
def update(self,
iteration: int) -> None:
fraction = (iteration+1) / self.total
filled_length = int(self.bar_length * fraction)
if filled_length > int(self.bar_length * self.fraction_last) or \
if (filled_length := int(self.bar_length * fraction)) > int(self.bar_length * self.fraction_last) or \
datetime.datetime.now() - self.time_last_update > datetime.timedelta(seconds=10):
self.time_last_update = datetime.datetime.now()
bar = '█' * filled_length + '░' * (self.bar_length - filled_length)

View File

@ -1,3 +1,5 @@
[mypy]
warn_redundant_casts = True
[mypy-scipy.*]
ignore_missing_imports = True
[mypy-h5py.*]

View File

@ -14,6 +14,7 @@ module CPFEM
use config
use math
use rotations
use polynomials
use lattice
use material
use phase
@ -83,6 +84,7 @@ subroutine CPFEM_initAll
call config_init
call math_init
call rotations_init
call polynomials_init
call lattice_init
call discretization_marc_init
call material_init(.false.)

View File

@ -16,6 +16,7 @@ module CPFEM2
use config
use math
use rotations
use polynomials
use lattice
use material
use phase
@ -57,6 +58,7 @@ subroutine CPFEM_initAll
call config_init
call math_init
call rotations_init
call polynomials_init
call lattice_init
#if defined(MESH)
call discretization_mesh_init(restart=interface_restartInc>0)

View File

@ -191,8 +191,10 @@ logical function isScalar(line)
character(len=*), intent(in) :: line
isScalar = (.not.isKeyValue(line) .and. .not.isKey(line) .and. .not.isListItem(line) &
.and. .not.isFlow(line))
isScalar = (.not. isKeyValue(line) .and. &
.not. isKey(line) .and. &
.not. isListItem(line) .and. &
.not. isFlow(line))
end function isScalar

View File

@ -14,6 +14,7 @@
#include "LAPACK_interface.f90"
#include "math.f90"
#include "rotations.f90"
#include "polynomials.f90"
#include "lattice.f90"
#include "element.f90"
#include "geometry_plastic_nonlocal.f90"

View File

@ -66,7 +66,7 @@ subroutine material_init(restart)
print'(/,1x,a)', '<<<+- material init -+>>>'; flush(IO_STDOUT)
call parse
call parse()
print'(/,1x,a)', 'parsed material.yaml'

View File

@ -8,6 +8,7 @@ module phase
use constants
use math
use rotations
use polynomials
use IO
use config
use material

View File

@ -1,15 +1,13 @@
submodule(phase:mechanical) elastic
type :: tParameters
real(pReal),dimension(3) :: &
C_11 = 0.0_pReal, &
C_12 = 0.0_pReal, &
C_13 = 0.0_pReal, &
C_33 = 0.0_pReal, &
C_44 = 0.0_pReal, &
C_66 = 0.0_pReal
real(pReal) :: &
T_ref
type(tPolynomial) :: &
C_11, &
C_12, &
C_13, &
C_33, &
C_44, &
C_66
end type tParameters
type(tParameters), allocatable, dimension(:) :: param
@ -47,35 +45,17 @@ module subroutine elastic_init(phases)
associate(prm => param(ph))
prm%T_ref = elastic%get_asFloat('T_ref', defaultVal=T_ROOM)
prm%C_11(1) = elastic%get_asFloat('C_11')
prm%C_11(2) = elastic%get_asFloat('C_11,T', defaultVal=0.0_pReal)
prm%C_11(3) = elastic%get_asFloat('C_11,T^2',defaultVal=0.0_pReal)
prm%C_12(1) = elastic%get_asFloat('C_12')
prm%C_12(2) = elastic%get_asFloat('C_12,T', defaultVal=0.0_pReal)
prm%C_12(3) = elastic%get_asFloat('C_12,T^2',defaultVal=0.0_pReal)
prm%C_44(1) = elastic%get_asFloat('C_44')
prm%C_44(2) = elastic%get_asFloat('C_44,T', defaultVal=0.0_pReal)
prm%C_44(3) = elastic%get_asFloat('C_44,T^2',defaultVal=0.0_pReal)
prm%C_11 = polynomial(elastic%asDict(),'C_11','T')
prm%C_12 = polynomial(elastic%asDict(),'C_12','T')
prm%C_44 = polynomial(elastic%asDict(),'C_44','T')
if (any(phase_lattice(ph) == ['hP','tI'])) then
prm%C_13(1) = elastic%get_asFloat('C_13')
prm%C_13(2) = elastic%get_asFloat('C_13,T', defaultVal=0.0_pReal)
prm%C_13(3) = elastic%get_asFloat('C_13,T^2',defaultVal=0.0_pReal)
prm%C_33(1) = elastic%get_asFloat('C_33')
prm%C_33(2) = elastic%get_asFloat('C_33,T', defaultVal=0.0_pReal)
prm%C_33(3) = elastic%get_asFloat('C_33,T^2',defaultVal=0.0_pReal)
prm%C_13 = polynomial(elastic%asDict(),'C_13','T')
prm%C_33 = polynomial(elastic%asDict(),'C_33','T')
end if
if (phase_lattice(ph) == 'tI') then
prm%C_66(1) = elastic%get_asFloat('C_66')
prm%C_66(2) = elastic%get_asFloat('C_66,T', defaultVal=0.0_pReal)
prm%C_66(3) = elastic%get_asFloat('C_66,T^2',defaultVal=0.0_pReal)
end if
if (phase_lattice(ph) == 'tI') &
prm%C_66 = polynomial(elastic%asDict(),'C_66','T')
end associate
end do
@ -97,38 +77,20 @@ pure module function elastic_C66(ph,en) result(C66)
associate(prm => param(ph))
C66 = 0.0_pReal
T = thermal_T(ph,en)
C66(1,1) = prm%C_11(1) &
+ prm%C_11(2)*(T - prm%T_ref) &
+ prm%C_11(3)*(T - prm%T_ref)**2
C66(1,2) = prm%C_12(1) &
+ prm%C_12(2)*(T - prm%T_ref) &
+ prm%C_12(3)*(T - prm%T_ref)**2
C66(4,4) = prm%C_44(1) &
+ prm%C_44(2)*(T - prm%T_ref) &
+ prm%C_44(3)*(T - prm%T_ref)**2
C66(1,1) = prm%C_11%at(T)
C66(1,2) = prm%C_12%at(T)
C66(4,4) = prm%C_44%at(T)
if (any(phase_lattice(ph) == ['hP','tI'])) then
C66(1,3) = prm%C_13(1) &
+ prm%C_13(2)*(T - prm%T_ref) &
+ prm%C_13(3)*(T - prm%T_ref)**2
C66(3,3) = prm%C_33(1) &
+ prm%C_33(2)*(T - prm%T_ref) &
+ prm%C_33(3)*(T - prm%T_ref)**2
C66(1,3) = prm%C_13%at(T)
C66(3,3) = prm%C_33%at(T)
end if
if (phase_lattice(ph) == 'tI') then
C66(6,6) = prm%C_66(1) &
+ prm%C_66(2)*(T - prm%T_ref) &
+ prm%C_66(3)*(T - prm%T_ref)**2
end if
if (phase_lattice(ph) == 'tI') C66(6,6) = prm%C_66%at(T)
C66 = lattice_symmetrize_C66(C66,phase_lattice(ph))
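A NumPy sketch of the convention behind this change (not part of the diff; the coefficients are illustrative): the YAML keys C_11, 'C_11,T' and 'C_11,T^2' define C_11(T) = C_11 + C_11,T*(T - T_ref) + C_11,T^2*(T - T_ref)**2, which tPolynomial%at(T) evaluates.

import numpy as np

def poly_at(coef, x, x_ref):
    """Evaluate sum_i coef[i]*(x - x_ref)**i, coef[0] being the constant term."""
    return sum(c*(x - x_ref)**i for i, c in enumerate(coef))

C_11 = poly_at([106.75e9, -3.6e7, 0.0], x=500.0, x_ref=293.15)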

src/polynomials.f90 (new file, 179 lines)
View File

@ -0,0 +1,179 @@
!--------------------------------------------------------------------------------------------------
!> @author Martin Diehl, KU Leuven
!> @brief Polynomial representation for variable data
!--------------------------------------------------------------------------------------------------
module polynomials
use prec
use IO
use YAML_parse
use YAML_types
implicit none
private
type, public :: tPolynomial
real(pReal), dimension(:), allocatable :: coef
real(pReal) :: x_ref
contains
procedure, public :: at => eval
procedure, public :: der1_at => eval_der1
end type tPolynomial
interface polynomial
module procedure polynomial_from_dict
module procedure polynomial_from_coef
end interface polynomial
public :: &
polynomial, &
polynomials_init
contains
!--------------------------------------------------------------------------------------------------
!> @brief Run self-test.
!--------------------------------------------------------------------------------------------------
subroutine polynomials_init()
print'(/,1x,a)', '<<<+- polynomials init -+>>>'; flush(IO_STDOUT)
call selfTest()
end subroutine polynomials_init
!--------------------------------------------------------------------------------------------------
!> @brief Initialize a Polynomial from Coefficients.
!--------------------------------------------------------------------------------------------------
function polynomial_from_coef(coef,x_ref) result(p)
real(pReal), dimension(:), intent(in) :: coef
real(pReal), intent(in) :: x_ref
type(tPolynomial) :: p
allocate(p%coef(0:size(coef)-1),source=coef) ! should be zero based
p%x_ref = x_ref
end function polynomial_from_coef
!--------------------------------------------------------------------------------------------------
!> @brief Initialize a Polynomial from a Dictionary with Coefficients.
!--------------------------------------------------------------------------------------------------
function polynomial_from_dict(dict,y,x) result(p)
type(tDict), intent(in) :: dict
character(len=*), intent(in) :: y, x
type(tPolynomial) :: p
real(pReal), dimension(:), allocatable :: coef
real(pReal) :: x_ref
allocate(coef(1),source=dict%get_asFloat(y))
if (dict%contains(y//','//x)) then
x_ref = dict%get_asFloat(x//'_ref')
coef = [coef,dict%get_asFloat(y//','//x)]
if (dict%contains(y//','//x//'^2')) then
coef = [coef,dict%get_asFloat(y//','//x//'^2')]
end if
else
x_ref = huge(0.0_pReal) ! Simplify debugging
end if
p = Polynomial(coef,x_ref)
end function polynomial_from_dict
!--------------------------------------------------------------------------------------------------
!> @brief Evaluate a Polynomial.
!--------------------------------------------------------------------------------------------------
pure function eval(self,x) result(y)
class(tPolynomial), intent(in) :: self
real(pReal), intent(in) :: x
real(pReal) :: y
integer :: i
y = self%coef(0)
do i = 1, ubound(self%coef,1)
y = y + self%coef(i) * (x-self%x_ref)**i
enddo
end function eval
!--------------------------------------------------------------------------------------------------
!> @brief Evaluate the first derivative of a Polynomial.
!--------------------------------------------------------------------------------------------------
pure function eval_der1(self,x) result(y)
class(tPolynomial), intent(in) :: self
real(pReal), intent(in) :: x
real(pReal) :: y
integer :: i
y = 0.0_pReal
do i = 1, ubound(self%coef,1)
y = y + real(i,pReal)*self%coef(i) * (x-self%x_ref)**(i-1)
enddo
end function eval_der1
!--------------------------------------------------------------------------------------------------
!> @brief Check correctness of polynomial functionality.
!--------------------------------------------------------------------------------------------------
subroutine selfTest
type(tPolynomial) :: p1, p2
real(pReal), dimension(3) :: coef
real(pReal) :: x_ref, x
class(tNode), pointer :: dict
character(len=pStringLen), dimension(3) :: coef_s
character(len=pStringLen) :: x_ref_s, x_s, YAML_s
call random_number(coef)
call random_number(x_ref)
call random_number(x)
coef = coef*10_pReal -0.5_pReal
x_ref = x_ref*10_pReal -0.5_pReal
x = x*10_pReal -0.5_pReal
p1 = polynomial(coef,x_ref)
if (dNeq(p1%at(x_ref),coef(1))) error stop 'polynomial: @ref'
write(coef_s(1),*) coef(1)
write(coef_s(2),*) coef(2)
write(coef_s(3),*) coef(3)
write(x_ref_s,*) x_ref
write(x_s,*) x
YAML_s = 'C: '//trim(adjustl(coef_s(1)))//IO_EOL//&
'C,T: '//trim(adjustl(coef_s(2)))//IO_EOL//&
'C,T^2: '//trim(adjustl(coef_s(3)))//IO_EOL//&
'T_ref: '//trim(adjustl(x_ref_s))//IO_EOL
Dict => YAML_parse_str(trim(YAML_s))
p2 = polynomial(dict%asDict(),'C','T')
if (dNeq(p1%at(x),p2%at(x),1.0e-12_pReal)) error stop 'polynomials: init'
p1 = polynomial(coef*[0.0_pReal,1.0_pReal,0.0_pReal],x_ref)
if (dNeq(p1%at(x_ref+x),-p1%at(x_ref-x),1.0e-10_pReal)) error stop 'polynomials: eval(odd)'
if (dNeq(p1%der1_at(x),p1%der1_at(5.0_pReal*x),1.0e-10_pReal)) error stop 'polynomials: eval_der(odd)'
p1 = polynomial(coef*[0.0_pReal,0.0_pReal,1.0_pReal],x_ref)
if (dNeq(p1%at(x_ref+x),p1%at(x_ref-x),1e-10_pReal)) error stop 'polynomials: eval(even)'
if (dNeq(p1%der1_at(x_ref+x),-p1%der1_at(x_ref-x),1e-10_pReal)) error stop 'polynomials: eval_der(even)'
end subroutine selfTest
end module polynomials
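A NumPy sketch of what at/der1_at compute (not part of the diff; the coefficients are illustrative): a polynomial in the shifted variable (x - x_ref) and its first derivative.

from numpy.polynomial import Polynomial

coef, x_ref, x = [1.0, -0.5, 0.25], 300.0, 350.0
p = Polynomial(coef)              # c0 + c1*t + c2*t**2 with t = x - x_ref
value = p(x - x_ref)              # corresponds to tPolynomial%at(x)
slope = p.deriv()(x - x_ref)      # corresponds to tPolynomial%der1_at(x)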