Merge branch 'development' of git.damask.mpie.de:damask/DAMASK into typehints_orientation_rotation
This commit is contained in: commit ef051ceef9

@@ -1 +1 @@
-v3.0.0-alpha5-518-g4fa97b9a3
+v3.0.0-alpha5-545-gad74f5dbe
@@ -47,27 +47,32 @@ class Colormap(mpl.colors.ListedColormap):
 
     """
 
-    def __eq__(self, other: object) -> bool:
+    def __eq__(self,
+               other: object) -> bool:
         """Test equality of colormaps."""
         if not isinstance(other, Colormap):
             return NotImplemented
         return len(self.colors) == len(other.colors) \
            and bool(np.all(self.colors == other.colors))
 
-    def __add__(self, other: 'Colormap') -> 'Colormap':
+    def __add__(self,
+                other: 'Colormap') -> 'Colormap':
         """Concatenate."""
         return Colormap(np.vstack((self.colors,other.colors)),
                         f'{self.name}+{other.name}')
 
-    def __iadd__(self, other: 'Colormap') -> 'Colormap':
+    def __iadd__(self,
+                 other: 'Colormap') -> 'Colormap':
         """Concatenate (in-place)."""
         return self.__add__(other)
 
-    def __mul__(self, factor: int) -> 'Colormap':
+    def __mul__(self,
+                factor: int) -> 'Colormap':
         """Repeat."""
         return Colormap(np.vstack([self.colors]*factor),f'{self.name}*{factor}')
 
-    def __imul__(self, factor: int) -> 'Colormap':
+    def __imul__(self,
+                 factor: int) -> 'Colormap':
         """Repeat (in-place)."""
         return self.__mul__(factor)
 
@@ -161,7 +166,8 @@ class Colormap(mpl.colors.ListedColormap):
 
 
     @staticmethod
-    def from_predefined(name: str, N: int = 256) -> 'Colormap':
+    def from_predefined(name: str,
+                        N: int = 256) -> 'Colormap':
         """
         Select from a set of predefined colormaps.
 
@@ -260,10 +266,8 @@ class Colormap(mpl.colors.ListedColormap):
         l,r = (field[mask].min(),field[mask].max()) if bounds is None else \
               (bounds[0],bounds[1])
 
-        delta,avg = r-l,0.5*abs(r+l)
-
-        if abs(delta) * 1e8 <= avg:                                     # delta is similar to numerical noise
-            l,r = l-0.5*avg*np.sign(delta),r+0.5*avg*np.sign(delta),    # extend range to have actual data centered within
+        if abs(delta := r-l) * 1e8 <= (avg := 0.5*abs(r+l)):            # delta is similar to numerical noise
+            l,r = (l-0.5*avg*np.sign(delta),r+0.5*avg*np.sign(delta))   # extend range to have actual data centered within
 
         return Image.fromarray(
             (np.dstack((
@@ -275,7 +279,8 @@ class Colormap(mpl.colors.ListedColormap):
             mode='RGBA')
 
 
-    def reversed(self, name: str = None) -> 'Colormap':
+    def reversed(self,
+                 name: str = None) -> 'Colormap':
         """
         Reverse.
 
@@ -296,7 +301,7 @@ class Colormap(mpl.colors.ListedColormap):
        >>> damask.Colormap.from_predefined('stress').reversed()
 
        """
-        rev = super(Colormap,self).reversed(name)
+        rev = super().reversed(name)
         return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name)
 
 
@@ -328,7 +333,8 @@ class Colormap(mpl.colors.ListedColormap):
             return fname
 
 
-    def save_paraview(self, fname: FileHandle = None):
+    def save_paraview(self,
+                      fname: FileHandle = None):
         """
         Save as JSON file for use in Paraview.
 
@@ -355,7 +361,8 @@ class Colormap(mpl.colors.ListedColormap):
             fhandle.write('\n')
 
 
-    def save_ASCII(self, fname: FileHandle = None):
+    def save_ASCII(self,
+                   fname: FileHandle = None):
         """
         Save as ASCII file.
 
@@ -390,7 +397,8 @@ class Colormap(mpl.colors.ListedColormap):
         self._get_file_handle(fname,'.legend').write(GOM_str)
 
 
-    def save_gmsh(self, fname: FileHandle = None):
+    def save_gmsh(self,
+                  fname: FileHandle = None):
         """
         Save as ASCII file for use in gmsh.
 
@@ -429,7 +437,7 @@ class Colormap(mpl.colors.ListedColormap):
            """If saturation of one of the two colors is much less than the other, hue of the less."""
            if msh_sat[0] >= msh_unsat[0]:
                return msh_sat[2]
-           else:
-               hSpin = msh_sat[1]/np.sin(msh_sat[1])*np.sqrt(msh_unsat[0]**2.0-msh_sat[0]**2)/msh_sat[0]
-               if msh_sat[2] < - np.pi/3.0: hSpin *= -1.0
-               return msh_sat[2] + hSpin
+
+           hSpin = msh_sat[1]/np.sin(msh_sat[1])*np.sqrt(msh_unsat[0]**2.0-msh_sat[0]**2)/msh_sat[0]
+           if msh_sat[2] < - np.pi/3.0: hSpin *= -1.0
+           return msh_sat[2] + hSpin
@@ -445,9 +453,9 @@ class Colormap(mpl.colors.ListedColormap):
            else:
                lo = np.array([M_mid,0.0,0.0])
                frac = 2.0*frac - 1.0
-           if lo[1] < 0.05 and hi[1] > 0.05:
+           if lo[1] < 0.05 < hi[1]:
                lo[2] = adjust_hue(hi,lo)
-           elif lo[1] > 0.05 and hi[1] < 0.05:
+           elif hi[1] < 0.05 < lo[1]:
                hi[2] = adjust_hue(lo,hi)
 
            return (1.0 - frac) * lo + frac * hi
@@ -476,7 +484,7 @@ class Colormap(mpl.colors.ListedColormap):
                            'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg',
                            'gist_rainbow', 'rainbow', 'jet', 'nipy_spectral', 'gist_ncar']}
 
-    _predefined_DAMASK = {'orientation': {'low':  [0.933334,0.878432,0.878431],
+    _predefined_DAMASK = {'orientation': {'low':  [0.933334,0.878432,0.878431],   # noqa
                                           'high': [0.250980,0.007843,0.000000]},
                           'strain':      {'low':  [0.941177,0.941177,0.870588],
                                           'high': [0.266667,0.266667,0.000000]},
@@ -621,7 +629,8 @@ class Colormap(mpl.colors.ListedColormap):
 
 
     @staticmethod
-    def _lab2xyz(lab: np.ndarray, ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
+    def _lab2xyz(lab: np.ndarray,
+                 ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
         """
         CIE Lab to CIE Xyz.
 
@@ -652,7 +661,8 @@ class Colormap(mpl.colors.ListedColormap):
         ])*ref_white
 
     @staticmethod
-    def _xyz2lab(xyz: np.ndarray, ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
+    def _xyz2lab(xyz: np.ndarray,
+                 ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
         """
         CIE Xyz to CIE Lab.
 
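Note on the hunk at -260,10 +266,8 above: the two temporaries are folded into the condition with assignment expressions (Python >= 3.8), and a value range whose width is at the level of numerical noise is widened so the actual data stays centred. A minimal standalone sketch of the same guard (the function name is illustrative, not part of the library):

    import numpy as np

    def widen_degenerate_range(l: float, r: float) -> tuple:
        """Widen (l,r) when their difference is numerical noise relative to their magnitude."""
        if abs(delta := r-l) * 1e8 <= (avg := 0.5*abs(r+l)):      # delta is similar to numerical noise
            l,r = (l-0.5*avg*np.sign(delta), r+0.5*avg*np.sign(delta))
        return l,r

    print(widen_degenerate_range(1.0, 1.0+1e-12))   # widened around ~1.0
    print(widen_degenerate_range(0.0, 1.0))         # left unchanged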
@@ -2,25 +2,34 @@ import copy
 from io import StringIO
 from collections.abc import Iterable
 import abc
+from pathlib import Path
+from typing import Union, Dict, Any, Type, TypeVar
 
 import numpy as np
 import yaml
 
+from ._typehints import FileHandle
 from . import Rotation
 
+MyType = TypeVar('MyType', bound='Config')
+
 
 class NiceDumper(yaml.SafeDumper):
     """Make YAML readable for humans."""
 
-    def write_line_break(self, data=None):
+    def write_line_break(self,
+                         data: str = None):
         super().write_line_break(data)
 
         if len(self.indents) == 1:
             super().write_line_break()
 
-    def increase_indent(self, flow=False, indentless=False):
+    def increase_indent(self,
+                        flow: bool = False,
+                        indentless: bool = False):
         return super().increase_indent(flow, False)
 
-    def represent_data(self, data):
+    def represent_data(self,
+                       data: Any):
         """Cast Config objects and its subclasses to dict."""
         if isinstance(data, dict) and type(data) != dict:
             return self.represent_data(dict(data))
@@ -31,14 +40,17 @@ class NiceDumper(yaml.SafeDumper):
         else:
             return super().represent_data(data)
 
-    def ignore_aliases(self, data):
+    def ignore_aliases(self,
+                       data: Any) -> bool:
         """Do not use references to existing objects."""
         return True
 
 class Config(dict):
     """YAML-based configuration."""
 
-    def __init__(self,yml=None,**kwargs):
+    def __init__(self,
+                 yml: Union[str, Dict[str, Any]] = None,
+                 **kwargs):
         """Initialize from YAML, dict, or key=value pairs."""
         if isinstance(yml,str):
             kwargs.update(yaml.safe_load(yml))
@@ -47,7 +59,7 @@ class Config(dict):
 
         super().__init__(**kwargs)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         """Show as in file."""
         output = StringIO()
         self.save(output)
@@ -55,14 +67,15 @@ class Config(dict):
         return ''.join(output.readlines())
 
 
-    def __copy__(self):
+    def __copy__(self: MyType) -> MyType:
         """Create deep copy."""
         return copy.deepcopy(self)
 
     copy = __copy__
 
 
-    def __or__(self,other):
+    def __or__(self: MyType,
+               other) -> MyType:
         """
         Update configuration with contents of other.
 
@@ -76,18 +89,24 @@ class Config(dict):
         updated : damask.Config
             Updated configuration.
 
+        Note
+        ----
+        This functionality is a backport for Python 3.8
+
         """
         duplicate = self.copy()
         duplicate.update(other)
         return duplicate
 
 
-    def __ior__(self,other):
+    def __ior__(self: MyType,
+                other) -> MyType:
         """Update configuration with contents of other."""
         return self.__or__(other)
 
 
-    def delete(self,keys):
+    def delete(self: MyType,
+               keys: Union[Iterable, str]) -> MyType:
         """
         Remove configuration keys.
 
@@ -109,7 +128,8 @@ class Config(dict):
 
 
     @classmethod
-    def load(cls,fname):
+    def load(cls: Type[MyType],
+             fname: FileHandle) -> MyType:
         """
         Load from yaml file.
 
@@ -124,14 +144,15 @@ class Config(dict):
             Configuration from file.
 
         """
-        try:
+        if isinstance(fname, (str, Path)):
             fhandle = open(fname)
-        except TypeError:
+        else:
             fhandle = fname
         return cls(yaml.safe_load(fhandle))
 
-    def save(self,fname,**kwargs):
+    def save(self,
+             fname: FileHandle,
+             **kwargs):
         """
         Save to yaml file.
 
@@ -143,9 +164,9 @@ class Config(dict):
             Keyword arguments parsed to yaml.dump.
 
         """
-        try:
+        if isinstance(fname, (str, Path)):
             fhandle = open(fname,'w',newline='\n')
-        except TypeError:
+        else:
             fhandle = fname
 
         if 'width' not in kwargs:
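Note on the typing changes above: `MyType = TypeVar('MyType', bound='Config')` lets `__copy__`, `__or__`, `__ior__`, `delete`, and `load` declare that they return the calling class rather than `Config` itself, so subclasses keep their type through these calls. A minimal sketch of the pattern, independent of DAMASK (the class names are made up):

    import copy
    from typing import Type, TypeVar

    MyType = TypeVar('MyType', bound='Base')

    class Base(dict):
        def duplicate(self: MyType) -> MyType:
            # annotated to return the subclass type, not Base
            return copy.deepcopy(self)

        @classmethod
        def load(cls: Type[MyType], data: dict) -> MyType:
            return cls(data)

    class Derived(Base):
        pass

    d = Derived.load({'a': 1}).duplicate()
    print(type(d).__name__)   # 'Derived', as the annotations promise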
@@ -1,10 +1,14 @@
 import numpy as np
 import h5py
+from typing import Sequence, Dict, Any, Collection
 
+from ._typehints import FileHandle
 from . import Config
 from . import Rotation
 from . import Orientation
 from . import util
+from . import Table
 
 
 class ConfigMaterial(Config):
     """
@@ -17,7 +21,9 @@ class ConfigMaterial(Config):
 
     """
 
-    def __init__(self,d=None,**kwargs):
+    def __init__(self,
+                 d: Dict[str, Any] = None,
+                 **kwargs):
         """
         New material configuration.
 
@@ -30,14 +36,17 @@ class ConfigMaterial(Config):
             Initial content specified as pairs of key=value.
 
         """
+        default: Collection
         if d is None:
-            for section,default in {'material':[],'homogenization':{},'phase':{}}.items():
+            for section, default in {'material':[],'homogenization':{},'phase':{}}.items():
                 if section not in kwargs: kwargs.update({section:default})
 
         super().__init__(d,**kwargs)
 
 
-    def save(self,fname='material.yaml',**kwargs):
+    def save(self,
+             fname: FileHandle = 'material.yaml',
+             **kwargs):
         """
         Save to yaml file.
 
@@ -53,7 +62,8 @@ class ConfigMaterial(Config):
 
 
     @classmethod
-    def load(cls,fname='material.yaml'):
+    def load(cls,
+             fname: FileHandle = 'material.yaml') -> 'ConfigMaterial':
         """
         Load from yaml file.
 
@@ -72,10 +82,14 @@ class ConfigMaterial(Config):
 
 
     @staticmethod
-    def load_DREAM3D(fname,
-                     grain_data=None,cell_data=None,cell_ensemble_data='CellEnsembleData',
-                     phases='Phases',Euler_angles='EulerAngles',phase_names='PhaseName',
-                     base_group=None):
+    def load_DREAM3D(fname: str,
+                     grain_data: str = None,
+                     cell_data: str = None,
+                     cell_ensemble_data: str = 'CellEnsembleData',
+                     phases: str = 'Phases',
+                     Euler_angles: str = 'EulerAngles',
+                     phase_names: str = 'PhaseName',
+                     base_group: str = None) -> 'ConfigMaterial':
         """
         Load DREAM.3D (HDF5) file.
 
@@ -154,7 +168,8 @@ class ConfigMaterial(Config):
 
 
     @staticmethod
-    def from_table(table,**kwargs):
+    def from_table(table: Table,
+                   **kwargs) -> 'ConfigMaterial':
         """
         Generate from an ASCII table.
 
@@ -207,7 +222,7 @@ class ConfigMaterial(Config):
 
 
     @property
-    def is_complete(self):
+    def is_complete(self) -> bool:
         """
         Check for completeness.
 
@@ -267,12 +282,11 @@ class ConfigMaterial(Config):
         if homogenization - set(self['homogenization']):
             print(f'Homogenization(s) {homogenization-set(self["homogenization"])} missing')
             ok = False
 
         return ok
 
-
     @property
-    def is_valid(self):
+    def is_valid(self) -> bool:
         """
         Check for valid content.
 
@@ -316,7 +330,10 @@ class ConfigMaterial(Config):
         return ok
 
 
-    def material_rename_phase(self,mapping,ID=None,constituent=None):
+    def material_rename_phase(self,
+                              mapping: Dict[str, str],
+                              ID: Sequence[int] = None,
+                              constituent: Sequence[int] = None) -> 'ConfigMaterial':
         """
         Change phase name in material.
 
@@ -347,7 +364,9 @@ class ConfigMaterial(Config):
         return dup
 
 
-    def material_rename_homogenization(self,mapping,ID=None):
+    def material_rename_homogenization(self,
+                                       mapping: Dict[str, str],
+                                       ID: Sequence[int] = None) -> 'ConfigMaterial':
         """
         Change homogenization name in material.
 
@@ -374,7 +393,8 @@ class ConfigMaterial(Config):
         return dup
 
 
-    def material_add(self,**kwargs):
+    def material_add(self,
+                     **kwargs: Any) -> 'ConfigMaterial':
         """
         Add material entries.
 
@@ -453,7 +473,7 @@ class ConfigMaterial(Config):
             N = max(N,s[0]) if len(s)>0 else N
             n = max(n,s[1]) if len(s)>1 else n
 
-        mat = [{'constituents':[{} for _ in range(n)]} for _ in range(N)]
+        mat: Sequence[dict] = [{'constituents':[{} for _ in range(n)]} for _ in range(N)]
 
         if 'v' not in kwargs:
             shaped['v'] = np.broadcast_to(1/n,(N,n))
@@ -461,7 +481,7 @@ class ConfigMaterial(Config):
         map_shape = {'O':(N,n,4),'F_i':(N,n,3,3)}
         for k,v in shaped.items():
             target = map_shape.get(k,(N,n))
-            obj = np.broadcast_to(v.reshape(util.shapeshifter(v.shape,target,mode='right')),target)
+            obj = np.broadcast_to(v.reshape(util.shapeshifter(v.shape, target, mode = 'right')), target)
             for i in range(N):
                 if k in ['phase','O','v','F_i']:
                     for j in range(n):
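Note on `fname: FileHandle` in the signatures above: the alias comes from `._typehints` and its definition is not part of this diff; presumably it unions path-like and already-open inputs, roughly as sketched below (the exact definition is an assumption). The `isinstance(fname, (str, Path))` dispatch introduced in Config.load/save matches such a union:

    # Hypothetical stand-in for damask._typehints.FileHandle; the real alias may differ.
    from pathlib import Path
    from typing import TextIO, Union

    FileHandle = Union[TextIO, str, Path]

    def open_for_read(fname: FileHandle):
        """Open paths, pass already-open handles through unchanged."""
        if isinstance(fname, (str, Path)):
            return open(fname)
        return fname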
@@ -33,9 +33,9 @@ class Crystal():
     def __init__(self,*,
                  family = None,
                  lattice = None,
-                 a = None,b = None,c = None,
-                 alpha = None,beta = None,gamma = None,
-                 degrees = False):
+                 a: float = None, b: float = None, c: float = None,
+                 alpha: float = None, beta: float = None, gamma: float = None,
+                 degrees: bool = False):
         """
         Representation of crystal in terms of crystal family or Bravais lattice.
 
@@ -62,7 +62,7 @@ class Crystal():
             Angles are given in degrees. Defaults to False.
 
         """
-        if family not in [None] + list(lattice_symmetries.values()):
+        if family is not None and family not in list(lattice_symmetries.values()):
             raise KeyError(f'invalid crystal family "{family}"')
         if lattice is not None and family is not None and family != lattice_symmetries[lattice]:
             raise KeyError(f'incompatible family "{family}" for lattice "{lattice}"')
@@ -107,9 +107,6 @@ class Crystal():
             if np.any([np.roll([self.alpha,self.beta,self.gamma],r)[0]
                     >= np.sum(np.roll([self.alpha,self.beta,self.gamma],r)[1:]) for r in range(3)]):
                 raise ValueError ('each lattice angle must be less than sum of others')
-        else:
-            self.a = self.b = self.c = None
-            self.alpha = self.beta = self.gamma = None
 
 
     def __repr__(self):
@@ -122,7 +119,8 @@ class Crystal():
                           'α={:.5g}°, β={:.5g}°, γ={:.5g}°'.format(*np.degrees(self.parameters[3:]))])
 
 
-    def __eq__(self,other):
+    def __eq__(self,
+               other: object) -> bool:
         """
         Equal to other.
 
@@ -132,6 +130,8 @@ class Crystal():
             Crystal to check for equality.
 
         """
+        if not isinstance(other, Crystal):
+            return NotImplemented
         return self.lattice == other.lattice and \
                self.parameters == other.parameters and \
                self.family == other.family
@@ -139,8 +139,7 @@ class Crystal():
     @property
     def parameters(self):
         """Return lattice parameters a, b, c, alpha, beta, gamma."""
-        return (self.a,self.b,self.c,self.alpha,self.beta,self.gamma)
-
+        if hasattr(self,'a'): return (self.a,self.b,self.c,self.alpha,self.beta,self.gamma)
 
     @property
     def immutable(self):
@@ -269,7 +268,7 @@ class Crystal():
         https://doi.org/10.1063/1.1661333
 
         """
-        if None in self.parameters:
+        if self.parameters is None:
             raise KeyError('missing crystal lattice parameters')
         return np.array([
                           [1,0,0],
@@ -315,7 +314,9 @@ class Crystal():
                           + _lattice_points.get(self.lattice if self.lattice == 'hP' else \
                                                 self.lattice[-1],None),dtype=float)
 
-    def to_lattice(self, *, direction: np.ndarray = None, plane: np.ndarray = None) -> np.ndarray:
+    def to_lattice(self, *,
+                   direction: np.ndarray = None,
+                   plane: np.ndarray = None) -> np.ndarray:
         """
         Calculate lattice vector corresponding to crystal frame direction or plane normal.
 
@@ -339,7 +340,9 @@ class Crystal():
         return np.einsum('il,...l',basis,axis)
 
 
-    def to_frame(self, *, uvw: np.ndarray = None, hkl: np.ndarray = None) -> np.ndarray:
+    def to_frame(self, *,
+                 uvw: np.ndarray = None,
+                 hkl: np.ndarray = None) -> np.ndarray:
         """
         Calculate crystal frame vector along lattice direction [uvw] or plane normal (hkl).
 
@@ -362,7 +365,8 @@ class Crystal():
         return np.einsum('il,...l',basis,axis)
 
 
-    def kinematics(self, mode: str) -> Dict[str, List[np.ndarray]]:
+    def kinematics(self,
+                   mode: str) -> Dict[str, List[np.ndarray]]:
         """
         Return crystal kinematics systems.
 
@@ -621,7 +625,8 @@ class Crystal():
                 'plane': [m[:,3:6] for m in master]}
 
 
-    def relation_operations(self, model: str) -> Tuple[str, Rotation]:
+    def relation_operations(self,
+                            model: str) -> Tuple[str, Rotation]:
         """
         Crystallographic orientation relationships for phase transformations.
 
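Note on the added isinstance check in Crystal.__eq__ above (mirroring Colormap.__eq__): comparing against a foreign type now returns NotImplemented instead of failing, which lets Python fall back to the other operand's __eq__ or to the default comparison. A minimal sketch of the idiom:

    class Point:
        def __init__(self, x: int, y: int):
            self.x, self.y = x, y

        def __eq__(self, other: object) -> bool:
            if not isinstance(other, Point):
                return NotImplemented        # let the other operand or the default comparison decide
            return self.x == other.x and self.y == other.y

    print(Point(1, 2) == Point(1, 2))     # True
    print(Point(1, 2) == 'not a point')   # False, via the NotImplemented fallback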
@@ -4,7 +4,7 @@ import warnings
 import multiprocessing as mp
 from functools import partial
 import typing
-from typing import Union, Optional, TextIO, List, Sequence, Literal
+from typing import Union, Optional, TextIO, List, Sequence
 from pathlib import Path
 
 import numpy as np
@@ -33,7 +33,7 @@ class Grid:
                  material: np.ndarray,
                  size: FloatSequence,
                  origin: FloatSequence = np.zeros(3),
-                 comments: Union[str, Sequence[str]] = []):
+                 comments: Union[str, Sequence[str]] = None):
         """
         New geometry definition for grid solvers.
 
@@ -53,7 +53,7 @@ class Grid:
         self.material = material
         self.size = size                        # type: ignore
         self.origin = origin                    # type: ignore
-        self.comments = comments                # type: ignore
+        self.comments = [] if comments is None else comments    # type: ignore
 
 
     def __repr__(self) -> str:
@@ -77,7 +77,8 @@ class Grid:
     copy = __copy__
 
 
-    def __eq__(self, other: object) -> bool:
+    def __eq__(self,
+               other: object) -> bool:
         """
         Test equality of other.
 
@@ -101,12 +102,13 @@ class Grid:
         return self._material
 
     @material.setter
-    def material(self, material: np.ndarray):
+    def material(self,
+                 material: np.ndarray):
         if len(material.shape) != 3:
             raise ValueError(f'invalid material shape {material.shape}')
-        elif material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
+        if material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
             raise TypeError(f'invalid material data type {material.dtype}')
-        else:
-            self._material = np.copy(material)
+
+        self._material = np.copy(material)
 
         if self.material.dtype in np.sctypes['float'] and \
@@ -120,10 +122,11 @@ class Grid:
         return self._size
 
     @size.setter
-    def size(self, size: FloatSequence):
+    def size(self,
+             size: FloatSequence):
         if len(size) != 3 or any(np.array(size) < 0):
             raise ValueError(f'invalid size {size}')
-        else:
-            self._size = np.array(size)
+
+        self._size = np.array(size)
 
     @property
@@ -132,10 +135,11 @@ class Grid:
         return self._origin
 
     @origin.setter
-    def origin(self, origin: FloatSequence):
+    def origin(self,
+               origin: FloatSequence):
         if len(origin) != 3:
             raise ValueError(f'invalid origin {origin}')
-        else:
-            self._origin = np.array(origin)
+
+        self._origin = np.array(origin)
 
     @property
@@ -144,7 +148,8 @@ class Grid:
         return self._comments
 
     @comments.setter
-    def comments(self, comments: Union[str, Sequence[str]]):
+    def comments(self,
+                 comments: Union[str, Sequence[str]]):
         self._comments = [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
 
 
@@ -229,8 +234,7 @@ class Grid:
             content = f.readlines()
         for i,line in enumerate(content[:header_length]):
             items = line.split('#')[0].lower().strip().split()
-            key = items[0] if items else ''
-            if key == 'grid':
+            if (key := items[0] if items else '') == 'grid':
                 cells = np.array([  int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
             elif key == 'size':
                 size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
@@ -242,8 +246,7 @@ class Grid:
         material = np.empty(int(cells.prod()))           # initialize as flat array
         i = 0
         for line in content[header_length:]:
-            items = line.split('#')[0].split()
-            if len(items) == 3:
+            if len(items := line.split('#')[0].split()) == 3:
                 if items[1].lower() == 'of':
                     material_entry = np.ones(int(items[0]))*float(items[2])
                 elif items[1].lower() == 'to':
@@ -387,7 +390,9 @@ class Grid:
 
 
     @staticmethod
-    def _find_closest_seed(seeds: np.ndarray, weights: np.ndarray, point: np.ndarray) -> np.integer:
+    def _find_closest_seed(seeds: np.ndarray,
+                           weights: np.ndarray,
+                           point: np.ndarray) -> np.integer:
         return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
 
     @staticmethod
@@ -624,7 +629,9 @@ class Grid:
                    )
 
 
-    def save(self, fname: Union[str, Path], compress: bool = True):
+    def save(self,
+             fname: Union[str, Path],
+             compress: bool = True):
         """
         Save as VTK image data file.
 
@@ -643,7 +650,8 @@ class Grid:
         v.save(fname,parallel=False,compress=compress)
 
 
-    def save_ASCII(self, fname: Union[str, TextIO]):
+    def save_ASCII(self,
+                   fname: Union[str, TextIO]):
         """
         Save as geom file.
 
@@ -770,15 +778,16 @@ class Grid:
                    )
 
 
-    def mirror(self, directions: Sequence[str], reflect: bool = False) -> 'Grid':
+    def mirror(self,
+               directions: Sequence[str],
+               reflect: bool = False) -> 'Grid':
         """
         Mirror grid along given directions.
 
         Parameters
         ----------
-        directions : (sequence of) str
+        directions : (sequence of) {'x', 'y', 'z'}
             Direction(s) along which the grid is mirrored.
-            Valid entries are 'x', 'y', 'z'.
         reflect : bool, optional
             Reflect (include) outermost layers. Defaults to False.
 
@@ -801,8 +810,7 @@ class Grid:
         # materials: 1
 
         """
-        valid = ['x','y','z']
-        if not set(directions).issubset(valid):
+        if not set(directions).issubset(valid := ['x', 'y', 'z']):
             raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
 
         limits: Sequence[Optional[int]] = [None,None] if reflect else [-2,0]
@@ -822,15 +830,15 @@ class Grid:
                    )
 
 
-    def flip(self, directions: Union[Literal['x', 'y', 'z'], Sequence[Literal['x', 'y', 'z']]]) -> 'Grid':
+    def flip(self,
+             directions: Sequence[str]) -> 'Grid':
         """
         Flip grid along given directions.
 
         Parameters
         ----------
-        directions : (sequence of) str
+        directions : (sequence of) {'x', 'y', 'z'}
             Direction(s) along which the grid is flipped.
-            Valid entries are 'x', 'y', 'z'.
 
         Returns
         -------
@@ -838,8 +846,7 @@ class Grid:
             Updated grid-based geometry.
 
         """
-        valid = ['x','y','z']
-        if not set(directions).issubset(valid):
+        if not set(directions).issubset(valid := ['x', 'y', 'z']):
             raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
 
 
@@ -852,7 +859,9 @@ class Grid:
                    )
 
 
-    def scale(self, cells: IntSequence, periodic: bool = True) -> 'Grid':
+    def scale(self,
+              cells: IntSequence,
+              periodic: bool = True) -> 'Grid':
         """
         Scale grid to new cells.
 
@@ -958,7 +967,9 @@ class Grid:
                    )
 
 
-    def rotate(self, R: Rotation, fill: int = None) -> 'Grid':
+    def rotate(self,
+               R: Rotation,
+               fill: int = None) -> 'Grid':
         """
         Rotate grid (pad if required).
 
@@ -1049,7 +1060,9 @@ class Grid:
                    )
 
 
-    def substitute(self, from_material: IntSequence, to_material: IntSequence) -> 'Grid':
+    def substitute(self,
+                   from_material: IntSequence,
+                   to_material: IntSequence) -> 'Grid':
         """
         Substitute material indices.
 
@@ -1150,7 +1163,9 @@ class Grid:
                    )
 
 
-    def get_grain_boundaries(self, periodic: bool = True, directions: Sequence[str] = 'xyz'):
+    def get_grain_boundaries(self,
+                             periodic: bool = True,
+                             directions: Sequence[str] = 'xyz') -> VTK:
         """
         Create VTK unstructured grid containing grain boundaries.
 
@@ -1158,9 +1173,9 @@ class Grid:
         ----------
         periodic : bool, optional
             Assume grid to be periodic. Defaults to True.
-        directions : (sequence of) string, optional
+        directions : (sequence of) {'x', 'y', 'z'}, optional
             Direction(s) along which the boundaries are determined.
-            Valid entries are 'x', 'y', 'z'. Defaults to 'xyz'.
+            Defaults to 'xyz'.
 
         Returns
         -------
@@ -1168,8 +1183,7 @@ class Grid:
             VTK-based geometry of grain boundary network.
 
         """
-        valid = ['x','y','z']
-        if not set(directions).issubset(valid):
+        if not set(directions).issubset(valid := ['x', 'y', 'z']):
             raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
 
         o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],
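Note on the direction checks in mirror/flip/get_grain_boundaries above: the temporary `valid` list is bound inside the condition with an assignment expression (Python >= 3.8), and the allowed values move from the docstring body into the `{'x', 'y', 'z'}` type field. A standalone sketch of the validation (the function name is illustrative):

    def check_directions(directions) -> None:
        """Reject any entry outside 'x', 'y', 'z'."""
        if not set(directions).issubset(valid := ['x', 'y', 'z']):
            raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')

    check_directions('xz')                # ok: a string is a sequence of its characters
    try:
        check_directions(['x', 'w'])
    except ValueError as err:
        print(err)                        # invalid direction {'w'} specified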
@@ -1,7 +1,7 @@
 import re
 import copy
 from pathlib import Path
-from typing import Union, Optional, Tuple, List
+from typing import Union, Tuple, List
 
 import pandas as pd
 import numpy as np
@@ -12,7 +12,10 @@ from . import util
 class Table:
     """Manipulate multi-dimensional spreadsheet-like data."""
 
-    def __init__(self, data: np.ndarray, shapes: dict, comments: Optional[Union[str, list]] = None):
+    def __init__(self,
+                 data: np.ndarray,
+                 shapes: dict,
+                 comments: Union[str, list] = None):
         """
         New spreadsheet.
 
@@ -41,7 +44,8 @@ class Table:
         return '\n'.join(['# '+c for c in self.comments])+'\n'+data_repr
 
 
-    def __getitem__(self, item: Union[slice, Tuple[slice, ...]]) -> 'Table':
+    def __getitem__(self,
+                    item: Union[slice, Tuple[slice, ...]]) -> 'Table':
         """
         Slice the Table according to item.
 
@@ -100,7 +104,9 @@ class Table:
     copy = __copy__
 
 
-    def _label(self, what: Union[str, List[str]], how: str) -> List[str]:
+    def _label(self,
+               what: Union[str, List[str]],
+               how: str) -> List[str]:
         """
         Expand labels according to data shape.
 
@@ -131,7 +137,8 @@ class Table:
         return labels
 
 
-    def _relabel(self, how: str):
+    def _relabel(self,
+                 how: str):
         """
         Modify labeling of data in-place.
 
@@ -147,7 +154,10 @@ class Table:
         self.data.columns = self._label(self.shapes,how) #type: ignore
 
 
-    def _add_comment(self, label: str, shape: Tuple[int, ...], info: Optional[str]):
+    def _add_comment(self,
+                     label: str,
+                     shape: Tuple[int, ...],
+                     info: str = None):
         if info is not None:
             specific = f'{label}{" "+str(shape) if np.prod(shape,dtype=int) > 1 else ""}: {info}'
             general  = util.execution_stamp('Table')
@@ -309,8 +319,7 @@ class Table:
         data = np.loadtxt(content)
 
         shapes = {'eu':3, 'pos':2, 'IQ':1, 'CI':1, 'ID':1, 'intensity':1, 'fit':1}
-        remainder = data.shape[1]-sum(shapes.values())
-        if remainder > 0:               # 3.8 can do: if (remainder := data.shape[1]-sum(shapes.values())) > 0
+        if (remainder := data.shape[1]-sum(shapes.values())) > 0:
             shapes['unknown'] = remainder
 
         return Table(data,shapes,comments)
@@ -321,7 +330,8 @@ class Table:
         return list(self.shapes)
 
 
-    def get(self, label: str) -> np.ndarray:
+    def get(self,
+            label: str) -> np.ndarray:
         """
         Get column data.
 
@@ -341,7 +351,10 @@ class Table:
         return data.astype(type(data.flatten()[0]))
 
 
-    def set(self, label: str, data: np.ndarray, info: str = None) -> 'Table':
+    def set(self,
+            label: str,
+            data: np.ndarray,
+            info: str = None) -> 'Table':
         """
         Set column data.
 
@@ -362,8 +375,7 @@ class Table:
         """
         dup = self.copy()
         dup._add_comment(label, data.shape[1:], info)
-        m = re.match(r'(.*)\[((\d+,)*(\d+))\]',label)
-        if m:
+        if m := re.match(r'(.*)\[((\d+,)*(\d+))\]',label):
             key = m.group(1)
             idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(","))),
                                        self.shapes[key])
@@ -374,7 +386,10 @@ class Table:
         return dup
 
 
-    def add(self, label: str, data: np.ndarray, info: str = None) -> 'Table':
+    def add(self,
+            label: str,
+            data: np.ndarray,
+            info: str = None) -> 'Table':
         """
         Add column data.
 
@@ -406,7 +421,8 @@ class Table:
         return dup
 
 
-    def delete(self, label: str) -> 'Table':
+    def delete(self,
+               label: str) -> 'Table':
         """
         Delete column data.
 
@@ -427,7 +443,10 @@ class Table:
         return dup
 
 
-    def rename(self, old: Union[str, List[str]], new: Union[str, List[str]], info: str = None) -> 'Table':
+    def rename(self,
+               old: Union[str, List[str]],
+               new: Union[str, List[str]],
+               info: str = None) -> 'Table':
         """
         Rename column data.
 
@@ -453,7 +472,9 @@ class Table:
         return dup
 
 
-    def sort_by(self, labels: Union[str, List[str]], ascending: Union[bool, List[bool]] = True) -> 'Table':
+    def sort_by(self,
+                labels: Union[str, List[str]],
+                ascending: Union[bool, List[bool]] = True) -> 'Table':
         """
         Sort table by values of given labels.
 
@@ -472,8 +493,7 @@ class Table:
         """
         labels_ = [labels] if isinstance(labels,str) else labels.copy()
         for i,l in enumerate(labels_):
-            m = re.match(r'(.*)\[((\d+,)*(\d+))\]',l)
-            if m:
+            if m := re.match(r'(.*)\[((\d+,)*(\d+))\]',l):
                 idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(','))),
                                            self.shapes[m.group(1)])
                 labels_[i] = f'{1+idx}_{m.group(1)}'
@@ -486,7 +506,8 @@ class Table:
         return dup
 
 
-    def append(self, other: 'Table') -> 'Table':
+    def append(self,
+               other: 'Table') -> 'Table':
         """
         Append other table vertically (similar to numpy.vstack).
 
@@ -505,13 +526,14 @@ class Table:
         """
         if self.shapes != other.shapes or not self.data.columns.equals(other.data.columns):
             raise KeyError('Labels or shapes or order do not match')
-        else:
-            dup = self.copy()
-            dup.data = dup.data.append(other.data,ignore_index=True)
-            return dup
+
+        dup = self.copy()
+        dup.data = dup.data.append(other.data,ignore_index=True)
+        return dup
 
 
-    def join(self, other: 'Table') -> 'Table':
+    def join(self,
+             other: 'Table') -> 'Table':
         """
         Append other table horizontally (similar to numpy.hstack).
 
@@ -530,7 +552,7 @@ class Table:
         """
         if set(self.shapes) & set(other.shapes) or self.data.shape[0] != other.data.shape[0]:
             raise KeyError('Duplicated keys or row count mismatch')
-        else:
-            dup = self.copy()
-            dup.data = dup.data.join(other.data)
-            for key in other.shapes:
+
+        dup = self.copy()
+        dup.data = dup.data.join(other.data)
+        for key in other.shapes:
@@ -538,7 +560,8 @@ class Table:
         return dup
 
 
-    def save(self, fname: FileHandle):
+    def save(self,
+             fname: FileHandle):
         """
         Save as plain text file.
 
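Note on the walrus refactors in Table.set and Table.sort_by above: the regular-expression match is bound inside the `if`, and labels that address a single component such as 'F[1,1]' are mapped to the flat '<index+1>_name' column naming. A reduced sketch of that parsing outside the class (the helper name is illustrative):

    import re
    import numpy as np

    def flat_label(label: str, shapes: dict) -> str:
        """Translate 'name[i,j,...]' into the '<flat index + 1>_name' column name."""
        if m := re.match(r'(.*)\[((\d+,)*(\d+))\]', label):
            key = m.group(1)
            idx = np.ravel_multi_index(tuple(map(int, m.group(2).split(','))), shapes[key])
            return f'{1+idx}_{key}'
        return label

    print(flat_label('F[1,1]', {'F': (3,3)}))   # '5_F'
    print(flat_label('v',      {'v': (1,)}))    # 'v'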
@@ -22,7 +22,8 @@ class VTK:
     High-level interface to VTK.
     """
 
-    def __init__(self, vtk_data: vtk.vtkDataSet):
+    def __init__(self,
+                 vtk_data: vtk.vtkDataSet):
         """
         New spatial visualization.
 
@@ -38,7 +39,9 @@ class VTK:
 
 
     @staticmethod
-    def from_image_data(cells: IntSequence, size: FloatSequence, origin: FloatSequence = np.zeros(3)) -> 'VTK':
+    def from_image_data(cells: IntSequence,
+                        size: FloatSequence,
+                        origin: FloatSequence = np.zeros(3)) -> 'VTK':
         """
         Create VTK of type vtk.vtkImageData.
 
@@ -68,7 +71,9 @@ class VTK:
 
 
     @staticmethod
-    def from_rectilinear_grid(grid: np.ndarray, size: FloatSequence, origin: FloatSequence = np.zeros(3)) -> 'VTK':
+    def from_rectilinear_grid(grid: np.ndarray,
+                              size: FloatSequence,
+                              origin: FloatSequence = np.zeros(3)) -> 'VTK':
         """
         Create VTK of type vtk.vtkRectilinearGrid.
 
@@ -100,7 +105,9 @@ class VTK:
 
 
     @staticmethod
-    def from_unstructured_grid(nodes: np.ndarray, connectivity: np.ndarray, cell_type: str) -> 'VTK':
+    def from_unstructured_grid(nodes: np.ndarray,
+                               connectivity: np.ndarray,
+                               cell_type: str) -> 'VTK':
         """
         Create VTK of type vtk.vtkUnstructuredGrid.
 
@@ -195,8 +202,7 @@ class VTK:
         """
         if not os.path.isfile(fname):                        # vtk has a strange error handling
             raise FileNotFoundError(f'No such file: {fname}')
-        ext = Path(fname).suffix
-        if ext == '.vtk' or dataset_type is not None:
+        if (ext := Path(fname).suffix) == '.vtk' or dataset_type is not None:
             reader = vtk.vtkGenericDataObjectReader()
             reader.SetFileName(str(fname))
             if dataset_type is None:
@@ -238,7 +244,11 @@ class VTK:
         def _write(writer):
             """Wrapper for parallel writing."""
             writer.Write()
-    def save(self, fname: Union[str, Path], parallel: bool = True, compress: bool = True):
+
+    def save(self,
+             fname: Union[str, Path],
+             parallel: bool = True,
+             compress: bool = True):
         """
         Save as VTK file.
 
@@ -284,7 +294,9 @@ class VTK:
 
     # Check https://blog.kitware.com/ghost-and-blanking-visibility-changes/ for missing data
    # Needs support for damask.Table
-    def add(self, data: Union[np.ndarray, np.ma.MaskedArray], label: str = None):
+    def add(self,
+            data: Union[np.ndarray, np.ma.MaskedArray],
+            label: str = None):
         """
         Add data to either cells or points.
 
@@ -331,7 +343,8 @@ class VTK:
             raise TypeError
 
 
-    def get(self, label: str) -> np.ndarray:
+    def get(self,
+            label: str) -> np.ndarray:
         """
         Get either cell or point data.
 
@@ -383,7 +396,8 @@ class VTK:
             return []
 
 
-    def set_comments(self, comments: Union[str, List[str]]):
+    def set_comments(self,
+                     comments: Union[str, List[str]]):
         """
         Set comments.
 
@@ -400,7 +414,8 @@ class VTK:
         self.vtk_data.GetFieldData().AddArray(s)
 
 
-    def add_comments(self, comments: Union[str, List[str]]):
+    def add_comments(self,
+                     comments: Union[str, List[str]]):
         """
         Add comments.
 
@@ -440,7 +455,7 @@ class VTK:
             width  = tk.winfo_screenwidth()
             height = tk.winfo_screenheight()
             tk.destroy()
-        except Exception as e:
+        except Exception:
             width  = 1024
             height =  768
 
@ -20,7 +20,9 @@ import numpy as _np
|
||||||
from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence
|
from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence
|
||||||
|
|
||||||
|
|
||||||
def _ks(size: _FloatSequence, cells: _IntSequence, first_order: bool = False) -> _np.ndarray:
|
def _ks(size: _FloatSequence,
|
||||||
|
cells: _IntSequence,
|
||||||
|
first_order: bool = False) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Get wave numbers operator.
|
Get wave numbers operator.
|
||||||
|
|
||||||
|
@ -47,7 +49,8 @@ def _ks(size: _FloatSequence, cells: _IntSequence, first_order: bool = False) ->
|
||||||
return _np.stack(_np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij'), axis=-1)
|
return _np.stack(_np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij'), axis=-1)
|
||||||
|
|
||||||
|
|
||||||
def curl(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
def curl(size: _FloatSequence,
|
||||||
|
f: _np.ndarray) -> _np.ndarray:
|
||||||
u"""
|
u"""
|
||||||
Calculate curl of a vector or tensor field in Fourier space.
|
Calculate curl of a vector or tensor field in Fourier space.
|
||||||
|
|
||||||
|
@ -78,7 +81,8 @@ def curl(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
||||||
return _np.fft.irfftn(curl_,axes=(0,1,2),s=f.shape[:3])
|
return _np.fft.irfftn(curl_,axes=(0,1,2),s=f.shape[:3])
|
||||||
|
|
||||||
|
|
||||||
def divergence(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
def divergence(size: _FloatSequence,
|
||||||
|
f: _np.ndarray) -> _np.ndarray:
|
||||||
u"""
|
u"""
|
||||||
Calculate divergence of a vector or tensor field in Fourier space.
|
Calculate divergence of a vector or tensor field in Fourier space.
|
||||||
|
|
||||||
|
@ -105,7 +109,8 @@ def divergence(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
||||||
return _np.fft.irfftn(div_,axes=(0,1,2),s=f.shape[:3])
|
return _np.fft.irfftn(div_,axes=(0,1,2),s=f.shape[:3])
|
||||||
|
|
||||||
|
|
||||||
def gradient(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
def gradient(size: _FloatSequence,
|
||||||
|
f: _np.ndarray) -> _np.ndarray:
|
||||||
u"""
|
u"""
|
||||||
Calculate gradient of a scalar or vector field in Fourier space.
|
Calculate gradient of a scalar or vector field in Fourier space.
|
||||||
|
|
||||||
|
@@ -163,7 +168,8 @@ def coordinates0_point(cells: _IntSequence,
                      axis = -1)


-def displacement_fluct_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
+def displacement_fluct_point(size: _FloatSequence,
+                             F: _np.ndarray) -> _np.ndarray:
     """
     Cell center displacement field from fluctuation part of the deformation gradient field.

@@ -195,7 +201,8 @@ def displacement_fluct_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
     return _np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3])


-def displacement_avg_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
+def displacement_avg_point(size: _FloatSequence,
+                           F: _np.ndarray) -> _np.ndarray:
     """
     Cell center displacement field from average part of the deformation gradient field.

@@ -216,7 +223,8 @@ def displacement_avg_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
     return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_point(F.shape[:3],size))


-def displacement_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
+def displacement_point(size: _FloatSequence,
+                       F: _np.ndarray) -> _np.ndarray:
     """
     Cell center displacement field from deformation gradient field.
@@ -236,7 +244,9 @@ def displacement_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
     return displacement_avg_point(size,F) + displacement_fluct_point(size,F)


-def coordinates_point(size: _FloatSequence, F: _np.ndarray, origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
+def coordinates_point(size: _FloatSequence,
+                      F: _np.ndarray,
+                      origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
     """
     Cell center positions.
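The cell-center displacement is the sum of an average part, computed from the mean deformation gradient, and a periodic fluctuation part; the deformed positions are the undeformed positions plus that displacement. A hedged sketch of these relations, assuming the `damask.grid_filters` names introduced in this diff are importable:

    import numpy as np
    import damask.grid_filters as gf

    size = np.array([1.0, 1.0, 1.0])
    F = np.broadcast_to(np.eye(3), (8, 8, 8, 3, 3)).copy()
    F[..., 0, 1] = 0.05                                     # homogeneous simple shear

    u = gf.displacement_point(size, F)                      # average part + fluctuation part
    assert np.allclose(u, gf.displacement_avg_point(size, F) + gf.displacement_fluct_point(size, F))

    x = gf.coordinates_point(size, F)                       # deformed cell-center positions
    assert np.allclose(x, gf.coordinates0_point(F.shape[:3], size) + u)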
@@ -335,7 +345,8 @@ def coordinates0_node(cells: _IntSequence,
                      axis = -1)


-def displacement_fluct_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
+def displacement_fluct_node(size: _FloatSequence,
+                            F: _np.ndarray) -> _np.ndarray:
     """
     Nodal displacement field from fluctuation part of the deformation gradient field.

@@ -355,7 +366,8 @@ def displacement_fluct_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
     return point_to_node(displacement_fluct_point(size,F))


-def displacement_avg_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
+def displacement_avg_node(size: _FloatSequence,
+                          F: _np.ndarray) -> _np.ndarray:
     """
     Nodal displacement field from average part of the deformation gradient field.

@@ -376,7 +388,8 @@ def displacement_avg_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
     return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_node(F.shape[:3],size))


-def displacement_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
+def displacement_node(size: _FloatSequence,
+                      F: _np.ndarray) -> _np.ndarray:
     """
     Nodal displacement field from deformation gradient field.

@@ -396,7 +409,9 @@ def displacement_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
     return displacement_avg_node(size,F) + displacement_fluct_node(size,F)


-def coordinates_node(size: _FloatSequence, F: _np.ndarray, origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
+def coordinates_node(size: _FloatSequence,
+                     F: _np.ndarray,
+                     origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
     """
     Nodal positions.
@@ -526,7 +541,9 @@ def coordinates0_valid(coordinates0: _np.ndarray) -> bool:
         return False


-def regrid(size: _FloatSequence, F: _np.ndarray, cells: _IntSequence) -> _np.ndarray:
+def regrid(size: _FloatSequence,
+           F: _np.ndarray,
+           cells: _IntSequence) -> _np.ndarray:
     """
     Return mapping from coordinates in deformed configuration to a regular grid.
@@ -122,7 +122,9 @@ def rotation(T: _np.ndarray) -> _rotation.Rotation:
     return _rotation.Rotation.from_matrix(_polar_decomposition(T,'R')[0])


-def strain(F: _np.ndarray, t: str, m: float) -> _np.ndarray:
+def strain(F: _np.ndarray,
+           t: str,
+           m: float) -> _np.ndarray:
     """
     Calculate strain tensor (Seth–Hill family).
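A Seth–Hill strain of order m is E = (U^{2m} − I)/(2m) (or the V-based analogue), with the logarithmic strain ln U recovered for m = 0. The sketch below illustrates the idea via the eigendecomposition of the Cauchy–Green tensors; it is a hedged reimplementation, and the sign handling for negative m in the actual `damask.mechanics.strain` may be organized differently.

    import numpy as np

    def seth_hill_strain(F, t='V', m=1.0):
        """Seth-Hill strain from the deformation gradient (sketch of the formula above)."""
        if t == 'V':
            w, n = np.linalg.eigh(np.einsum('...ik,...jk', F, F))   # left Cauchy-Green  V^2
        else:
            w, n = np.linalg.eigh(np.einsum('...ki,...kj', F, F))   # right Cauchy-Green U^2
        lam = 0.5*np.log(w) if m == 0.0 else (w**m - 1.0)/(2.0*m)   # stretch^(2m) has eigenvalues w^m
        return np.einsum('...k,...ik,...jk', lam, n, n)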
@@ -162,7 +164,8 @@ def strain(F: _np.ndarray, t: str, m: float) -> _np.ndarray:
     return eps


-def stress_Cauchy(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
+def stress_Cauchy(P: _np.ndarray,
+                  F: _np.ndarray) -> _np.ndarray:
     """
     Calculate the Cauchy stress (true stress).

@@ -184,7 +187,8 @@ def stress_Cauchy(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
     return _tensor.symmetric(_np.einsum('...,...ij,...kj',1.0/_np.linalg.det(F),P,F))


-def stress_second_Piola_Kirchhoff(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
+def stress_second_Piola_Kirchhoff(P: _np.ndarray,
+                                  F: _np.ndarray) -> _np.ndarray:
     """
     Calculate the second Piola-Kirchhoff stress.
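Both stress measures derive from the first Piola–Kirchhoff stress P and the deformation gradient F: the Cauchy stress is sigma = det(F)^-1 P F^T (the einsum shown in the context line above), and the second Piola–Kirchhoff stress is S = F^-1 P, each symmetrized. A minimal sketch of these conversions:

    import numpy as np

    def stress_Cauchy(P, F):
        """sigma = det(F)^-1 * P * F^T, symmetrized (sketch)."""
        s = np.einsum('...,...ij,...kj', 1.0/np.linalg.det(F), P, F)
        return 0.5*(s + np.swapaxes(s, -1, -2))

    def stress_second_Piola_Kirchhoff(P, F):
        """S = F^-1 * P, symmetrized; pull-back of P to the reference configuration (sketch)."""
        S = np.einsum('...ij,...jk', np.linalg.inv(F), P)
        return 0.5*(S + np.swapaxes(S, -1, -2))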
@@ -243,7 +247,8 @@ def stretch_right(T: _np.ndarray) -> _np.ndarray:
     return _polar_decomposition(T,'U')[0]


-def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
+def _polar_decomposition(T: _np.ndarray,
+                         requested: _Sequence[str]) -> tuple:
     """
     Perform singular value decomposition.

@@ -251,7 +256,7 @@ def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
     ----------
     T : numpy.ndarray, shape (...,3,3)
         Tensor of which the singular values are computed.
-    requested : iterable of str
+    requested : sequence of {'R', 'U', 'V'}
         Requested outputs: ‘R’ for the rotation tensor,
         ‘V’ for left stretch tensor and ‘U’ for right stretch tensor.
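The polar decomposition T = R U = V R follows directly from the singular value decomposition T = u diag(s) v^T: R = u v^T, U = v diag(s) v^T, V = u diag(s) u^T. A hedged, self-contained sketch (the actual helper returns only the requested subset):

    import numpy as np

    def polar_decomposition(T):
        """R (rotation), U (right stretch), V (left stretch) of T via SVD (sketch)."""
        u, s, vh = np.linalg.svd(T)
        R = u @ vh
        U = vh.swapaxes(-1, -2) @ (s[..., None]*vh)     # v diag(s) v^T
        V = u @ (s[..., None]*u.swapaxes(-1, -2))       # u diag(s) u^T
        return R, U, V

    F = np.array([[1.0, 0.1, 0.0],
                  [0.0, 1.0, 0.0],
                  [0.0, 0.0, 1.0]])
    R, U, V = polar_decomposition(F)
    assert np.allclose(R @ U, F) and np.allclose(V @ R, F)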
@@ -273,7 +278,8 @@ def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
     return tuple(output)


-def _equivalent_Mises(T_sym: _np.ndarray, s: float) -> _np.ndarray:
+def _equivalent_Mises(T_sym: _np.ndarray,
+                      s: float) -> _np.ndarray:
     """
     Base equation for Mises equivalent of a stress or strain tensor.
@@ -10,7 +10,9 @@ from . import util as _util
 from . import grid_filters as _grid_filters


-def from_random(size: _FloatSequence, N_seeds: int, cells: _IntSequence = None,
+def from_random(size: _FloatSequence,
+                N_seeds: int,
+                cells: _IntSequence = None,
                 rng_seed=None) -> _np.ndarray:
     """
     Place seeds randomly in space.

@@ -46,8 +48,12 @@ def from_random(size: _FloatSequence, N_seeds: int, cells: _IntSequence = None,
     return coords


-def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, distance: float,
-                      periodic: bool = True, rng_seed=None) -> _np.ndarray:
+def from_Poisson_disc(size: _FloatSequence,
+                      N_seeds: int,
+                      N_candidates: int,
+                      distance: float,
+                      periodic: bool = True,
+                      rng_seed=None) -> _np.ndarray:
     """
     Place seeds according to a Poisson disc distribution.
|
||||||
tree = _spatial.cKDTree(coords[:s],boxsize=size) if periodic else \
|
tree = _spatial.cKDTree(coords[:s],boxsize=size) if periodic else \
|
||||||
_spatial.cKDTree(coords[:s])
|
_spatial.cKDTree(coords[:s])
|
||||||
distances = tree.query(candidates)[0]
|
distances = tree.query(candidates)[0]
|
||||||
best = distances.argmax()
|
if distances.max() > distance: # require minimum separation
|
||||||
if distances[best] > distance: # require minimum separation
|
|
||||||
i = 0
|
i = 0
|
||||||
coords[s] = candidates[best] # maximum separation to existing point cloud
|
coords[s] = candidates[distances.argmax()] # maximum separation to existing point cloud
|
||||||
s += 1
|
s += 1
|
||||||
progress.update(s)
|
progress.update(s)
|
||||||
|
|
||||||
|
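The acceptance step above is classic dart throwing: draw a batch of candidates, take the one farthest from the already-placed seeds, and accept it only if that distance exceeds the required spacing. A hedged, non-periodic sketch of the same loop (it can spin indefinitely if `distance` is too large for the box):

    import numpy as np
    from scipy import spatial

    def poisson_disc(size, N_seeds, N_candidates, distance, rng_seed=None):
        """Dart-throwing sketch of the acceptance logic shown in the diff (periodicity omitted)."""
        rng = np.random.default_rng(rng_seed)
        coords = np.empty((N_seeds, 3))
        coords[0] = rng.random(3) * np.asarray(size)
        s = 1
        while s < N_seeds:
            candidates = rng.random((N_candidates, 3)) * np.asarray(size)
            distances = spatial.cKDTree(coords[:s]).query(candidates)[0]
            if distances.max() > distance:                     # require minimum separation
                coords[s] = candidates[distances.argmax()]     # farthest from existing cloud
                s += 1
        return coords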
@@ -99,8 +104,11 @@ def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, distance: float,
     return coords


-def from_grid(grid, selection: _IntSequence = None, invert_selection: bool = False,
-              average: bool = False, periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:
+def from_grid(grid,
+              selection: _IntSequence = None,
+              invert_selection: bool = False,
+              average: bool = False,
+              periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:
     """
     Create seeds from grid description.
@@ -45,7 +45,8 @@ def eigenvalues(T_sym: _np.ndarray) -> _np.ndarray:
     return _np.linalg.eigvalsh(symmetric(T_sym))


-def eigenvectors(T_sym: _np.ndarray, RHS: bool = False) -> _np.ndarray:
+def eigenvectors(T_sym: _np.ndarray,
+                 RHS: bool = False) -> _np.ndarray:
     """
     Eigenvectors of a symmetric tensor.
@@ -63,14 +64,14 @@ def eigenvectors(T_sym: _np.ndarray, RHS: bool = False) -> _np.ndarray:
         associated eigenvalues.

     """
-    (u,v) = _np.linalg.eigh(symmetric(T_sym))
+    _,v = _np.linalg.eigh(symmetric(T_sym))

-    if RHS:
-        v[_np.linalg.det(v) < 0.0,:,2] *= -1.0
+    if RHS: v[_np.linalg.det(v) < 0.0,:,2] *= -1.0
     return v


-def spherical(T: _np.ndarray, tensor: bool = True) -> _np.ndarray:
+def spherical(T: _np.ndarray,
+              tensor: bool = True) -> _np.ndarray:
     """
     Calculate spherical part of a tensor.
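The `RHS` option flips the third eigenvector wherever the eigenvector matrix returned by `eigh` has negative determinant, so the columns form a right-handed basis. A minimal single-tensor illustration of that flip:

    import numpy as np

    T_sym = np.diag([3.0, 2.0, 1.0])          # any symmetric tensor
    _, v = np.linalg.eigh(T_sym)              # columns are eigenvectors, ascending eigenvalues
    if np.linalg.det(v) < 0.0:
        v[:, 2] *= -1.0                       # flip last column -> right-handed set
    assert np.isclose(np.linalg.det(v), 1.0)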
@@ -7,16 +7,16 @@ import subprocess
 import shlex
 import re
 import fractions
-import collections.abc as abc
+from collections import abc
 from functools import reduce
-from typing import Union, Tuple, Iterable, Callable, Dict, List, Any, Literal, Optional
+from typing import Union, Tuple, Iterable, Callable, Dict, List, Any, Literal, SupportsIndex, Sequence
 from pathlib import Path

 import numpy as np
 import h5py

 from . import version
-from ._typehints import FloatSequence
+from ._typehints import IntSequence, FloatSequence

 # limit visibility
 __all__=[
@@ -54,7 +54,8 @@ _colors = {
 ####################################################################################################
 # Functions
 ####################################################################################################
-def srepr(msg, glue: str = '\n') -> str:
+def srepr(msg,
+          glue: str = '\n') -> str:
     r"""
     Join items with glue string.
@@ -148,7 +149,10 @@ def strikeout(msg) -> str:
     return _colors['crossout']+srepr(msg)+_colors['end_color']


-def run(cmd: str, wd: str = './', env: Dict[str, str] = None, timeout: int = None) -> Tuple[str, str]:
+def run(cmd: str,
+        wd: str = './',
+        env: Dict[str, str] = None,
+        timeout: int = None) -> Tuple[str, str]:
    """
    Run a command.
@@ -233,7 +237,7 @@ def show_progress(iterable: Iterable,
     else:
         if N_iter is None:
             raise ValueError('N_iter not given')
-        else:
-            N = N_iter
+
+        N = N_iter

     if N <= 1:
@@ -301,16 +305,20 @@ def project_equal_angle(vector: np.ndarray,
     normalize : bool
         Ensure unit length of input vector. Defaults to True.
     keepdims : bool
-        Maintain three-dimensional output coordinates. Defaults to False.
-        Two-dimensional output uses right-handed frame spanned by
-        the next and next-next axis relative to the projection direction,
-        e.g. x-y when projecting along z and z-x when projecting along y.
+        Maintain three-dimensional output coordinates.
+        Defaults to False.

     Returns
     -------
     coordinates : numpy.ndarray, shape (...,2 | 3)
         Projected coordinates.

+    Notes
+    -----
+    Two-dimensional output uses right-handed frame spanned by
+    the next and next-next axis relative to the projection direction,
+    e.g. x-y when projecting along z and z-x when projecting along y.
+
     Examples
     --------
     >>> import damask
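For projection along z, the equal-angle (stereographic) map sends a unit vector to (x, y)/(1 + |z|), while the equal-area variant divides by sqrt(1 + |z|) instead. A minimal sketch of the equal-angle case (axis handling for other projection directions omitted):

    import numpy as np

    def project_equal_angle(v):
        """Stereographic projection onto the x-y plane for projection along z (sketch)."""
        v = np.asarray(v, dtype=float)
        v = v / np.linalg.norm(v, axis=-1, keepdims=True)
        return v[..., :2] / (1.0 + np.abs(v[..., 2:3]))

    print(project_equal_angle([0.0, 0.0, 1.0]))   # the pole projects to the origin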
|
@ -345,16 +353,21 @@ def project_equal_area(vector: np.ndarray,
|
||||||
normalize : bool
|
normalize : bool
|
||||||
Ensure unit length of input vector. Defaults to True.
|
Ensure unit length of input vector. Defaults to True.
|
||||||
keepdims : bool
|
keepdims : bool
|
||||||
Maintain three-dimensional output coordinates. Defaults to False.
|
Maintain three-dimensional output coordinates.
|
||||||
Two-dimensional output uses right-handed frame spanned by
|
Defaults to False.
|
||||||
the next and next-next axis relative to the projection direction,
|
|
||||||
e.g. x-y when projecting along z and z-x when projecting along y.
|
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
coordinates : numpy.ndarray, shape (...,2 | 3)
|
coordinates : numpy.ndarray, shape (...,2 | 3)
|
||||||
Projected coordinates.
|
Projected coordinates.
|
||||||
|
|
||||||
|
Notes
|
||||||
|
-----
|
||||||
|
Two-dimensional output uses right-handed frame spanned by
|
||||||
|
the next and next-next axis relative to the projection direction,
|
||||||
|
e.g. x-y when projecting along z and z-x when projecting along y.
|
||||||
|
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
--------
|
--------
|
||||||
>>> import damask
|
>>> import damask
|
||||||
|
@@ -373,14 +386,17 @@ def project_equal_area(vector: np.ndarray,
     return np.roll(np.block([v[...,:2]/np.sqrt(1.0+np.abs(v[...,2:3])),np.zeros_like(v[...,2:3])]),
                    -shift if keepdims else 0,axis=-1)[...,:3 if keepdims else 2]

-def execution_stamp(class_name: str, function_name: str = None) -> str:
+def execution_stamp(class_name: str,
+                    function_name: str = None) -> str:
     """Timestamp the execution of a (function within a) class."""
     now = datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S%z')
     _function_name = '' if function_name is None else f'.{function_name}'
     return f'damask.{class_name}{_function_name} v{version} ({now})'


-def hybrid_IA(dist: np.ndarray, N: int, rng_seed = None) -> np.ndarray:
+def hybrid_IA(dist: np.ndarray,
+              N: int,
+              rng_seed: Union[int, IntSequence] = None) -> np.ndarray:
     """
     Hybrid integer approximation.
@@ -411,7 +427,7 @@ def hybrid_IA(dist: np.ndarray, N: int, rng_seed = None) -> np.ndarray:
 def shapeshifter(fro: Tuple[int, ...],
                  to: Tuple[int, ...],
                  mode: Literal['left','right'] = 'left',
-                 keep_ones: bool = False) -> Tuple[Optional[int], ...]:
+                 keep_ones: bool = False) -> Sequence[SupportsIndex]:
     """
     Return dimensions that reshape 'fro' to become broadcastable to 'to'.

@@ -447,7 +463,7 @@ def shapeshifter(fro: Tuple[int, ...],


     """
-    if not len(fro) and not len(to): return ()
+    if len(fro) == 0 and len(to) == 0: return ()

     beg = dict(left ='(^.*\\b)',
                right='(^.*?\\b)')
|
@ -455,8 +471,8 @@ def shapeshifter(fro: Tuple[int, ...],
|
||||||
right='(.*?\\b)')
|
right='(.*?\\b)')
|
||||||
end = dict(left ='(.*?$)',
|
end = dict(left ='(.*?$)',
|
||||||
right='(.*$)')
|
right='(.*$)')
|
||||||
fro = (1,) if not len(fro) else fro
|
fro = (1,) if len(fro) == 0 else fro
|
||||||
to = (1,) if not len(to) else to
|
to = (1,) if len(to) == 0 else to
|
||||||
try:
|
try:
|
||||||
match = re.match(beg[mode]
|
match = re.match(beg[mode]
|
||||||
+f',{sep[mode]}'.join(map(lambda x: f'{x}'
|
+f',{sep[mode]}'.join(map(lambda x: f'{x}'
|
||||||
|
@ -467,13 +483,14 @@ def shapeshifter(fro: Tuple[int, ...],
|
||||||
grp = match.groups()
|
grp = match.groups()
|
||||||
except AssertionError:
|
except AssertionError:
|
||||||
raise ValueError(f'Shapes can not be shifted {fro} --> {to}')
|
raise ValueError(f'Shapes can not be shifted {fro} --> {to}')
|
||||||
fill: Tuple[Optional[int], ...] = ()
|
fill: Any = ()
|
||||||
for g,d in zip(grp,fro+(None,)):
|
for g,d in zip(grp,fro+(None,)):
|
||||||
fill += (1,)*g.count(',')+(d,)
|
fill += (1,)*g.count(',')+(d,)
|
||||||
return fill[:-1]
|
return fill[:-1]
|
||||||
|
|
||||||
|
|
||||||
def shapeblender(a: Tuple[int, ...], b: Tuple[int, ...]) -> Tuple[int, ...]:
|
def shapeblender(a: Tuple[int, ...],
|
||||||
|
b: Tuple[int, ...]) -> Sequence[SupportsIndex]:
|
||||||
"""
|
"""
|
||||||
Return a shape that overlaps the rightmost entries of 'a' with the leftmost of 'b'.
|
Return a shape that overlaps the rightmost entries of 'a' with the leftmost of 'b'.
|
||||||
|
|
||||||
|
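`shapeshifter` returns the dimensions (with ones inserted) that make one shape broadcastable against another, and `shapeblender` overlaps two shapes. A hedged usage sketch, assuming the helpers are importable from `damask.util`; the concrete result tuples follow the docstring examples in the repository and should be treated as illustrative:

    import numpy as np
    from damask import util

    a = np.ones((2, 3))
    b = np.zeros((2, 3, 4, 5))
    a_ = a.reshape(util.shapeshifter(a.shape, b.shape))   # -> shape (2, 3, 1, 1)
    print((a_ + b).shape)                                  # broadcasts to (2, 3, 4, 5)

    print(util.shapeblender((4, 4, 3), (3, 2, 1)))         # -> (4, 4, 3, 2, 1)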
@@ -517,7 +534,8 @@ def extend_docstring(extra_docstring: str) -> Callable:
     return _decorator


-def extended_docstring(f: Callable, extra_docstring: str) -> Callable:
+def extended_docstring(f: Callable,
+                       extra_docstring: str) -> Callable:
     """
     Decorator: Combine another function's docstring with a given docstring.
@@ -593,7 +611,9 @@ def DREAM3D_cell_data_group(fname: Union[str, Path]) -> str:
     return cell_data_group


-def Bravais_to_Miller(*, uvtw: np.ndarray = None, hkil: np.ndarray = None) -> np.ndarray:
+def Bravais_to_Miller(*,
+                      uvtw: np.ndarray = None,
+                      hkil: np.ndarray = None) -> np.ndarray:
     """
     Transform 4 Miller–Bravais indices to 3 Miller indices of crystal direction [uvw] or plane normal (hkl).
@@ -620,7 +640,9 @@ def Bravais_to_Miller(*, uvtw: np.ndarray = None, hkil: np.ndarray = None) -> np.ndarray:
     return np.einsum('il,...l',basis,axis)


-def Miller_to_Bravais(*, uvw: np.ndarray = None, hkl: np.ndarray = None) -> np.ndarray:
+def Miller_to_Bravais(*,
+                      uvw: np.ndarray = None,
+                      hkl: np.ndarray = None) -> np.ndarray:
     """
     Transform 3 Miller indices to 4 Miller–Bravais indices of crystal direction [uvtw] or plane normal (hkil).
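The underlying index relations are standard crystallography: for directions, U = u − t, V = v − t, W = w; for plane normals, i = −(h + k). A sketch of those relations (the library functions use a basis matrix and keyword-only arguments, and may scale the result differently):

    import numpy as np

    def bravais_to_miller_direction(uvtw):
        """[uvtw] -> [UVW]: U = u - t, V = v - t, W = w (sketch)."""
        u, v, t, w = np.asarray(uvtw, dtype=float)
        return np.array([u - t, v - t, w])

    def miller_to_bravais_plane(hkl):
        """(hkl) -> (hkil): i = -(h + k) (sketch)."""
        h, k, l = np.asarray(hkl, dtype=float)
        return np.array([h, k, -(h + k), l])

    print(bravais_to_miller_direction([1, 1, -2, 0]))   # -> [3. 3. 0.], i.e. parallel to [1 1 0]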
@@ -710,7 +732,10 @@ class ProgressBar:
     Works for 0-based loops, ETA is estimated by linear extrapolation.
     """

-    def __init__(self, total: int, prefix: str, bar_length: int):
+    def __init__(self,
+                 total: int,
+                 prefix: str,
+                 bar_length: int):
         """
         Set current time as basis for ETA estimation.
@@ -733,12 +758,12 @@ class ProgressBar:
         sys.stderr.write(f"{self.prefix} {'░'*self.bar_length} 0% ETA n/a")
         sys.stderr.flush()

-    def update(self, iteration: int) -> None:
+    def update(self,
+               iteration: int) -> None:

         fraction = (iteration+1) / self.total
-        filled_length = int(self.bar_length * fraction)

-        if filled_length > int(self.bar_length * self.fraction_last) or \
+        if (filled_length := int(self.bar_length * fraction)) > int(self.bar_length * self.fraction_last) or \
            datetime.datetime.now() - self.time_last_update > datetime.timedelta(seconds=10):
             self.time_last_update = datetime.datetime.now()
             bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
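Note on the rewritten condition: the walrus operator binds more loosely than the comparison, so without parentheses `filled_length` would capture the whole boolean expression instead of the integer; the reconstruction above therefore parenthesizes the assignment. A minimal illustration of the difference:

    bar_length, fraction, fraction_last = 20, 0.42, 0.30

    # without parentheses the walrus captures the entire boolean expression
    flag = (filled_length := bar_length * fraction > bar_length * fraction_last)
    print(filled_length)          # True  (a bool, not a length)

    # with parentheses it captures just the value being compared
    if (filled_length := int(bar_length * fraction)) > int(bar_length * fraction_last):
        print(filled_length)      # 8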
@@ -1,3 +1,5 @@
+[mypy]
+warn_redundant_casts = True
 [mypy-scipy.*]
 ignore_missing_imports = True
 [mypy-h5py.*]
@@ -14,6 +14,7 @@ module CPFEM
   use config
   use math
   use rotations
+  use polynomials
   use lattice
   use material
   use phase

@@ -83,6 +84,7 @@ subroutine CPFEM_initAll
   call config_init
   call math_init
   call rotations_init
+  call polynomials_init
   call lattice_init
   call discretization_marc_init
   call material_init(.false.)
@@ -16,6 +16,7 @@ module CPFEM2
   use config
   use math
   use rotations
+  use polynomials
   use lattice
   use material
   use phase

@@ -57,6 +58,7 @@ subroutine CPFEM_initAll
   call config_init
   call math_init
   call rotations_init
+  call polynomials_init
   call lattice_init
 #if defined(MESH)
   call discretization_mesh_init(restart=interface_restartInc>0)
@@ -191,8 +191,10 @@ logical function isScalar(line)

   character(len=*), intent(in) :: line

-  isScalar = (.not.isKeyValue(line) .and. .not.isKey(line) .and. .not.isListItem(line) &
-              .and. .not.isFlow(line))
+  isScalar = (.not. isKeyValue(line) .and. &
+              .not. isKey(line) .and. &
+              .not. isListItem(line) .and. &
+              .not. isFlow(line))

 end function isScalar
@@ -14,6 +14,7 @@
 #include "LAPACK_interface.f90"
 #include "math.f90"
 #include "rotations.f90"
+#include "polynomials.f90"
 #include "lattice.f90"
 #include "element.f90"
 #include "geometry_plastic_nonlocal.f90"
@@ -66,7 +66,7 @@ subroutine material_init(restart)
   print'(/,1x,a)', '<<<+- material init -+>>>'; flush(IO_STDOUT)


-  call parse
+  call parse()
   print'(/,1x,a)', 'parsed material.yaml'
@@ -8,6 +8,7 @@ module phase
   use constants
   use math
   use rotations
+  use polynomials
   use IO
   use config
   use material
@@ -1,15 +1,13 @@
 submodule(phase:mechanical) elastic

   type :: tParameters
-    real(pReal),dimension(3) :: &
-      C_11 = 0.0_pReal, &
-      C_12 = 0.0_pReal, &
-      C_13 = 0.0_pReal, &
-      C_33 = 0.0_pReal, &
-      C_44 = 0.0_pReal, &
-      C_66 = 0.0_pReal
-    real(pReal) :: &
-      T_ref
+    type(tPolynomial) :: &
+      C_11, &
+      C_12, &
+      C_13, &
+      C_33, &
+      C_44, &
+      C_66
   end type tParameters

   type(tParameters), allocatable, dimension(:) :: param
@@ -47,35 +45,17 @@ module subroutine elastic_init(phases)

     associate(prm => param(ph))

-      prm%T_ref = elastic%get_asFloat('T_ref', defaultVal=T_ROOM)
-
-      prm%C_11(1) = elastic%get_asFloat('C_11')
-      prm%C_11(2) = elastic%get_asFloat('C_11,T', defaultVal=0.0_pReal)
-      prm%C_11(3) = elastic%get_asFloat('C_11,T^2',defaultVal=0.0_pReal)
-
-      prm%C_12(1) = elastic%get_asFloat('C_12')
-      prm%C_12(2) = elastic%get_asFloat('C_12,T', defaultVal=0.0_pReal)
-      prm%C_12(3) = elastic%get_asFloat('C_12,T^2',defaultVal=0.0_pReal)
-
-      prm%C_44(1) = elastic%get_asFloat('C_44')
-      prm%C_44(2) = elastic%get_asFloat('C_44,T', defaultVal=0.0_pReal)
-      prm%C_44(3) = elastic%get_asFloat('C_44,T^2',defaultVal=0.0_pReal)
+      prm%C_11 = polynomial(elastic%asDict(),'C_11','T')
+      prm%C_12 = polynomial(elastic%asDict(),'C_12','T')
+      prm%C_44 = polynomial(elastic%asDict(),'C_44','T')

       if (any(phase_lattice(ph) == ['hP','tI'])) then
-        prm%C_13(1) = elastic%get_asFloat('C_13')
-        prm%C_13(2) = elastic%get_asFloat('C_13,T', defaultVal=0.0_pReal)
-        prm%C_13(3) = elastic%get_asFloat('C_13,T^2',defaultVal=0.0_pReal)
-
-        prm%C_33(1) = elastic%get_asFloat('C_33')
-        prm%C_33(2) = elastic%get_asFloat('C_33,T', defaultVal=0.0_pReal)
-        prm%C_33(3) = elastic%get_asFloat('C_33,T^2',defaultVal=0.0_pReal)
+        prm%C_13 = polynomial(elastic%asDict(),'C_13','T')
+        prm%C_33 = polynomial(elastic%asDict(),'C_33','T')
       end if

-      if (phase_lattice(ph) == 'tI') then
-        prm%C_66(1) = elastic%get_asFloat('C_66')
-        prm%C_66(2) = elastic%get_asFloat('C_66,T', defaultVal=0.0_pReal)
-        prm%C_66(3) = elastic%get_asFloat('C_66,T^2',defaultVal=0.0_pReal)
-      end if
+      if (phase_lattice(ph) == 'tI') &
+        prm%C_66 = polynomial(elastic%asDict(),'C_66','T')

     end associate
   end do
@@ -97,38 +77,20 @@ pure module function elastic_C66(ph,en) result(C66)


   associate(prm => param(ph))

   C66 = 0.0_pReal
   T = thermal_T(ph,en)

-  C66(1,1) = prm%C_11(1) &
-           + prm%C_11(2)*(T - prm%T_ref) &
-           + prm%C_11(3)*(T - prm%T_ref)**2
-
-  C66(1,2) = prm%C_12(1) &
-           + prm%C_12(2)*(T - prm%T_ref) &
-           + prm%C_12(3)*(T - prm%T_ref)**2
-
-  C66(4,4) = prm%C_44(1) &
-           + prm%C_44(2)*(T - prm%T_ref) &
-           + prm%C_44(3)*(T - prm%T_ref)**2
+  C66(1,1) = prm%C_11%at(T)
+  C66(1,2) = prm%C_12%at(T)
+  C66(4,4) = prm%C_44%at(T)

   if (any(phase_lattice(ph) == ['hP','tI'])) then
-    C66(1,3) = prm%C_13(1) &
-             + prm%C_13(2)*(T - prm%T_ref) &
-             + prm%C_13(3)*(T - prm%T_ref)**2
-
-    C66(3,3) = prm%C_33(1) &
-             + prm%C_33(2)*(T - prm%T_ref) &
-             + prm%C_33(3)*(T - prm%T_ref)**2
+    C66(1,3) = prm%C_13%at(T)
+    C66(3,3) = prm%C_33%at(T)
   end if

-  if (phase_lattice(ph) == 'tI') then
-    C66(6,6) = prm%C_66(1) &
-             + prm%C_66(2)*(T - prm%T_ref) &
-             + prm%C_66(3)*(T - prm%T_ref)**2
-  end if
+  if (phase_lattice(ph) == 'tI') C66(6,6) = prm%C_66%at(T)

   C66 = lattice_symmetrize_C66(C66,phase_lattice(ph))
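The elastic constants are now evaluated as polynomials in temperature about a reference value, C(T) = C0 + C1*(T − T_ref) + C2*(T − T_ref)^2, which is what `prm%C_xx%at(T)` computes. A Python sketch of the same evaluation; the class name and the coefficient values are hypothetical and only illustrate the idea:

    import numpy as np

    class Polynomial:
        """Evaluate c0 + c1*(x-x_ref) + c2*(x-x_ref)**2 + ... (mirrors tPolynomial%at; sketch)."""
        def __init__(self, coef, x_ref):
            self.coef, self.x_ref = np.asarray(coef, dtype=float), x_ref
        def at(self, x):
            return sum(c*(x - self.x_ref)**i for i, c in enumerate(self.coef))

    C_11 = Polynomial([106.75e9, -36.0e6, 0.0], x_ref=293.15)   # hypothetical coefficients (Pa, Pa/K)
    print(f'{C_11.at(500.0):.3e}')                               # stiffness at 500 K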
@@ -0,0 +1,179 @@
+!--------------------------------------------------------------------------------------------------
+!> @author Martin Diehl, KU Leuven
+!> @brief Polynomial representation for variable data
+!--------------------------------------------------------------------------------------------------
+module polynomials
+  use prec
+  use IO
+  use YAML_parse
+  use YAML_types
+
+  implicit none
+  private
+
+  type, public :: tPolynomial
+    real(pReal), dimension(:), allocatable :: coef
+    real(pReal) :: x_ref
+    contains
+    procedure, public :: at => eval
+    procedure, public :: der1_at => eval_der1
+  end type tPolynomial
+
+  interface polynomial
+    module procedure polynomial_from_dict
+    module procedure polynomial_from_coef
+  end interface polynomial
+
+  public :: &
+    polynomial, &
+    polynomials_init
+
+contains
+
+!--------------------------------------------------------------------------------------------------
+!> @brief Run self-test.
+!--------------------------------------------------------------------------------------------------
+subroutine polynomials_init()
+
+  print'(/,1x,a)', '<<<+- polynomials init -+>>>'; flush(IO_STDOUT)
+
+  call selfTest()
+
+end subroutine polynomials_init
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief Initialize a Polynomial from Coefficients.
+!--------------------------------------------------------------------------------------------------
+function polynomial_from_coef(coef,x_ref) result(p)
+
+  real(pReal), dimension(:), intent(in) :: coef
+  real(pReal), intent(in) :: x_ref
+  type(tPolynomial) :: p
+
+
+  allocate(p%coef(0:size(coef)-1),source=coef)                                                      ! should be zero based
+  p%x_ref = x_ref
+
+end function polynomial_from_coef
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief Initialize a Polynomial from a Dictionary with Coefficients.
+!--------------------------------------------------------------------------------------------------
+function polynomial_from_dict(dict,y,x) result(p)
+
+  type(tDict), intent(in) :: dict
+  character(len=*), intent(in) :: y, x
+  type(tPolynomial) :: p
+
+  real(pReal), dimension(:), allocatable :: coef
+  real(pReal) :: x_ref
+
+
+  allocate(coef(1),source=dict%get_asFloat(y))
+
+  if (dict%contains(y//','//x)) then
+    x_ref = dict%get_asFloat(x//'_ref')
+    coef = [coef,dict%get_asFloat(y//','//x)]
+    if (dict%contains(y//','//x//'^2')) then
+      coef = [coef,dict%get_asFloat(y//','//x//'^2')]
+    end if
+  else
+    x_ref = huge(0.0_pReal)                                                                         ! Simplify debugging
+  end if
+
+  p = Polynomial(coef,x_ref)
+
+end function polynomial_from_dict
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief Evaluate a Polynomial.
+!--------------------------------------------------------------------------------------------------
+pure function eval(self,x) result(y)
+
+  class(tPolynomial), intent(in) :: self
+  real(pReal), intent(in) :: x
+  real(pReal) :: y
+
+  integer :: i
+
+
+  y = self%coef(0)
+  do i = 1, ubound(self%coef,1)
+    y = y + self%coef(i) * (x-self%x_ref)**i
+  enddo
+
+end function eval
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief Evaluate a first derivative of Polynomial.
+!--------------------------------------------------------------------------------------------------
+pure function eval_der1(self,x) result(y)
+
+  class(tPolynomial), intent(in) :: self
+  real(pReal), intent(in) :: x
+  real(pReal) :: y
+
+  integer :: i
+
+
+  y = 0.0_pReal
+  do i = 1, ubound(self%coef,1)
+    y = y + real(i,pReal)*self%coef(i) * (x-self%x_ref)**(i-1)
+  enddo
+
+end function eval_der1
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief Check correctness of polynomical functionality.
+!--------------------------------------------------------------------------------------------------
+subroutine selfTest
+
+  type(tPolynomial) :: p1, p2
+  real(pReal), dimension(3) :: coef
+  real(pReal) :: x_ref, x
+  class(tNode), pointer :: dict
+  character(len=pStringLen), dimension(3) :: coef_s
+  character(len=pStringLen) :: x_ref_s, x_s, YAML_s
+
+  call random_number(coef)
+  call random_number(x_ref)
+  call random_number(x)
+
+  coef = coef*10_pReal -0.5_pReal
+  x_ref = x_ref*10_pReal -0.5_pReal
+  x = x*10_pReal -0.5_pReal
+
+  p1 = polynomial(coef,x_ref)
+  if (dNeq(p1%at(x_ref),coef(1))) error stop 'polynomial: @ref'
+
+  write(coef_s(1),*) coef(1)
+  write(coef_s(2),*) coef(2)
+  write(coef_s(3),*) coef(3)
+  write(x_ref_s,*) x_ref
+  write(x_s,*) x
+  YAML_s = 'C: '//trim(adjustl(coef_s(1)))//IO_EOL//&
+           'C,T: '//trim(adjustl(coef_s(2)))//IO_EOL//&
+           'C,T^2: '//trim(adjustl(coef_s(3)))//IO_EOL//&
+           'T_ref: '//trim(adjustl(x_ref_s))//IO_EOL
+  Dict => YAML_parse_str(trim(YAML_s))
+  p2 = polynomial(dict%asDict(),'C','T')
+  if (dNeq(p1%at(x),p2%at(x),1.0e-12_pReal)) error stop 'polynomials: init'
+
+  p1 = polynomial(coef*[0.0_pReal,1.0_pReal,0.0_pReal],x_ref)
+  if (dNeq(p1%at(x_ref+x),-p1%at(x_ref-x),1.0e-10_pReal)) error stop 'polynomials: eval(odd)'
+  if (dNeq(p1%der1_at(x),p1%der1_at(5.0_pReal*x),1.0e-10_pReal)) error stop 'polynomials: eval_der(odd)'
+
+  p1 = polynomial(coef*[0.0_pReal,0.0_pReal,1.0_pReal],x_ref)
+  if (dNeq(p1%at(x_ref+x),p1%at(x_ref-x),1e-10_pReal)) error stop 'polynomials: eval(even)'
+  if (dNeq(p1%der1_at(x_ref+x),-p1%der1_at(x_ref-x),1e-10_pReal)) error stop 'polynomials: eval_der(even)'
+
+end subroutine selfTest
+
+end module polynomials