Merge remote-tracking branch 'origin/development' into Fortran-polishing
This commit is contained in:
commit
a243e10641
2
PRIVATE
2
PRIVATE
|
@ -1 +1 @@
|
||||||
Subproject commit 5598ec4892b0921fccf63e8570f9fcd8e0365716
|
Subproject commit ebb7f0ce78d11275020af0ba60f929f95b446932
|
|
@ -1,178 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from io import StringIO
|
|
||||||
from optparse import OptionParser
|
|
||||||
|
|
||||||
import numpy as np
|
|
||||||
from scipy import ndimage
|
|
||||||
|
|
||||||
import damask
|
|
||||||
|
|
||||||
|
|
||||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
|
||||||
scriptID = ' '.join([scriptName,damask.version])
|
|
||||||
|
|
||||||
|
|
||||||
getInterfaceEnergy = lambda A,B: np.float32((A != B)*1.0) # 1.0 if A & B are distinct, 0.0 otherwise
|
|
||||||
struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood
|
|
||||||
|
|
||||||
|
|
||||||
#--------------------------------------------------------------------------------------------------
|
|
||||||
# MAIN
|
|
||||||
#--------------------------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [geomfile(s)]', description = """
|
|
||||||
Smoothen interface roughness by simulated curvature flow.
|
|
||||||
This is achieved by the diffusion of each initially sharply bounded grain volume within the periodic domain
|
|
||||||
up to a given distance 'd' voxels.
|
|
||||||
The final geometry is assembled by selecting at each voxel that grain index for which the concentration remains largest.
|
|
||||||
References 10.1073/pnas.1111557108 (10.1006/jcph.1994.1105)
|
|
||||||
|
|
||||||
""", version = scriptID)
|
|
||||||
|
|
||||||
parser.add_option('-d', '--distance',
|
|
||||||
dest = 'd',
|
|
||||||
type = 'float', metavar = 'float',
|
|
||||||
help = 'diffusion distance in voxels [%default]')
|
|
||||||
parser.add_option('-N', '--iterations',
|
|
||||||
dest = 'N',
|
|
||||||
type = 'int', metavar = 'int',
|
|
||||||
help = 'curvature flow iterations [%default]')
|
|
||||||
parser.add_option('-i', '--immutable',
|
|
||||||
action = 'extend', dest = 'immutable', metavar = '<int LIST>',
|
|
||||||
help = 'list of immutable material indices')
|
|
||||||
parser.add_option('--ndimage',
|
|
||||||
dest = 'ndimage', action='store_true',
|
|
||||||
help = 'use ndimage.gaussian_filter in lieu of explicit FFT')
|
|
||||||
|
|
||||||
parser.set_defaults(d = 1,
|
|
||||||
N = 1,
|
|
||||||
immutable = [],
|
|
||||||
ndimage = False,
|
|
||||||
)
|
|
||||||
|
|
||||||
(options, filenames) = parser.parse_args()
|
|
||||||
|
|
||||||
options.immutable = list(map(int,options.immutable))
|
|
||||||
|
|
||||||
|
|
||||||
if filenames == []: filenames = [None]
|
|
||||||
|
|
||||||
for name in filenames:
|
|
||||||
damask.util.report(scriptName,name)
|
|
||||||
|
|
||||||
geom = damask.Grid.load(StringIO(''.join(sys.stdin.read())) if name is None else name)
|
|
||||||
|
|
||||||
grid_original = geom.cells
|
|
||||||
damask.util.croak(geom)
|
|
||||||
material = np.tile(geom.material,np.where(grid_original == 1, 2,1)) # make one copy along dimensions with grid == 1
|
|
||||||
grid = np.array(material.shape)
|
|
||||||
|
|
||||||
# --- initialize support data ---------------------------------------------------------------------
|
|
||||||
|
|
||||||
# store a copy of the initial material indices to find locations of immutable indices
|
|
||||||
material_original = np.copy(material)
|
|
||||||
|
|
||||||
if not options.ndimage:
|
|
||||||
X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]]
|
|
||||||
|
|
||||||
# Calculates gaussian weights for simulating 3d diffusion
|
|
||||||
gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d),dtype=np.float32) \
|
|
||||||
/np.power(2.0*np.pi*options.d*options.d,(3.0 - np.count_nonzero(grid_original == 1))/2.,dtype=np.float32)
|
|
||||||
|
|
||||||
gauss[:,:,:grid[2]//2:-1] = gauss[:,:,1:(grid[2]+1)//2] # trying to cope with uneven (odd) grid size
|
|
||||||
gauss[:,:grid[1]//2:-1,:] = gauss[:,1:(grid[1]+1)//2,:]
|
|
||||||
gauss[:grid[0]//2:-1,:,:] = gauss[1:(grid[0]+1)//2,:,:]
|
|
||||||
gauss = np.fft.rfftn(gauss).astype(np.complex64)
|
|
||||||
|
|
||||||
for smoothIter in range(options.N):
|
|
||||||
|
|
||||||
interfaceEnergy = np.zeros(material.shape,dtype=np.float32)
|
|
||||||
for i in (-1,0,1):
|
|
||||||
for j in (-1,0,1):
|
|
||||||
for k in (-1,0,1):
|
|
||||||
# assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood)
|
|
||||||
interfaceEnergy = np.maximum(interfaceEnergy,
|
|
||||||
getInterfaceEnergy(material,np.roll(np.roll(np.roll(
|
|
||||||
material,i,axis=0), j,axis=1), k,axis=2)))
|
|
||||||
|
|
||||||
# periodically extend interfacial energy array by half a grid size in positive and negative directions
|
|
||||||
periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]//2:-grid[0]//2,
|
|
||||||
grid[1]//2:-grid[1]//2,
|
|
||||||
grid[2]//2:-grid[2]//2]
|
|
||||||
|
|
||||||
# transform bulk volume (i.e. where interfacial energy remained zero), store index of closest boundary voxel
|
|
||||||
index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0.,
|
|
||||||
return_distances = False,
|
|
||||||
return_indices = True)
|
|
||||||
|
|
||||||
# want array index of nearest voxel on periodically extended boundary
|
|
||||||
periodic_bulkEnergy = periodic_interfaceEnergy[index[0],
|
|
||||||
index[1],
|
|
||||||
index[2]].reshape(2*grid) # fill bulk with energy of nearest interface
|
|
||||||
|
|
||||||
if options.ndimage:
|
|
||||||
periodic_diffusedEnergy = ndimage.gaussian_filter(
|
|
||||||
np.where(ndimage.morphology.binary_dilation(periodic_interfaceEnergy > 0.,
|
|
||||||
structure = struc,
|
|
||||||
iterations = int(round(options.d*2.))-1, # fat boundary
|
|
||||||
),
|
|
||||||
periodic_bulkEnergy, # ...and zero everywhere else
|
|
||||||
0.),
|
|
||||||
sigma = options.d)
|
|
||||||
else:
|
|
||||||
diffusedEnergy = np.fft.irfftn(np.fft.rfftn(
|
|
||||||
np.where(
|
|
||||||
ndimage.morphology.binary_dilation(interfaceEnergy > 0.,
|
|
||||||
structure = struc,
|
|
||||||
iterations = int(round(options.d*2.))-1),# fat boundary
|
|
||||||
periodic_bulkEnergy[grid[0]//2:-grid[0]//2, # retain filled energy on fat boundary...
|
|
||||||
grid[1]//2:-grid[1]//2,
|
|
||||||
grid[2]//2:-grid[2]//2], # ...and zero everywhere else
|
|
||||||
0.)).astype(np.complex64) *
|
|
||||||
gauss).astype(np.float32)
|
|
||||||
|
|
||||||
periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]//2:-grid[0]//2,
|
|
||||||
grid[1]//2:-grid[1]//2,
|
|
||||||
grid[2]//2:-grid[2]//2] # periodically extend the smoothed bulk energy
|
|
||||||
|
|
||||||
|
|
||||||
# transform voxels close to interface region
|
|
||||||
index = ndimage.morphology.distance_transform_edt(periodic_diffusedEnergy >= 0.95*np.amax(periodic_diffusedEnergy),
|
|
||||||
return_distances = False,
|
|
||||||
return_indices = True) # want index of closest bulk grain
|
|
||||||
|
|
||||||
periodic_material = np.tile(material,(3,3,3))[grid[0]//2:-grid[0]//2,
|
|
||||||
grid[1]//2:-grid[1]//2,
|
|
||||||
grid[2]//2:-grid[2]//2] # periodically extend the geometry
|
|
||||||
|
|
||||||
material = periodic_material[index[0],
|
|
||||||
index[1],
|
|
||||||
index[2]].reshape(2*grid)[grid[0]//2:-grid[0]//2,
|
|
||||||
grid[1]//2:-grid[1]//2,
|
|
||||||
grid[2]//2:-grid[2]//2] # extent grains into interface region
|
|
||||||
|
|
||||||
# replace immutable materials with closest mutable ones
|
|
||||||
index = ndimage.morphology.distance_transform_edt(np.in1d(material,options.immutable).reshape(grid),
|
|
||||||
return_distances = False,
|
|
||||||
return_indices = True)
|
|
||||||
material = material[index[0],
|
|
||||||
index[1],
|
|
||||||
index[2]]
|
|
||||||
|
|
||||||
immutable = np.zeros(material.shape, dtype=np.bool)
|
|
||||||
# find locations where immutable materials have been in original structure
|
|
||||||
for micro in options.immutable:
|
|
||||||
immutable += material_original == micro
|
|
||||||
|
|
||||||
# undo any changes involving immutable materials
|
|
||||||
material = np.where(immutable, material_original,material)
|
|
||||||
|
|
||||||
damask.Grid(material = material[0:grid_original[0],0:grid_original[1],0:grid_original[2]],
|
|
||||||
size = geom.size,
|
|
||||||
origin = geom.origin,
|
|
||||||
comments = geom.comments + [scriptID + ' ' + ' '.join(sys.argv[1:])],
|
|
||||||
)\
|
|
||||||
.save(sys.stdout if name is None else name)
|
|
|
@ -1 +1 @@
|
||||||
v3.0.0-alpha5-518-g4fa97b9a3
|
v3.0.0-alpha5-556-g97f849c09
|
||||||
|
|
|
@ -47,27 +47,32 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __eq__(self, other: object) -> bool:
|
def __eq__(self,
|
||||||
|
other: object) -> bool:
|
||||||
"""Test equality of colormaps."""
|
"""Test equality of colormaps."""
|
||||||
if not isinstance(other, Colormap):
|
if not isinstance(other, Colormap):
|
||||||
return NotImplemented
|
return NotImplemented
|
||||||
return len(self.colors) == len(other.colors) \
|
return len(self.colors) == len(other.colors) \
|
||||||
and bool(np.all(self.colors == other.colors))
|
and bool(np.all(self.colors == other.colors))
|
||||||
|
|
||||||
def __add__(self, other: 'Colormap') -> 'Colormap':
|
def __add__(self,
|
||||||
|
other: 'Colormap') -> 'Colormap':
|
||||||
"""Concatenate."""
|
"""Concatenate."""
|
||||||
return Colormap(np.vstack((self.colors,other.colors)),
|
return Colormap(np.vstack((self.colors,other.colors)),
|
||||||
f'{self.name}+{other.name}')
|
f'{self.name}+{other.name}')
|
||||||
|
|
||||||
def __iadd__(self, other: 'Colormap') -> 'Colormap':
|
def __iadd__(self,
|
||||||
|
other: 'Colormap') -> 'Colormap':
|
||||||
"""Concatenate (in-place)."""
|
"""Concatenate (in-place)."""
|
||||||
return self.__add__(other)
|
return self.__add__(other)
|
||||||
|
|
||||||
def __mul__(self, factor: int) -> 'Colormap':
|
def __mul__(self,
|
||||||
|
factor: int) -> 'Colormap':
|
||||||
"""Repeat."""
|
"""Repeat."""
|
||||||
return Colormap(np.vstack([self.colors]*factor),f'{self.name}*{factor}')
|
return Colormap(np.vstack([self.colors]*factor),f'{self.name}*{factor}')
|
||||||
|
|
||||||
def __imul__(self, factor: int) -> 'Colormap':
|
def __imul__(self,
|
||||||
|
factor: int) -> 'Colormap':
|
||||||
"""Repeat (in-place)."""
|
"""Repeat (in-place)."""
|
||||||
return self.__mul__(factor)
|
return self.__mul__(factor)
|
||||||
|
|
||||||
|
@ -161,7 +166,8 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_predefined(name: str, N: int = 256) -> 'Colormap':
|
def from_predefined(name: str,
|
||||||
|
N: int = 256) -> 'Colormap':
|
||||||
"""
|
"""
|
||||||
Select from a set of predefined colormaps.
|
Select from a set of predefined colormaps.
|
||||||
|
|
||||||
|
@ -260,10 +266,8 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
l,r = (field[mask].min(),field[mask].max()) if bounds is None else \
|
l,r = (field[mask].min(),field[mask].max()) if bounds is None else \
|
||||||
(bounds[0],bounds[1])
|
(bounds[0],bounds[1])
|
||||||
|
|
||||||
delta,avg = r-l,0.5*abs(r+l)
|
if abs(delta := r-l) * 1e8 <= (avg := 0.5*abs(r+l)): # delta is similar to numerical noise
|
||||||
|
l,r = (l-0.5*avg*np.sign(delta),r+0.5*avg*np.sign(delta)) # extend range to have actual data centered within
|
||||||
if abs(delta) * 1e8 <= avg: # delta is similar to numerical noise
|
|
||||||
l,r = l-0.5*avg*np.sign(delta),r+0.5*avg*np.sign(delta), # extend range to have actual data centered within
|
|
||||||
|
|
||||||
return Image.fromarray(
|
return Image.fromarray(
|
||||||
(np.dstack((
|
(np.dstack((
|
||||||
|
@ -275,7 +279,8 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
mode='RGBA')
|
mode='RGBA')
|
||||||
|
|
||||||
|
|
||||||
def reversed(self, name: str = None) -> 'Colormap':
|
def reversed(self,
|
||||||
|
name: str = None) -> 'Colormap':
|
||||||
"""
|
"""
|
||||||
Reverse.
|
Reverse.
|
||||||
|
|
||||||
|
@ -296,7 +301,7 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
>>> damask.Colormap.from_predefined('stress').reversed()
|
>>> damask.Colormap.from_predefined('stress').reversed()
|
||||||
|
|
||||||
"""
|
"""
|
||||||
rev = super(Colormap,self).reversed(name)
|
rev = super().reversed(name)
|
||||||
return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name)
|
return Colormap(np.array(rev.colors),rev.name[:-4] if rev.name.endswith('_r_r') else rev.name)
|
||||||
|
|
||||||
|
|
||||||
|
@ -328,7 +333,8 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
return fname
|
return fname
|
||||||
|
|
||||||
|
|
||||||
def save_paraview(self, fname: FileHandle = None):
|
def save_paraview(self,
|
||||||
|
fname: FileHandle = None):
|
||||||
"""
|
"""
|
||||||
Save as JSON file for use in Paraview.
|
Save as JSON file for use in Paraview.
|
||||||
|
|
||||||
|
@ -355,7 +361,8 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
fhandle.write('\n')
|
fhandle.write('\n')
|
||||||
|
|
||||||
|
|
||||||
def save_ASCII(self, fname: FileHandle = None):
|
def save_ASCII(self,
|
||||||
|
fname: FileHandle = None):
|
||||||
"""
|
"""
|
||||||
Save as ASCII file.
|
Save as ASCII file.
|
||||||
|
|
||||||
|
@ -390,7 +397,8 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
self._get_file_handle(fname,'.legend').write(GOM_str)
|
self._get_file_handle(fname,'.legend').write(GOM_str)
|
||||||
|
|
||||||
|
|
||||||
def save_gmsh(self, fname: FileHandle = None):
|
def save_gmsh(self,
|
||||||
|
fname: FileHandle = None):
|
||||||
"""
|
"""
|
||||||
Save as ASCII file for use in gmsh.
|
Save as ASCII file for use in gmsh.
|
||||||
|
|
||||||
|
@ -429,7 +437,7 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
"""If saturation of one of the two colors is much less than the other, hue of the less."""
|
"""If saturation of one of the two colors is much less than the other, hue of the less."""
|
||||||
if msh_sat[0] >= msh_unsat[0]:
|
if msh_sat[0] >= msh_unsat[0]:
|
||||||
return msh_sat[2]
|
return msh_sat[2]
|
||||||
else:
|
|
||||||
hSpin = msh_sat[1]/np.sin(msh_sat[1])*np.sqrt(msh_unsat[0]**2.0-msh_sat[0]**2)/msh_sat[0]
|
hSpin = msh_sat[1]/np.sin(msh_sat[1])*np.sqrt(msh_unsat[0]**2.0-msh_sat[0]**2)/msh_sat[0]
|
||||||
if msh_sat[2] < - np.pi/3.0: hSpin *= -1.0
|
if msh_sat[2] < - np.pi/3.0: hSpin *= -1.0
|
||||||
return msh_sat[2] + hSpin
|
return msh_sat[2] + hSpin
|
||||||
|
@ -445,9 +453,9 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
else:
|
else:
|
||||||
lo = np.array([M_mid,0.0,0.0])
|
lo = np.array([M_mid,0.0,0.0])
|
||||||
frac = 2.0*frac - 1.0
|
frac = 2.0*frac - 1.0
|
||||||
if lo[1] < 0.05 and hi[1] > 0.05:
|
if lo[1] < 0.05 < hi[1]:
|
||||||
lo[2] = adjust_hue(hi,lo)
|
lo[2] = adjust_hue(hi,lo)
|
||||||
elif lo[1] > 0.05 and hi[1] < 0.05:
|
elif hi[1] < 0.05 < lo[1]:
|
||||||
hi[2] = adjust_hue(lo,hi)
|
hi[2] = adjust_hue(lo,hi)
|
||||||
|
|
||||||
return (1.0 - frac) * lo + frac * hi
|
return (1.0 - frac) * lo + frac * hi
|
||||||
|
@ -476,7 +484,7 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg',
|
'gnuplot', 'gnuplot2', 'CMRmap', 'cubehelix', 'brg',
|
||||||
'gist_rainbow', 'rainbow', 'jet', 'nipy_spectral', 'gist_ncar']}
|
'gist_rainbow', 'rainbow', 'jet', 'nipy_spectral', 'gist_ncar']}
|
||||||
|
|
||||||
_predefined_DAMASK = {'orientation': {'low': [0.933334,0.878432,0.878431],
|
_predefined_DAMASK = {'orientation': {'low': [0.933334,0.878432,0.878431], # noqa
|
||||||
'high': [0.250980,0.007843,0.000000]},
|
'high': [0.250980,0.007843,0.000000]},
|
||||||
'strain': {'low': [0.941177,0.941177,0.870588],
|
'strain': {'low': [0.941177,0.941177,0.870588],
|
||||||
'high': [0.266667,0.266667,0.000000]},
|
'high': [0.266667,0.266667,0.000000]},
|
||||||
|
@ -621,7 +629,8 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _lab2xyz(lab: np.ndarray, ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
|
def _lab2xyz(lab: np.ndarray,
|
||||||
|
ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
|
||||||
"""
|
"""
|
||||||
CIE Lab to CIE Xyz.
|
CIE Lab to CIE Xyz.
|
||||||
|
|
||||||
|
@ -652,7 +661,8 @@ class Colormap(mpl.colors.ListedColormap):
|
||||||
])*ref_white
|
])*ref_white
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _xyz2lab(xyz: np.ndarray, ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
|
def _xyz2lab(xyz: np.ndarray,
|
||||||
|
ref_white: np.ndarray = _REF_WHITE) -> np.ndarray:
|
||||||
"""
|
"""
|
||||||
CIE Xyz to CIE Lab.
|
CIE Xyz to CIE Lab.
|
||||||
|
|
||||||
|
|
|
@ -2,25 +2,34 @@ import copy
|
||||||
from io import StringIO
|
from io import StringIO
|
||||||
from collections.abc import Iterable
|
from collections.abc import Iterable
|
||||||
import abc
|
import abc
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Union, Dict, Any, Type, TypeVar
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
|
from ._typehints import FileHandle
|
||||||
from . import Rotation
|
from . import Rotation
|
||||||
|
|
||||||
|
MyType = TypeVar('MyType', bound='Config')
|
||||||
|
|
||||||
class NiceDumper(yaml.SafeDumper):
|
class NiceDumper(yaml.SafeDumper):
|
||||||
"""Make YAML readable for humans."""
|
"""Make YAML readable for humans."""
|
||||||
|
|
||||||
def write_line_break(self, data=None):
|
def write_line_break(self,
|
||||||
|
data: str = None):
|
||||||
super().write_line_break(data)
|
super().write_line_break(data)
|
||||||
|
|
||||||
if len(self.indents) == 1:
|
if len(self.indents) == 1:
|
||||||
super().write_line_break()
|
super().write_line_break()
|
||||||
|
|
||||||
def increase_indent(self, flow=False, indentless=False):
|
def increase_indent(self,
|
||||||
|
flow: bool = False,
|
||||||
|
indentless: bool = False):
|
||||||
return super().increase_indent(flow, False)
|
return super().increase_indent(flow, False)
|
||||||
|
|
||||||
def represent_data(self, data):
|
def represent_data(self,
|
||||||
|
data: Any):
|
||||||
"""Cast Config objects and its subclasses to dict."""
|
"""Cast Config objects and its subclasses to dict."""
|
||||||
if isinstance(data, dict) and type(data) != dict:
|
if isinstance(data, dict) and type(data) != dict:
|
||||||
return self.represent_data(dict(data))
|
return self.represent_data(dict(data))
|
||||||
|
@ -31,14 +40,17 @@ class NiceDumper(yaml.SafeDumper):
|
||||||
else:
|
else:
|
||||||
return super().represent_data(data)
|
return super().represent_data(data)
|
||||||
|
|
||||||
def ignore_aliases(self, data):
|
def ignore_aliases(self,
|
||||||
|
data: Any) -> bool:
|
||||||
"""Do not use references to existing objects."""
|
"""Do not use references to existing objects."""
|
||||||
return True
|
return True
|
||||||
|
|
||||||
class Config(dict):
|
class Config(dict):
|
||||||
"""YAML-based configuration."""
|
"""YAML-based configuration."""
|
||||||
|
|
||||||
def __init__(self,yml=None,**kwargs):
|
def __init__(self,
|
||||||
|
yml: Union[str, Dict[str, Any]] = None,
|
||||||
|
**kwargs):
|
||||||
"""Initialize from YAML, dict, or key=value pairs."""
|
"""Initialize from YAML, dict, or key=value pairs."""
|
||||||
if isinstance(yml,str):
|
if isinstance(yml,str):
|
||||||
kwargs.update(yaml.safe_load(yml))
|
kwargs.update(yaml.safe_load(yml))
|
||||||
|
@ -47,7 +59,7 @@ class Config(dict):
|
||||||
|
|
||||||
super().__init__(**kwargs)
|
super().__init__(**kwargs)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self) -> str:
|
||||||
"""Show as in file."""
|
"""Show as in file."""
|
||||||
output = StringIO()
|
output = StringIO()
|
||||||
self.save(output)
|
self.save(output)
|
||||||
|
@ -55,14 +67,15 @@ class Config(dict):
|
||||||
return ''.join(output.readlines())
|
return ''.join(output.readlines())
|
||||||
|
|
||||||
|
|
||||||
def __copy__(self):
|
def __copy__(self: MyType) -> MyType:
|
||||||
"""Create deep copy."""
|
"""Create deep copy."""
|
||||||
return copy.deepcopy(self)
|
return copy.deepcopy(self)
|
||||||
|
|
||||||
copy = __copy__
|
copy = __copy__
|
||||||
|
|
||||||
|
|
||||||
def __or__(self,other):
|
def __or__(self: MyType,
|
||||||
|
other) -> MyType:
|
||||||
"""
|
"""
|
||||||
Update configuration with contents of other.
|
Update configuration with contents of other.
|
||||||
|
|
||||||
|
@ -76,18 +89,24 @@ class Config(dict):
|
||||||
updated : damask.Config
|
updated : damask.Config
|
||||||
Updated configuration.
|
Updated configuration.
|
||||||
|
|
||||||
|
Note
|
||||||
|
----
|
||||||
|
This functionality is a backport for Python 3.8
|
||||||
|
|
||||||
"""
|
"""
|
||||||
duplicate = self.copy()
|
duplicate = self.copy()
|
||||||
duplicate.update(other)
|
duplicate.update(other)
|
||||||
return duplicate
|
return duplicate
|
||||||
|
|
||||||
|
|
||||||
def __ior__(self,other):
|
def __ior__(self: MyType,
|
||||||
|
other) -> MyType:
|
||||||
"""Update configuration with contents of other."""
|
"""Update configuration with contents of other."""
|
||||||
return self.__or__(other)
|
return self.__or__(other)
|
||||||
|
|
||||||
|
|
||||||
def delete(self,keys):
|
def delete(self: MyType,
|
||||||
|
keys: Union[Iterable, str]) -> MyType:
|
||||||
"""
|
"""
|
||||||
Remove configuration keys.
|
Remove configuration keys.
|
||||||
|
|
||||||
|
@ -109,7 +128,8 @@ class Config(dict):
|
||||||
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def load(cls,fname):
|
def load(cls: Type[MyType],
|
||||||
|
fname: FileHandle) -> MyType:
|
||||||
"""
|
"""
|
||||||
Load from yaml file.
|
Load from yaml file.
|
||||||
|
|
||||||
|
@ -124,14 +144,15 @@ class Config(dict):
|
||||||
Configuration from file.
|
Configuration from file.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
try:
|
if isinstance(fname, (str, Path)):
|
||||||
fhandle = open(fname)
|
fhandle = open(fname)
|
||||||
except TypeError:
|
else:
|
||||||
fhandle = fname
|
fhandle = fname
|
||||||
return cls(yaml.safe_load(fhandle))
|
return cls(yaml.safe_load(fhandle))
|
||||||
|
|
||||||
|
def save(self,
|
||||||
def save(self,fname,**kwargs):
|
fname: FileHandle,
|
||||||
|
**kwargs):
|
||||||
"""
|
"""
|
||||||
Save to yaml file.
|
Save to yaml file.
|
||||||
|
|
||||||
|
@ -143,9 +164,9 @@ class Config(dict):
|
||||||
Keyword arguments parsed to yaml.dump.
|
Keyword arguments parsed to yaml.dump.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
try:
|
if isinstance(fname, (str, Path)):
|
||||||
fhandle = open(fname,'w',newline='\n')
|
fhandle = open(fname,'w',newline='\n')
|
||||||
except TypeError:
|
else:
|
||||||
fhandle = fname
|
fhandle = fname
|
||||||
|
|
||||||
if 'width' not in kwargs:
|
if 'width' not in kwargs:
|
||||||
|
|
|
@ -1,10 +1,14 @@
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import h5py
|
import h5py
|
||||||
|
from typing import Sequence, Dict, Any, Collection
|
||||||
|
|
||||||
|
from ._typehints import FileHandle
|
||||||
from . import Config
|
from . import Config
|
||||||
from . import Rotation
|
from . import Rotation
|
||||||
from . import Orientation
|
from . import Orientation
|
||||||
from . import util
|
from . import util
|
||||||
|
from . import Table
|
||||||
|
|
||||||
|
|
||||||
class ConfigMaterial(Config):
|
class ConfigMaterial(Config):
|
||||||
"""
|
"""
|
||||||
|
@ -17,7 +21,9 @@ class ConfigMaterial(Config):
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self,d=None,**kwargs):
|
def __init__(self,
|
||||||
|
d: Dict[str, Any] = None,
|
||||||
|
**kwargs):
|
||||||
"""
|
"""
|
||||||
New material configuration.
|
New material configuration.
|
||||||
|
|
||||||
|
@ -30,6 +36,7 @@ class ConfigMaterial(Config):
|
||||||
Initial content specified as pairs of key=value.
|
Initial content specified as pairs of key=value.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
default: Collection
|
||||||
if d is None:
|
if d is None:
|
||||||
for section, default in {'material':[],'homogenization':{},'phase':{}}.items():
|
for section, default in {'material':[],'homogenization':{},'phase':{}}.items():
|
||||||
if section not in kwargs: kwargs.update({section:default})
|
if section not in kwargs: kwargs.update({section:default})
|
||||||
|
@ -37,7 +44,9 @@ class ConfigMaterial(Config):
|
||||||
super().__init__(d,**kwargs)
|
super().__init__(d,**kwargs)
|
||||||
|
|
||||||
|
|
||||||
def save(self,fname='material.yaml',**kwargs):
|
def save(self,
|
||||||
|
fname: FileHandle = 'material.yaml',
|
||||||
|
**kwargs):
|
||||||
"""
|
"""
|
||||||
Save to yaml file.
|
Save to yaml file.
|
||||||
|
|
||||||
|
@ -53,7 +62,8 @@ class ConfigMaterial(Config):
|
||||||
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def load(cls,fname='material.yaml'):
|
def load(cls,
|
||||||
|
fname: FileHandle = 'material.yaml') -> 'ConfigMaterial':
|
||||||
"""
|
"""
|
||||||
Load from yaml file.
|
Load from yaml file.
|
||||||
|
|
||||||
|
@ -72,10 +82,14 @@ class ConfigMaterial(Config):
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def load_DREAM3D(fname,
|
def load_DREAM3D(fname: str,
|
||||||
grain_data=None,cell_data=None,cell_ensemble_data='CellEnsembleData',
|
grain_data: str = None,
|
||||||
phases='Phases',Euler_angles='EulerAngles',phase_names='PhaseName',
|
cell_data: str = None,
|
||||||
base_group=None):
|
cell_ensemble_data: str = 'CellEnsembleData',
|
||||||
|
phases: str = 'Phases',
|
||||||
|
Euler_angles: str = 'EulerAngles',
|
||||||
|
phase_names: str = 'PhaseName',
|
||||||
|
base_group: str = None) -> 'ConfigMaterial':
|
||||||
"""
|
"""
|
||||||
Load DREAM.3D (HDF5) file.
|
Load DREAM.3D (HDF5) file.
|
||||||
|
|
||||||
|
@ -154,7 +168,8 @@ class ConfigMaterial(Config):
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_table(table,**kwargs):
|
def from_table(table: Table,
|
||||||
|
**kwargs) -> 'ConfigMaterial':
|
||||||
"""
|
"""
|
||||||
Generate from an ASCII table.
|
Generate from an ASCII table.
|
||||||
|
|
||||||
|
@ -207,7 +222,7 @@ class ConfigMaterial(Config):
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_complete(self):
|
def is_complete(self) -> bool:
|
||||||
"""
|
"""
|
||||||
Check for completeness.
|
Check for completeness.
|
||||||
|
|
||||||
|
@ -267,12 +282,11 @@ class ConfigMaterial(Config):
|
||||||
if homogenization - set(self['homogenization']):
|
if homogenization - set(self['homogenization']):
|
||||||
print(f'Homogenization(s) {homogenization-set(self["homogenization"])} missing')
|
print(f'Homogenization(s) {homogenization-set(self["homogenization"])} missing')
|
||||||
ok = False
|
ok = False
|
||||||
|
|
||||||
return ok
|
return ok
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_valid(self):
|
def is_valid(self) -> bool:
|
||||||
"""
|
"""
|
||||||
Check for valid content.
|
Check for valid content.
|
||||||
|
|
||||||
|
@ -316,7 +330,10 @@ class ConfigMaterial(Config):
|
||||||
return ok
|
return ok
|
||||||
|
|
||||||
|
|
||||||
def material_rename_phase(self,mapping,ID=None,constituent=None):
|
def material_rename_phase(self,
|
||||||
|
mapping: Dict[str, str],
|
||||||
|
ID: Sequence[int] = None,
|
||||||
|
constituent: Sequence[int] = None) -> 'ConfigMaterial':
|
||||||
"""
|
"""
|
||||||
Change phase name in material.
|
Change phase name in material.
|
||||||
|
|
||||||
|
@ -347,7 +364,9 @@ class ConfigMaterial(Config):
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def material_rename_homogenization(self,mapping,ID=None):
|
def material_rename_homogenization(self,
|
||||||
|
mapping: Dict[str, str],
|
||||||
|
ID: Sequence[int] = None) -> 'ConfigMaterial':
|
||||||
"""
|
"""
|
||||||
Change homogenization name in material.
|
Change homogenization name in material.
|
||||||
|
|
||||||
|
@ -374,7 +393,8 @@ class ConfigMaterial(Config):
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def material_add(self,**kwargs):
|
def material_add(self,
|
||||||
|
**kwargs: Any) -> 'ConfigMaterial':
|
||||||
"""
|
"""
|
||||||
Add material entries.
|
Add material entries.
|
||||||
|
|
||||||
|
@ -453,7 +473,7 @@ class ConfigMaterial(Config):
|
||||||
N = max(N,s[0]) if len(s)>0 else N
|
N = max(N,s[0]) if len(s)>0 else N
|
||||||
n = max(n,s[1]) if len(s)>1 else n
|
n = max(n,s[1]) if len(s)>1 else n
|
||||||
|
|
||||||
mat = [{'constituents':[{} for _ in range(n)]} for _ in range(N)]
|
mat: Sequence[dict] = [{'constituents':[{} for _ in range(n)]} for _ in range(N)]
|
||||||
|
|
||||||
if 'v' not in kwargs:
|
if 'v' not in kwargs:
|
||||||
shaped['v'] = np.broadcast_to(1/n,(N,n))
|
shaped['v'] = np.broadcast_to(1/n,(N,n))
|
||||||
|
|
|
@ -33,9 +33,9 @@ class Crystal():
|
||||||
def __init__(self,*,
|
def __init__(self,*,
|
||||||
family = None,
|
family = None,
|
||||||
lattice = None,
|
lattice = None,
|
||||||
a = None,b = None,c = None,
|
a: float = None, b: float = None, c: float = None,
|
||||||
alpha = None,beta = None,gamma = None,
|
alpha: float = None, beta: float = None, gamma: float = None,
|
||||||
degrees = False):
|
degrees: bool = False):
|
||||||
"""
|
"""
|
||||||
Representation of crystal in terms of crystal family or Bravais lattice.
|
Representation of crystal in terms of crystal family or Bravais lattice.
|
||||||
|
|
||||||
|
@ -62,7 +62,7 @@ class Crystal():
|
||||||
Angles are given in degrees. Defaults to False.
|
Angles are given in degrees. Defaults to False.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if family not in [None] + list(lattice_symmetries.values()):
|
if family is not None and family not in list(lattice_symmetries.values()):
|
||||||
raise KeyError(f'invalid crystal family "{family}"')
|
raise KeyError(f'invalid crystal family "{family}"')
|
||||||
if lattice is not None and family is not None and family != lattice_symmetries[lattice]:
|
if lattice is not None and family is not None and family != lattice_symmetries[lattice]:
|
||||||
raise KeyError(f'incompatible family "{family}" for lattice "{lattice}"')
|
raise KeyError(f'incompatible family "{family}" for lattice "{lattice}"')
|
||||||
|
@ -107,9 +107,6 @@ class Crystal():
|
||||||
if np.any([np.roll([self.alpha,self.beta,self.gamma],r)[0]
|
if np.any([np.roll([self.alpha,self.beta,self.gamma],r)[0]
|
||||||
>= np.sum(np.roll([self.alpha,self.beta,self.gamma],r)[1:]) for r in range(3)]):
|
>= np.sum(np.roll([self.alpha,self.beta,self.gamma],r)[1:]) for r in range(3)]):
|
||||||
raise ValueError ('each lattice angle must be less than sum of others')
|
raise ValueError ('each lattice angle must be less than sum of others')
|
||||||
else:
|
|
||||||
self.a = self.b = self.c = None
|
|
||||||
self.alpha = self.beta = self.gamma = None
|
|
||||||
|
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
|
@ -122,7 +119,8 @@ class Crystal():
|
||||||
'α={:.5g}°, β={:.5g}°, γ={:.5g}°'.format(*np.degrees(self.parameters[3:]))])
|
'α={:.5g}°, β={:.5g}°, γ={:.5g}°'.format(*np.degrees(self.parameters[3:]))])
|
||||||
|
|
||||||
|
|
||||||
def __eq__(self,other):
|
def __eq__(self,
|
||||||
|
other: object) -> bool:
|
||||||
"""
|
"""
|
||||||
Equal to other.
|
Equal to other.
|
||||||
|
|
||||||
|
@ -132,6 +130,8 @@ class Crystal():
|
||||||
Crystal to check for equality.
|
Crystal to check for equality.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
if not isinstance(other, Crystal):
|
||||||
|
return NotImplemented
|
||||||
return self.lattice == other.lattice and \
|
return self.lattice == other.lattice and \
|
||||||
self.parameters == other.parameters and \
|
self.parameters == other.parameters and \
|
||||||
self.family == other.family
|
self.family == other.family
|
||||||
|
@ -139,8 +139,7 @@ class Crystal():
|
||||||
@property
|
@property
|
||||||
def parameters(self):
|
def parameters(self):
|
||||||
"""Return lattice parameters a, b, c, alpha, beta, gamma."""
|
"""Return lattice parameters a, b, c, alpha, beta, gamma."""
|
||||||
return (self.a,self.b,self.c,self.alpha,self.beta,self.gamma)
|
if hasattr(self,'a'): return (self.a,self.b,self.c,self.alpha,self.beta,self.gamma)
|
||||||
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def immutable(self):
|
def immutable(self):
|
||||||
|
@ -269,7 +268,7 @@ class Crystal():
|
||||||
https://doi.org/10.1063/1.1661333
|
https://doi.org/10.1063/1.1661333
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if None in self.parameters:
|
if self.parameters is None:
|
||||||
raise KeyError('missing crystal lattice parameters')
|
raise KeyError('missing crystal lattice parameters')
|
||||||
return np.array([
|
return np.array([
|
||||||
[1,0,0],
|
[1,0,0],
|
||||||
|
@ -315,7 +314,9 @@ class Crystal():
|
||||||
+ _lattice_points.get(self.lattice if self.lattice == 'hP' else \
|
+ _lattice_points.get(self.lattice if self.lattice == 'hP' else \
|
||||||
self.lattice[-1],None),dtype=float)
|
self.lattice[-1],None),dtype=float)
|
||||||
|
|
||||||
def to_lattice(self, *, direction: np.ndarray = None, plane: np.ndarray = None) -> np.ndarray:
|
def to_lattice(self, *,
|
||||||
|
direction: np.ndarray = None,
|
||||||
|
plane: np.ndarray = None) -> np.ndarray:
|
||||||
"""
|
"""
|
||||||
Calculate lattice vector corresponding to crystal frame direction or plane normal.
|
Calculate lattice vector corresponding to crystal frame direction or plane normal.
|
||||||
|
|
||||||
|
@ -339,7 +340,9 @@ class Crystal():
|
||||||
return np.einsum('il,...l',basis,axis)
|
return np.einsum('il,...l',basis,axis)
|
||||||
|
|
||||||
|
|
||||||
def to_frame(self, *, uvw: np.ndarray = None, hkl: np.ndarray = None) -> np.ndarray:
|
def to_frame(self, *,
|
||||||
|
uvw: np.ndarray = None,
|
||||||
|
hkl: np.ndarray = None) -> np.ndarray:
|
||||||
"""
|
"""
|
||||||
Calculate crystal frame vector along lattice direction [uvw] or plane normal (hkl).
|
Calculate crystal frame vector along lattice direction [uvw] or plane normal (hkl).
|
||||||
|
|
||||||
|
@ -362,7 +365,8 @@ class Crystal():
|
||||||
return np.einsum('il,...l',basis,axis)
|
return np.einsum('il,...l',basis,axis)
|
||||||
|
|
||||||
|
|
||||||
def kinematics(self, mode: str) -> Dict[str, List[np.ndarray]]:
|
def kinematics(self,
|
||||||
|
mode: str) -> Dict[str, List[np.ndarray]]:
|
||||||
"""
|
"""
|
||||||
Return crystal kinematics systems.
|
Return crystal kinematics systems.
|
||||||
|
|
||||||
|
@ -621,7 +625,8 @@ class Crystal():
|
||||||
'plane': [m[:,3:6] for m in master]}
|
'plane': [m[:,3:6] for m in master]}
|
||||||
|
|
||||||
|
|
||||||
def relation_operations(self, model: str) -> Tuple[str, Rotation]:
|
def relation_operations(self,
|
||||||
|
model: str) -> Tuple[str, Rotation]:
|
||||||
"""
|
"""
|
||||||
Crystallographic orientation relationships for phase transformations.
|
Crystallographic orientation relationships for phase transformations.
|
||||||
|
|
||||||
|
|
|
@ -4,7 +4,7 @@ import warnings
|
||||||
import multiprocessing as mp
|
import multiprocessing as mp
|
||||||
from functools import partial
|
from functools import partial
|
||||||
import typing
|
import typing
|
||||||
from typing import Union, Optional, TextIO, List, Sequence, Literal
|
from typing import Union, Optional, TextIO, List, Sequence
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
@ -33,7 +33,7 @@ class Grid:
|
||||||
material: np.ndarray,
|
material: np.ndarray,
|
||||||
size: FloatSequence,
|
size: FloatSequence,
|
||||||
origin: FloatSequence = np.zeros(3),
|
origin: FloatSequence = np.zeros(3),
|
||||||
comments: Union[str, Sequence[str]] = []):
|
comments: Union[str, Sequence[str]] = None):
|
||||||
"""
|
"""
|
||||||
New geometry definition for grid solvers.
|
New geometry definition for grid solvers.
|
||||||
|
|
||||||
|
@ -53,7 +53,7 @@ class Grid:
|
||||||
self.material = material
|
self.material = material
|
||||||
self.size = size # type: ignore
|
self.size = size # type: ignore
|
||||||
self.origin = origin # type: ignore
|
self.origin = origin # type: ignore
|
||||||
self.comments = comments # type: ignore
|
self.comments = [] if comments is None else comments # type: ignore
|
||||||
|
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
|
@ -77,7 +77,8 @@ class Grid:
|
||||||
copy = __copy__
|
copy = __copy__
|
||||||
|
|
||||||
|
|
||||||
def __eq__(self, other: object) -> bool:
|
def __eq__(self,
|
||||||
|
other: object) -> bool:
|
||||||
"""
|
"""
|
||||||
Test equality of other.
|
Test equality of other.
|
||||||
|
|
||||||
|
@ -101,12 +102,13 @@ class Grid:
|
||||||
return self._material
|
return self._material
|
||||||
|
|
||||||
@material.setter
|
@material.setter
|
||||||
def material(self, material: np.ndarray):
|
def material(self,
|
||||||
|
material: np.ndarray):
|
||||||
if len(material.shape) != 3:
|
if len(material.shape) != 3:
|
||||||
raise ValueError(f'invalid material shape {material.shape}')
|
raise ValueError(f'invalid material shape {material.shape}')
|
||||||
elif material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
|
if material.dtype not in np.sctypes['float'] and material.dtype not in np.sctypes['int']:
|
||||||
raise TypeError(f'invalid material data type {material.dtype}')
|
raise TypeError(f'invalid material data type {material.dtype}')
|
||||||
else:
|
|
||||||
self._material = np.copy(material)
|
self._material = np.copy(material)
|
||||||
|
|
||||||
if self.material.dtype in np.sctypes['float'] and \
|
if self.material.dtype in np.sctypes['float'] and \
|
||||||
|
@ -120,10 +122,11 @@ class Grid:
|
||||||
return self._size
|
return self._size
|
||||||
|
|
||||||
@size.setter
|
@size.setter
|
||||||
def size(self, size: FloatSequence):
|
def size(self,
|
||||||
|
size: FloatSequence):
|
||||||
if len(size) != 3 or any(np.array(size) < 0):
|
if len(size) != 3 or any(np.array(size) < 0):
|
||||||
raise ValueError(f'invalid size {size}')
|
raise ValueError(f'invalid size {size}')
|
||||||
else:
|
|
||||||
self._size = np.array(size)
|
self._size = np.array(size)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
@ -132,10 +135,11 @@ class Grid:
|
||||||
return self._origin
|
return self._origin
|
||||||
|
|
||||||
@origin.setter
|
@origin.setter
|
||||||
def origin(self, origin: FloatSequence):
|
def origin(self,
|
||||||
|
origin: FloatSequence):
|
||||||
if len(origin) != 3:
|
if len(origin) != 3:
|
||||||
raise ValueError(f'invalid origin {origin}')
|
raise ValueError(f'invalid origin {origin}')
|
||||||
else:
|
|
||||||
self._origin = np.array(origin)
|
self._origin = np.array(origin)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
@ -144,7 +148,8 @@ class Grid:
|
||||||
return self._comments
|
return self._comments
|
||||||
|
|
||||||
@comments.setter
|
@comments.setter
|
||||||
def comments(self, comments: Union[str, Sequence[str]]):
|
def comments(self,
|
||||||
|
comments: Union[str, Sequence[str]]):
|
||||||
self._comments = [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
|
self._comments = [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
|
||||||
|
|
||||||
|
|
||||||
|
@ -229,8 +234,7 @@ class Grid:
|
||||||
content = f.readlines()
|
content = f.readlines()
|
||||||
for i,line in enumerate(content[:header_length]):
|
for i,line in enumerate(content[:header_length]):
|
||||||
items = line.split('#')[0].lower().strip().split()
|
items = line.split('#')[0].lower().strip().split()
|
||||||
key = items[0] if items else ''
|
if (key := items[0] if items else '') == 'grid':
|
||||||
if key == 'grid':
|
|
||||||
cells = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
|
cells = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
|
||||||
elif key == 'size':
|
elif key == 'size':
|
||||||
size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
|
size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
|
||||||
|
@ -242,8 +246,7 @@ class Grid:
|
||||||
material = np.empty(int(cells.prod())) # initialize as flat array
|
material = np.empty(int(cells.prod())) # initialize as flat array
|
||||||
i = 0
|
i = 0
|
||||||
for line in content[header_length:]:
|
for line in content[header_length:]:
|
||||||
items = line.split('#')[0].split()
|
if len(items := line.split('#')[0].split()) == 3:
|
||||||
if len(items) == 3:
|
|
||||||
if items[1].lower() == 'of':
|
if items[1].lower() == 'of':
|
||||||
material_entry = np.ones(int(items[0]))*float(items[2])
|
material_entry = np.ones(int(items[0]))*float(items[2])
|
||||||
elif items[1].lower() == 'to':
|
elif items[1].lower() == 'to':
|
||||||
|
@ -387,7 +390,9 @@ class Grid:
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _find_closest_seed(seeds: np.ndarray, weights: np.ndarray, point: np.ndarray) -> np.integer:
|
def _find_closest_seed(seeds: np.ndarray,
|
||||||
|
weights: np.ndarray,
|
||||||
|
point: np.ndarray) -> np.integer:
|
||||||
return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
|
return np.argmin(np.sum((np.broadcast_to(point,(len(seeds),3))-seeds)**2,axis=1) - weights)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
|
@ -624,7 +629,9 @@ class Grid:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def save(self, fname: Union[str, Path], compress: bool = True):
|
def save(self,
|
||||||
|
fname: Union[str, Path],
|
||||||
|
compress: bool = True):
|
||||||
"""
|
"""
|
||||||
Save as VTK image data file.
|
Save as VTK image data file.
|
||||||
|
|
||||||
|
@ -643,7 +650,8 @@ class Grid:
|
||||||
v.save(fname,parallel=False,compress=compress)
|
v.save(fname,parallel=False,compress=compress)
|
||||||
|
|
||||||
|
|
||||||
def save_ASCII(self, fname: Union[str, TextIO]):
|
def save_ASCII(self,
|
||||||
|
fname: Union[str, TextIO]):
|
||||||
"""
|
"""
|
||||||
Save as geom file.
|
Save as geom file.
|
||||||
|
|
||||||
|
@ -770,15 +778,16 @@ class Grid:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def mirror(self, directions: Sequence[str], reflect: bool = False) -> 'Grid':
|
def mirror(self,
|
||||||
|
directions: Sequence[str],
|
||||||
|
reflect: bool = False) -> 'Grid':
|
||||||
"""
|
"""
|
||||||
Mirror grid along given directions.
|
Mirror grid along given directions.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
directions : (sequence of) str
|
directions : (sequence of) {'x', 'y', 'z'}
|
||||||
Direction(s) along which the grid is mirrored.
|
Direction(s) along which the grid is mirrored.
|
||||||
Valid entries are 'x', 'y', 'z'.
|
|
||||||
reflect : bool, optional
|
reflect : bool, optional
|
||||||
Reflect (include) outermost layers. Defaults to False.
|
Reflect (include) outermost layers. Defaults to False.
|
||||||
|
|
||||||
|
@ -801,8 +810,7 @@ class Grid:
|
||||||
# materials: 1
|
# materials: 1
|
||||||
|
|
||||||
"""
|
"""
|
||||||
valid = ['x','y','z']
|
if not set(directions).issubset(valid := ['x', 'y', 'z']):
|
||||||
if not set(directions).issubset(valid):
|
|
||||||
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
|
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
|
||||||
|
|
||||||
limits: Sequence[Optional[int]] = [None,None] if reflect else [-2,0]
|
limits: Sequence[Optional[int]] = [None,None] if reflect else [-2,0]
|
||||||
|
@ -822,15 +830,15 @@ class Grid:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def flip(self, directions: Union[Literal['x', 'y', 'z'], Sequence[Literal['x', 'y', 'z']]]) -> 'Grid':
|
def flip(self,
|
||||||
|
directions: Sequence[str]) -> 'Grid':
|
||||||
"""
|
"""
|
||||||
Flip grid along given directions.
|
Flip grid along given directions.
|
||||||
|
|
||||||
Parameters
|
Parameters
|
||||||
----------
|
----------
|
||||||
directions : (sequence of) str
|
directions : (sequence of) {'x', 'y', 'z'}
|
||||||
Direction(s) along which the grid is flipped.
|
Direction(s) along which the grid is flipped.
|
||||||
Valid entries are 'x', 'y', 'z'.
|
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
|
@ -838,8 +846,7 @@ class Grid:
|
||||||
Updated grid-based geometry.
|
Updated grid-based geometry.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
valid = ['x','y','z']
|
if not set(directions).issubset(valid := ['x', 'y', 'z']):
|
||||||
if not set(directions).issubset(valid):
|
|
||||||
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
|
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
|
||||||
|
|
||||||
|
|
||||||
|
@ -852,7 +859,9 @@ class Grid:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def scale(self, cells: IntSequence, periodic: bool = True) -> 'Grid':
|
def scale(self,
|
||||||
|
cells: IntSequence,
|
||||||
|
periodic: bool = True) -> 'Grid':
|
||||||
"""
|
"""
|
||||||
Scale grid to new cells.
|
Scale grid to new cells.
|
||||||
|
|
||||||
|
@ -958,7 +967,9 @@ class Grid:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def rotate(self, R: Rotation, fill: int = None) -> 'Grid':
|
def rotate(self,
|
||||||
|
R: Rotation,
|
||||||
|
fill: int = None) -> 'Grid':
|
||||||
"""
|
"""
|
||||||
Rotate grid (pad if required).
|
Rotate grid (pad if required).
|
||||||
|
|
||||||
|
@ -1049,7 +1060,9 @@ class Grid:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def substitute(self, from_material: IntSequence, to_material: IntSequence) -> 'Grid':
|
def substitute(self,
|
||||||
|
from_material: IntSequence,
|
||||||
|
to_material: IntSequence) -> 'Grid':
|
||||||
"""
|
"""
|
||||||
Substitute material indices.
|
Substitute material indices.
|
||||||
|
|
||||||
|
@ -1150,7 +1163,9 @@ class Grid:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def get_grain_boundaries(self, periodic: bool = True, directions: Sequence[str] = 'xyz'):
|
def get_grain_boundaries(self,
|
||||||
|
periodic: bool = True,
|
||||||
|
directions: Sequence[str] = 'xyz') -> VTK:
|
||||||
"""
|
"""
|
||||||
Create VTK unstructured grid containing grain boundaries.
|
Create VTK unstructured grid containing grain boundaries.
|
||||||
|
|
||||||
|
@ -1158,9 +1173,9 @@ class Grid:
|
||||||
----------
|
----------
|
||||||
periodic : bool, optional
|
periodic : bool, optional
|
||||||
Assume grid to be periodic. Defaults to True.
|
Assume grid to be periodic. Defaults to True.
|
||||||
directions : (sequence of) string, optional
|
directions : (sequence of) {'x', 'y', 'z'}, optional
|
||||||
Direction(s) along which the boundaries are determined.
|
Direction(s) along which the boundaries are determined.
|
||||||
Valid entries are 'x', 'y', 'z'. Defaults to 'xyz'.
|
Defaults to 'xyz'.
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
|
@ -1168,8 +1183,7 @@ class Grid:
|
||||||
VTK-based geometry of grain boundary network.
|
VTK-based geometry of grain boundary network.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
valid = ['x','y','z']
|
if not set(directions).issubset(valid := ['x', 'y', 'z']):
|
||||||
if not set(directions).issubset(valid):
|
|
||||||
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
|
raise ValueError(f'invalid direction {set(directions).difference(valid)} specified')
|
||||||
|
|
||||||
o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],
|
o = [[0, self.cells[0]+1, np.prod(self.cells[:2]+1)+self.cells[0]+1, np.prod(self.cells[:2]+1)],
|
||||||
|
|
|
@ -5,7 +5,7 @@ import os
|
||||||
import copy
|
import copy
|
||||||
import datetime
|
import datetime
|
||||||
import warnings
|
import warnings
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET # noqa
|
||||||
import xml.dom.minidom
|
import xml.dom.minidom
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
import re
|
import re
|
||||||
import copy
|
import copy
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Union, Optional, Tuple, List
|
from typing import Union, Tuple, List
|
||||||
|
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
@ -12,7 +12,10 @@ from . import util
|
||||||
class Table:
|
class Table:
|
||||||
"""Manipulate multi-dimensional spreadsheet-like data."""
|
"""Manipulate multi-dimensional spreadsheet-like data."""
|
||||||
|
|
||||||
def __init__(self, data: np.ndarray, shapes: dict, comments: Optional[Union[str, list]] = None):
|
def __init__(self,
|
||||||
|
data: np.ndarray,
|
||||||
|
shapes: dict,
|
||||||
|
comments: Union[str, list] = None):
|
||||||
"""
|
"""
|
||||||
New spreadsheet.
|
New spreadsheet.
|
||||||
|
|
||||||
|
@ -41,7 +44,8 @@ class Table:
|
||||||
return '\n'.join(['# '+c for c in self.comments])+'\n'+data_repr
|
return '\n'.join(['# '+c for c in self.comments])+'\n'+data_repr
|
||||||
|
|
||||||
|
|
||||||
def __getitem__(self, item: Union[slice, Tuple[slice, ...]]) -> 'Table':
|
def __getitem__(self,
|
||||||
|
item: Union[slice, Tuple[slice, ...]]) -> 'Table':
|
||||||
"""
|
"""
|
||||||
Slice the Table according to item.
|
Slice the Table according to item.
|
||||||
|
|
||||||
|
@ -100,7 +104,9 @@ class Table:
|
||||||
copy = __copy__
|
copy = __copy__
|
||||||
|
|
||||||
|
|
||||||
def _label(self, what: Union[str, List[str]], how: str) -> List[str]:
|
def _label(self,
|
||||||
|
what: Union[str, List[str]],
|
||||||
|
how: str) -> List[str]:
|
||||||
"""
|
"""
|
||||||
Expand labels according to data shape.
|
Expand labels according to data shape.
|
||||||
|
|
||||||
|
@ -131,7 +137,8 @@ class Table:
|
||||||
return labels
|
return labels
|
||||||
|
|
||||||
|
|
||||||
def _relabel(self, how: str):
|
def _relabel(self,
|
||||||
|
how: str):
|
||||||
"""
|
"""
|
||||||
Modify labeling of data in-place.
|
Modify labeling of data in-place.
|
||||||
|
|
||||||
|
@ -147,7 +154,10 @@ class Table:
|
||||||
self.data.columns = self._label(self.shapes,how) #type: ignore
|
self.data.columns = self._label(self.shapes,how) #type: ignore
|
||||||
|
|
||||||
|
|
||||||
def _add_comment(self, label: str, shape: Tuple[int, ...], info: Optional[str]):
|
def _add_comment(self,
|
||||||
|
label: str,
|
||||||
|
shape: Tuple[int, ...],
|
||||||
|
info: str = None):
|
||||||
if info is not None:
|
if info is not None:
|
||||||
specific = f'{label}{" "+str(shape) if np.prod(shape,dtype=int) > 1 else ""}: {info}'
|
specific = f'{label}{" "+str(shape) if np.prod(shape,dtype=int) > 1 else ""}: {info}'
|
||||||
general = util.execution_stamp('Table')
|
general = util.execution_stamp('Table')
|
||||||
|
@ -309,8 +319,7 @@ class Table:
|
||||||
data = np.loadtxt(content)
|
data = np.loadtxt(content)
|
||||||
|
|
||||||
shapes = {'eu':3, 'pos':2, 'IQ':1, 'CI':1, 'ID':1, 'intensity':1, 'fit':1}
|
shapes = {'eu':3, 'pos':2, 'IQ':1, 'CI':1, 'ID':1, 'intensity':1, 'fit':1}
|
||||||
remainder = data.shape[1]-sum(shapes.values())
|
if (remainder := data.shape[1]-sum(shapes.values())) > 0:
|
||||||
if remainder > 0: # 3.8 can do: if (remainder := data.shape[1]-sum(shapes.values())) > 0
|
|
||||||
shapes['unknown'] = remainder
|
shapes['unknown'] = remainder
|
||||||
|
|
||||||
return Table(data,shapes,comments)
|
return Table(data,shapes,comments)
|
||||||
|
@ -321,7 +330,8 @@ class Table:
|
||||||
return list(self.shapes)
|
return list(self.shapes)
|
||||||
|
|
||||||
|
|
||||||
def get(self, label: str) -> np.ndarray:
|
def get(self,
|
||||||
|
label: str) -> np.ndarray:
|
||||||
"""
|
"""
|
||||||
Get column data.
|
Get column data.
|
||||||
|
|
||||||
|
@ -341,7 +351,10 @@ class Table:
|
||||||
return data.astype(type(data.flatten()[0]))
|
return data.astype(type(data.flatten()[0]))
|
||||||
|
|
||||||
|
|
||||||
def set(self, label: str, data: np.ndarray, info: str = None) -> 'Table':
|
def set(self,
|
||||||
|
label: str,
|
||||||
|
data: np.ndarray,
|
||||||
|
info: str = None) -> 'Table':
|
||||||
"""
|
"""
|
||||||
Set column data.
|
Set column data.
|
||||||
|
|
||||||
|
@ -362,8 +375,7 @@ class Table:
|
||||||
"""
|
"""
|
||||||
dup = self.copy()
|
dup = self.copy()
|
||||||
dup._add_comment(label, data.shape[1:], info)
|
dup._add_comment(label, data.shape[1:], info)
|
||||||
m = re.match(r'(.*)\[((\d+,)*(\d+))\]',label)
|
if m := re.match(r'(.*)\[((\d+,)*(\d+))\]',label):
|
||||||
if m:
|
|
||||||
key = m.group(1)
|
key = m.group(1)
|
||||||
idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(","))),
|
idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(","))),
|
||||||
self.shapes[key])
|
self.shapes[key])
|
||||||
|
@ -374,7 +386,10 @@ class Table:
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def add(self, label: str, data: np.ndarray, info: str = None) -> 'Table':
|
def add(self,
|
||||||
|
label: str,
|
||||||
|
data: np.ndarray,
|
||||||
|
info: str = None) -> 'Table':
|
||||||
"""
|
"""
|
||||||
Add column data.
|
Add column data.
|
||||||
|
|
||||||
|
@ -406,7 +421,8 @@ class Table:
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def delete(self, label: str) -> 'Table':
|
def delete(self,
|
||||||
|
label: str) -> 'Table':
|
||||||
"""
|
"""
|
||||||
Delete column data.
|
Delete column data.
|
||||||
|
|
||||||
|
@ -427,7 +443,10 @@ class Table:
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def rename(self, old: Union[str, List[str]], new: Union[str, List[str]], info: str = None) -> 'Table':
|
def rename(self,
|
||||||
|
old: Union[str, List[str]],
|
||||||
|
new: Union[str, List[str]],
|
||||||
|
info: str = None) -> 'Table':
|
||||||
"""
|
"""
|
||||||
Rename column data.
|
Rename column data.
|
||||||
|
|
||||||
|
@ -453,7 +472,9 @@ class Table:
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def sort_by(self, labels: Union[str, List[str]], ascending: Union[bool, List[bool]] = True) -> 'Table':
|
def sort_by(self,
|
||||||
|
labels: Union[str, List[str]],
|
||||||
|
ascending: Union[bool, List[bool]] = True) -> 'Table':
|
||||||
"""
|
"""
|
||||||
Sort table by values of given labels.
|
Sort table by values of given labels.
|
||||||
|
|
||||||
|
@ -472,8 +493,7 @@ class Table:
|
||||||
"""
|
"""
|
||||||
labels_ = [labels] if isinstance(labels,str) else labels.copy()
|
labels_ = [labels] if isinstance(labels,str) else labels.copy()
|
||||||
for i,l in enumerate(labels_):
|
for i,l in enumerate(labels_):
|
||||||
m = re.match(r'(.*)\[((\d+,)*(\d+))\]',l)
|
if m := re.match(r'(.*)\[((\d+,)*(\d+))\]',l):
|
||||||
if m:
|
|
||||||
idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(','))),
|
idx = np.ravel_multi_index(tuple(map(int,m.group(2).split(','))),
|
||||||
self.shapes[m.group(1)])
|
self.shapes[m.group(1)])
|
||||||
labels_[i] = f'{1+idx}_{m.group(1)}'
|
labels_[i] = f'{1+idx}_{m.group(1)}'
|
||||||
|
@ -486,7 +506,8 @@ class Table:
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def append(self, other: 'Table') -> 'Table':
|
def append(self,
|
||||||
|
other: 'Table') -> 'Table':
|
||||||
"""
|
"""
|
||||||
Append other table vertically (similar to numpy.vstack).
|
Append other table vertically (similar to numpy.vstack).
|
||||||
|
|
||||||
|
@ -505,13 +526,14 @@ class Table:
|
||||||
"""
|
"""
|
||||||
if self.shapes != other.shapes or not self.data.columns.equals(other.data.columns):
|
if self.shapes != other.shapes or not self.data.columns.equals(other.data.columns):
|
||||||
raise KeyError('Labels or shapes or order do not match')
|
raise KeyError('Labels or shapes or order do not match')
|
||||||
else:
|
|
||||||
dup = self.copy()
|
dup = self.copy()
|
||||||
dup.data = dup.data.append(other.data,ignore_index=True)
|
dup.data = dup.data.append(other.data,ignore_index=True)
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def join(self, other: 'Table') -> 'Table':
|
def join(self,
|
||||||
|
other: 'Table') -> 'Table':
|
||||||
"""
|
"""
|
||||||
Append other table horizontally (similar to numpy.hstack).
|
Append other table horizontally (similar to numpy.hstack).
|
||||||
|
|
||||||
|
@ -530,7 +552,7 @@ class Table:
|
||||||
"""
|
"""
|
||||||
if set(self.shapes) & set(other.shapes) or self.data.shape[0] != other.data.shape[0]:
|
if set(self.shapes) & set(other.shapes) or self.data.shape[0] != other.data.shape[0]:
|
||||||
raise KeyError('Duplicated keys or row count mismatch')
|
raise KeyError('Duplicated keys or row count mismatch')
|
||||||
else:
|
|
||||||
dup = self.copy()
|
dup = self.copy()
|
||||||
dup.data = dup.data.join(other.data)
|
dup.data = dup.data.join(other.data)
|
||||||
for key in other.shapes:
|
for key in other.shapes:
|
||||||
|
@ -538,7 +560,8 @@ class Table:
|
||||||
return dup
|
return dup
|
||||||
|
|
||||||
|
|
||||||
def save(self, fname: FileHandle):
|
def save(self,
|
||||||
|
fname: FileHandle):
|
||||||
"""
|
"""
|
||||||
Save as plain text file.
|
Save as plain text file.
|
||||||
|
|
||||||
|
|
|
@ -9,3 +9,6 @@ import numpy as np
|
||||||
FloatSequence = Union[np.ndarray,Sequence[float]]
|
FloatSequence = Union[np.ndarray,Sequence[float]]
|
||||||
IntSequence = Union[np.ndarray,Sequence[int]]
|
IntSequence = Union[np.ndarray,Sequence[int]]
|
||||||
FileHandle = Union[TextIO, str, Path]
|
FileHandle = Union[TextIO, str, Path]
|
||||||
|
NumpyRngSeed = Union[int, IntSequence, np.random.SeedSequence, np.random.Generator]
|
||||||
|
# BitGenerator does not exists in older numpy versions
|
||||||
|
#NumpyRngSeed = Union[int, IntSequence, np.random.SeedSequence, np.random.BitGenerator, np.random.Generator]
|
||||||
|
|
|
@ -22,7 +22,8 @@ class VTK:
|
||||||
High-level interface to VTK.
|
High-level interface to VTK.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, vtk_data: vtk.vtkDataSet):
|
def __init__(self,
|
||||||
|
vtk_data: vtk.vtkDataSet):
|
||||||
"""
|
"""
|
||||||
New spatial visualization.
|
New spatial visualization.
|
||||||
|
|
||||||
|
@ -38,7 +39,9 @@ class VTK:
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_image_data(cells: IntSequence, size: FloatSequence, origin: FloatSequence = np.zeros(3)) -> 'VTK':
|
def from_image_data(cells: IntSequence,
|
||||||
|
size: FloatSequence,
|
||||||
|
origin: FloatSequence = np.zeros(3)) -> 'VTK':
|
||||||
"""
|
"""
|
||||||
Create VTK of type vtk.vtkImageData.
|
Create VTK of type vtk.vtkImageData.
|
||||||
|
|
||||||
|
@ -68,7 +71,9 @@ class VTK:
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_rectilinear_grid(grid: np.ndarray, size: FloatSequence, origin: FloatSequence = np.zeros(3)) -> 'VTK':
|
def from_rectilinear_grid(grid: np.ndarray,
|
||||||
|
size: FloatSequence,
|
||||||
|
origin: FloatSequence = np.zeros(3)) -> 'VTK':
|
||||||
"""
|
"""
|
||||||
Create VTK of type vtk.vtkRectilinearGrid.
|
Create VTK of type vtk.vtkRectilinearGrid.
|
||||||
|
|
||||||
|
@ -100,7 +105,9 @@ class VTK:
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_unstructured_grid(nodes: np.ndarray, connectivity: np.ndarray, cell_type: str) -> 'VTK':
|
def from_unstructured_grid(nodes: np.ndarray,
|
||||||
|
connectivity: np.ndarray,
|
||||||
|
cell_type: str) -> 'VTK':
|
||||||
"""
|
"""
|
||||||
Create VTK of type vtk.vtkUnstructuredGrid.
|
Create VTK of type vtk.vtkUnstructuredGrid.
|
||||||
|
|
||||||
|
@ -195,8 +202,7 @@ class VTK:
|
||||||
"""
|
"""
|
||||||
if not os.path.isfile(fname): # vtk has a strange error handling
|
if not os.path.isfile(fname): # vtk has a strange error handling
|
||||||
raise FileNotFoundError(f'No such file: {fname}')
|
raise FileNotFoundError(f'No such file: {fname}')
|
||||||
ext = Path(fname).suffix
|
if (ext := Path(fname).suffix) == '.vtk' or dataset_type is not None:
|
||||||
if ext == '.vtk' or dataset_type is not None:
|
|
||||||
reader = vtk.vtkGenericDataObjectReader()
|
reader = vtk.vtkGenericDataObjectReader()
|
||||||
reader.SetFileName(str(fname))
|
reader.SetFileName(str(fname))
|
||||||
if dataset_type is None:
|
if dataset_type is None:
|
||||||
|
@ -238,7 +244,11 @@ class VTK:
|
||||||
def _write(writer):
|
def _write(writer):
|
||||||
"""Wrapper for parallel writing."""
|
"""Wrapper for parallel writing."""
|
||||||
writer.Write()
|
writer.Write()
|
||||||
def save(self, fname: Union[str, Path], parallel: bool = True, compress: bool = True):
|
|
||||||
|
def save(self,
|
||||||
|
fname: Union[str, Path],
|
||||||
|
parallel: bool = True,
|
||||||
|
compress: bool = True):
|
||||||
"""
|
"""
|
||||||
Save as VTK file.
|
Save as VTK file.
|
||||||
|
|
||||||
|
@ -284,7 +294,9 @@ class VTK:
|
||||||
|
|
||||||
# Check https://blog.kitware.com/ghost-and-blanking-visibility-changes/ for missing data
|
# Check https://blog.kitware.com/ghost-and-blanking-visibility-changes/ for missing data
|
||||||
# Needs support for damask.Table
|
# Needs support for damask.Table
|
||||||
def add(self, data: Union[np.ndarray, np.ma.MaskedArray], label: str = None):
|
def add(self,
|
||||||
|
data: Union[np.ndarray, np.ma.MaskedArray],
|
||||||
|
label: str = None):
|
||||||
"""
|
"""
|
||||||
Add data to either cells or points.
|
Add data to either cells or points.
|
||||||
|
|
||||||
|
@ -331,7 +343,8 @@ class VTK:
|
||||||
raise TypeError
|
raise TypeError
|
||||||
|
|
||||||
|
|
||||||
def get(self, label: str) -> np.ndarray:
|
def get(self,
|
||||||
|
label: str) -> np.ndarray:
|
||||||
"""
|
"""
|
||||||
Get either cell or point data.
|
Get either cell or point data.
|
||||||
|
|
||||||
|
@ -383,7 +396,8 @@ class VTK:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
|
||||||
def set_comments(self, comments: Union[str, List[str]]):
|
def set_comments(self,
|
||||||
|
comments: Union[str, List[str]]):
|
||||||
"""
|
"""
|
||||||
Set comments.
|
Set comments.
|
||||||
|
|
||||||
|
@ -400,7 +414,8 @@ class VTK:
|
||||||
self.vtk_data.GetFieldData().AddArray(s)
|
self.vtk_data.GetFieldData().AddArray(s)
|
||||||
|
|
||||||
|
|
||||||
def add_comments(self, comments: Union[str, List[str]]):
|
def add_comments(self,
|
||||||
|
comments: Union[str, List[str]]):
|
||||||
"""
|
"""
|
||||||
Add comments.
|
Add comments.
|
||||||
|
|
||||||
|
@ -440,7 +455,7 @@ class VTK:
|
||||||
width = tk.winfo_screenwidth()
|
width = tk.winfo_screenwidth()
|
||||||
height = tk.winfo_screenheight()
|
height = tk.winfo_screenheight()
|
||||||
tk.destroy()
|
tk.destroy()
|
||||||
except Exception as e:
|
except Exception:
|
||||||
width = 1024
|
width = 1024
|
||||||
height = 768
|
height = 768
|
||||||
|
|
||||||
|
|
|
@ -20,7 +20,9 @@ import numpy as _np
|
||||||
from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence
|
from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence
|
||||||
|
|
||||||
|
|
||||||
def _ks(size: _FloatSequence, cells: _IntSequence, first_order: bool = False) -> _np.ndarray:
|
def _ks(size: _FloatSequence,
|
||||||
|
cells: _IntSequence,
|
||||||
|
first_order: bool = False) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Get wave numbers operator.
|
Get wave numbers operator.
|
||||||
|
|
||||||
|
@ -47,7 +49,8 @@ def _ks(size: _FloatSequence, cells: _IntSequence, first_order: bool = False) ->
|
||||||
return _np.stack(_np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij'), axis=-1)
|
return _np.stack(_np.meshgrid(k_sk,k_sj,k_si,indexing = 'ij'), axis=-1)
|
||||||
|
|
||||||
|
|
||||||
def curl(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
def curl(size: _FloatSequence,
|
||||||
|
f: _np.ndarray) -> _np.ndarray:
|
||||||
u"""
|
u"""
|
||||||
Calculate curl of a vector or tensor field in Fourier space.
|
Calculate curl of a vector or tensor field in Fourier space.
|
||||||
|
|
||||||
|
@ -78,7 +81,8 @@ def curl(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
||||||
return _np.fft.irfftn(curl_,axes=(0,1,2),s=f.shape[:3])
|
return _np.fft.irfftn(curl_,axes=(0,1,2),s=f.shape[:3])
|
||||||
|
|
||||||
|
|
||||||
def divergence(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
def divergence(size: _FloatSequence,
|
||||||
|
f: _np.ndarray) -> _np.ndarray:
|
||||||
u"""
|
u"""
|
||||||
Calculate divergence of a vector or tensor field in Fourier space.
|
Calculate divergence of a vector or tensor field in Fourier space.
|
||||||
|
|
||||||
|
@ -105,7 +109,8 @@ def divergence(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
||||||
return _np.fft.irfftn(div_,axes=(0,1,2),s=f.shape[:3])
|
return _np.fft.irfftn(div_,axes=(0,1,2),s=f.shape[:3])
|
||||||
|
|
||||||
|
|
||||||
def gradient(size: _FloatSequence, f: _np.ndarray) -> _np.ndarray:
|
def gradient(size: _FloatSequence,
|
||||||
|
f: _np.ndarray) -> _np.ndarray:
|
||||||
u"""
|
u"""
|
||||||
Calculate gradient of a scalar or vector field in Fourier space.
|
Calculate gradient of a scalar or vector field in Fourier space.
|
||||||
|
|
||||||
|
@ -163,7 +168,8 @@ def coordinates0_point(cells: _IntSequence,
|
||||||
axis = -1)
|
axis = -1)
|
||||||
|
|
||||||
|
|
||||||
def displacement_fluct_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
def displacement_fluct_point(size: _FloatSequence,
|
||||||
|
F: _np.ndarray) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Cell center displacement field from fluctuation part of the deformation gradient field.
|
Cell center displacement field from fluctuation part of the deformation gradient field.
|
||||||
|
|
||||||
|
@ -195,7 +201,8 @@ def displacement_fluct_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarra
|
||||||
return _np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3])
|
return _np.fft.irfftn(displacement,axes=(0,1,2),s=F.shape[:3])
|
||||||
|
|
||||||
|
|
||||||
def displacement_avg_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
def displacement_avg_point(size: _FloatSequence,
|
||||||
|
F: _np.ndarray) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Cell center displacement field from average part of the deformation gradient field.
|
Cell center displacement field from average part of the deformation gradient field.
|
||||||
|
|
||||||
|
@ -216,7 +223,8 @@ def displacement_avg_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
||||||
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_point(F.shape[:3],size))
|
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_point(F.shape[:3],size))
|
||||||
|
|
||||||
|
|
||||||
def displacement_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
def displacement_point(size: _FloatSequence,
|
||||||
|
F: _np.ndarray) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Cell center displacement field from deformation gradient field.
|
Cell center displacement field from deformation gradient field.
|
||||||
|
|
||||||
|
@ -236,7 +244,9 @@ def displacement_point(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
||||||
return displacement_avg_point(size,F) + displacement_fluct_point(size,F)
|
return displacement_avg_point(size,F) + displacement_fluct_point(size,F)
|
||||||
|
|
||||||
|
|
||||||
def coordinates_point(size: _FloatSequence, F: _np.ndarray, origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
|
def coordinates_point(size: _FloatSequence,
|
||||||
|
F: _np.ndarray,
|
||||||
|
origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Cell center positions.
|
Cell center positions.
|
||||||
|
|
||||||
|
@ -335,7 +345,8 @@ def coordinates0_node(cells: _IntSequence,
|
||||||
axis = -1)
|
axis = -1)
|
||||||
|
|
||||||
|
|
||||||
def displacement_fluct_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
def displacement_fluct_node(size: _FloatSequence,
|
||||||
|
F: _np.ndarray) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Nodal displacement field from fluctuation part of the deformation gradient field.
|
Nodal displacement field from fluctuation part of the deformation gradient field.
|
||||||
|
|
||||||
|
@ -355,7 +366,8 @@ def displacement_fluct_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray
|
||||||
return point_to_node(displacement_fluct_point(size,F))
|
return point_to_node(displacement_fluct_point(size,F))
|
||||||
|
|
||||||
|
|
||||||
def displacement_avg_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
def displacement_avg_node(size: _FloatSequence,
|
||||||
|
F: _np.ndarray) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Nodal displacement field from average part of the deformation gradient field.
|
Nodal displacement field from average part of the deformation gradient field.
|
||||||
|
|
||||||
|
@ -376,7 +388,8 @@ def displacement_avg_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
||||||
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_node(F.shape[:3],size))
|
return _np.einsum('ml,ijkl->ijkm',F_avg - _np.eye(3),coordinates0_node(F.shape[:3],size))
|
||||||
|
|
||||||
|
|
||||||
def displacement_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
def displacement_node(size: _FloatSequence,
|
||||||
|
F: _np.ndarray) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Nodal displacement field from deformation gradient field.
|
Nodal displacement field from deformation gradient field.
|
||||||
|
|
||||||
|
@ -396,7 +409,9 @@ def displacement_node(size: _FloatSequence, F: _np.ndarray) -> _np.ndarray:
|
||||||
return displacement_avg_node(size,F) + displacement_fluct_node(size,F)
|
return displacement_avg_node(size,F) + displacement_fluct_node(size,F)
|
||||||
|
|
||||||
|
|
||||||
def coordinates_node(size: _FloatSequence, F: _np.ndarray, origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
|
def coordinates_node(size: _FloatSequence,
|
||||||
|
F: _np.ndarray,
|
||||||
|
origin: _FloatSequence = _np.zeros(3)) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Nodal positions.
|
Nodal positions.
|
||||||
|
|
||||||
|
@ -526,7 +541,9 @@ def coordinates0_valid(coordinates0: _np.ndarray) -> bool:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def regrid(size: _FloatSequence, F: _np.ndarray, cells: _IntSequence) -> _np.ndarray:
|
def regrid(size: _FloatSequence,
|
||||||
|
F: _np.ndarray,
|
||||||
|
cells: _IntSequence) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Return mapping from coordinates in deformed configuration to a regular grid.
|
Return mapping from coordinates in deformed configuration to a regular grid.
|
||||||
|
|
||||||
|
|
|
@ -122,7 +122,9 @@ def rotation(T: _np.ndarray) -> _rotation.Rotation:
|
||||||
return _rotation.Rotation.from_matrix(_polar_decomposition(T,'R')[0])
|
return _rotation.Rotation.from_matrix(_polar_decomposition(T,'R')[0])
|
||||||
|
|
||||||
|
|
||||||
def strain(F: _np.ndarray, t: str, m: float) -> _np.ndarray:
|
def strain(F: _np.ndarray,
|
||||||
|
t: str,
|
||||||
|
m: float) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Calculate strain tensor (Seth–Hill family).
|
Calculate strain tensor (Seth–Hill family).
|
||||||
|
|
||||||
|
@ -162,7 +164,8 @@ def strain(F: _np.ndarray, t: str, m: float) -> _np.ndarray:
|
||||||
return eps
|
return eps
|
||||||
|
|
||||||
|
|
||||||
def stress_Cauchy(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
|
def stress_Cauchy(P: _np.ndarray,
|
||||||
|
F: _np.ndarray) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Calculate the Cauchy stress (true stress).
|
Calculate the Cauchy stress (true stress).
|
||||||
|
|
||||||
|
@ -184,7 +187,8 @@ def stress_Cauchy(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
|
||||||
return _tensor.symmetric(_np.einsum('...,...ij,...kj',1.0/_np.linalg.det(F),P,F))
|
return _tensor.symmetric(_np.einsum('...,...ij,...kj',1.0/_np.linalg.det(F),P,F))
|
||||||
|
|
||||||
|
|
||||||
def stress_second_Piola_Kirchhoff(P: _np.ndarray, F: _np.ndarray) -> _np.ndarray:
|
def stress_second_Piola_Kirchhoff(P: _np.ndarray,
|
||||||
|
F: _np.ndarray) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Calculate the second Piola-Kirchhoff stress.
|
Calculate the second Piola-Kirchhoff stress.
|
||||||
|
|
||||||
|
@ -243,7 +247,8 @@ def stretch_right(T: _np.ndarray) -> _np.ndarray:
|
||||||
return _polar_decomposition(T,'U')[0]
|
return _polar_decomposition(T,'U')[0]
|
||||||
|
|
||||||
|
|
||||||
def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
|
def _polar_decomposition(T: _np.ndarray,
|
||||||
|
requested: _Sequence[str]) -> tuple:
|
||||||
"""
|
"""
|
||||||
Perform singular value decomposition.
|
Perform singular value decomposition.
|
||||||
|
|
||||||
|
@ -251,7 +256,7 @@ def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
|
||||||
----------
|
----------
|
||||||
T : numpy.ndarray, shape (...,3,3)
|
T : numpy.ndarray, shape (...,3,3)
|
||||||
Tensor of which the singular values are computed.
|
Tensor of which the singular values are computed.
|
||||||
requested : iterable of str
|
requested : sequence of {'R', 'U', 'V'}
|
||||||
Requested outputs: ‘R’ for the rotation tensor,
|
Requested outputs: ‘R’ for the rotation tensor,
|
||||||
‘V’ for left stretch tensor and ‘U’ for right stretch tensor.
|
‘V’ for left stretch tensor and ‘U’ for right stretch tensor.
|
||||||
|
|
||||||
|
@ -273,7 +278,8 @@ def _polar_decomposition(T: _np.ndarray, requested: _Sequence[str]) -> tuple:
|
||||||
return tuple(output)
|
return tuple(output)
|
||||||
|
|
||||||
|
|
||||||
def _equivalent_Mises(T_sym: _np.ndarray, s: float) -> _np.ndarray:
|
def _equivalent_Mises(T_sym: _np.ndarray,
|
||||||
|
s: float) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Base equation for Mises equivalent of a stress or strain tensor.
|
Base equation for Mises equivalent of a stress or strain tensor.
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
|
||||||
"""Functionality for generation of seed points for Voronoi or Laguerre tessellation."""
|
"""Functionality for generation of seed points for Voronoi or Laguerre tessellation."""
|
||||||
|
|
||||||
from typing import Tuple as _Tuple
|
from typing import Tuple as _Tuple
|
||||||
|
@ -5,13 +6,15 @@ from typing import Tuple as _Tuple
|
||||||
from scipy import spatial as _spatial
|
from scipy import spatial as _spatial
|
||||||
import numpy as _np
|
import numpy as _np
|
||||||
|
|
||||||
from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence
|
from ._typehints import FloatSequence as _FloatSequence, IntSequence as _IntSequence, NumpyRngSeed as _NumpyRngSeed
|
||||||
from . import util as _util
|
from . import util as _util
|
||||||
from . import grid_filters as _grid_filters
|
from . import grid_filters as _grid_filters
|
||||||
|
|
||||||
|
|
||||||
def from_random(size: _FloatSequence, N_seeds: int, cells: _IntSequence = None,
|
def from_random(size: _FloatSequence,
|
||||||
rng_seed=None) -> _np.ndarray:
|
N_seeds: int,
|
||||||
|
cells: _IntSequence = None,
|
||||||
|
rng_seed: _NumpyRngSeed = None) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Place seeds randomly in space.
|
Place seeds randomly in space.
|
||||||
|
|
||||||
|
@ -46,8 +49,12 @@ def from_random(size: _FloatSequence, N_seeds: int, cells: _IntSequence = None,
|
||||||
return coords
|
return coords
|
||||||
|
|
||||||
|
|
||||||
def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, distance: float,
|
def from_Poisson_disc(size: _FloatSequence,
|
||||||
periodic: bool = True, rng_seed=None) -> _np.ndarray:
|
N_seeds: int,
|
||||||
|
N_candidates: int,
|
||||||
|
distance: float,
|
||||||
|
periodic: bool = True,
|
||||||
|
rng_seed: _NumpyRngSeed = None) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Place seeds according to a Poisson disc distribution.
|
Place seeds according to a Poisson disc distribution.
|
||||||
|
|
||||||
|
@ -86,10 +93,9 @@ def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, dis
|
||||||
tree = _spatial.cKDTree(coords[:s],boxsize=size) if periodic else \
|
tree = _spatial.cKDTree(coords[:s],boxsize=size) if periodic else \
|
||||||
_spatial.cKDTree(coords[:s])
|
_spatial.cKDTree(coords[:s])
|
||||||
distances = tree.query(candidates)[0]
|
distances = tree.query(candidates)[0]
|
||||||
best = distances.argmax()
|
if distances.max() > distance: # require minimum separation
|
||||||
if distances[best] > distance: # require minimum separation
|
|
||||||
i = 0
|
i = 0
|
||||||
coords[s] = candidates[best] # maximum separation to existing point cloud
|
coords[s] = candidates[distances.argmax()] # maximum separation to existing point cloud
|
||||||
s += 1
|
s += 1
|
||||||
progress.update(s)
|
progress.update(s)
|
||||||
|
|
||||||
|
@ -99,8 +105,11 @@ def from_Poisson_disc(size: _FloatSequence, N_seeds: int, N_candidates: int, dis
|
||||||
return coords
|
return coords
|
||||||
|
|
||||||
|
|
||||||
def from_grid(grid, selection: _IntSequence = None, invert_selection: bool = False,
|
def from_grid(grid,
|
||||||
average: bool = False, periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:
|
selection: _IntSequence = None,
|
||||||
|
invert_selection: bool = False,
|
||||||
|
average: bool = False,
|
||||||
|
periodic: bool = True) -> _Tuple[_np.ndarray, _np.ndarray]:
|
||||||
"""
|
"""
|
||||||
Create seeds from grid description.
|
Create seeds from grid description.
|
||||||
|
|
||||||
|
|
|
@ -45,7 +45,8 @@ def eigenvalues(T_sym: _np.ndarray) -> _np.ndarray:
|
||||||
return _np.linalg.eigvalsh(symmetric(T_sym))
|
return _np.linalg.eigvalsh(symmetric(T_sym))
|
||||||
|
|
||||||
|
|
||||||
def eigenvectors(T_sym: _np.ndarray, RHS: bool = False) -> _np.ndarray:
|
def eigenvectors(T_sym: _np.ndarray,
|
||||||
|
RHS: bool = False) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Eigenvectors of a symmetric tensor.
|
Eigenvectors of a symmetric tensor.
|
||||||
|
|
||||||
|
@ -63,14 +64,14 @@ def eigenvectors(T_sym: _np.ndarray, RHS: bool = False) -> _np.ndarray:
|
||||||
associated eigenvalues.
|
associated eigenvalues.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
(u,v) = _np.linalg.eigh(symmetric(T_sym))
|
_,v = _np.linalg.eigh(symmetric(T_sym))
|
||||||
|
|
||||||
if RHS:
|
if RHS: v[_np.linalg.det(v) < 0.0,:,2] *= -1.0
|
||||||
v[_np.linalg.det(v) < 0.0,:,2] *= -1.0
|
|
||||||
return v
|
return v
|
||||||
|
|
||||||
|
|
||||||
def spherical(T: _np.ndarray, tensor: bool = True) -> _np.ndarray:
|
def spherical(T: _np.ndarray,
|
||||||
|
tensor: bool = True) -> _np.ndarray:
|
||||||
"""
|
"""
|
||||||
Calculate spherical part of a tensor.
|
Calculate spherical part of a tensor.
|
||||||
|
|
||||||
|
|
|
@ -7,16 +7,16 @@ import subprocess
|
||||||
import shlex
|
import shlex
|
||||||
import re
|
import re
|
||||||
import fractions
|
import fractions
|
||||||
import collections.abc as abc
|
from collections import abc
|
||||||
from functools import reduce
|
from functools import reduce
|
||||||
from typing import Union, Tuple, Iterable, Callable, Dict, List, Any, Literal, Optional
|
from typing import Union, Tuple, Iterable, Callable, Dict, List, Any, Literal, SupportsIndex, Sequence
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import h5py
|
import h5py
|
||||||
|
|
||||||
from . import version
|
from . import version
|
||||||
from ._typehints import FloatSequence
|
from ._typehints import FloatSequence, NumpyRngSeed
|
||||||
|
|
||||||
# limit visibility
|
# limit visibility
|
||||||
__all__=[
|
__all__=[
|
||||||
|
@ -54,7 +54,8 @@ _colors = {
|
||||||
####################################################################################################
|
####################################################################################################
|
||||||
# Functions
|
# Functions
|
||||||
####################################################################################################
|
####################################################################################################
|
||||||
def srepr(msg, glue: str = '\n') -> str:
|
def srepr(msg,
|
||||||
|
glue: str = '\n') -> str:
|
||||||
r"""
|
r"""
|
||||||
Join items with glue string.
|
Join items with glue string.
|
||||||
|
|
||||||
|
@ -148,7 +149,10 @@ def strikeout(msg) -> str:
|
||||||
return _colors['crossout']+srepr(msg)+_colors['end_color']
|
return _colors['crossout']+srepr(msg)+_colors['end_color']
|
||||||
|
|
||||||
|
|
||||||
def run(cmd: str, wd: str = './', env: Dict[str, str] = None, timeout: int = None) -> Tuple[str, str]:
|
def run(cmd: str,
|
||||||
|
wd: str = './',
|
||||||
|
env: Dict[str, str] = None,
|
||||||
|
timeout: int = None) -> Tuple[str, str]:
|
||||||
"""
|
"""
|
||||||
Run a command.
|
Run a command.
|
||||||
|
|
||||||
|
@ -233,7 +237,7 @@ def show_progress(iterable: Iterable,
|
||||||
else:
|
else:
|
||||||
if N_iter is None:
|
if N_iter is None:
|
||||||
raise ValueError('N_iter not given')
|
raise ValueError('N_iter not given')
|
||||||
else:
|
|
||||||
N = N_iter
|
N = N_iter
|
||||||
|
|
||||||
if N <= 1:
|
if N <= 1:
|
||||||
|
@ -301,16 +305,20 @@ def project_equal_angle(vector: np.ndarray,
|
||||||
normalize : bool
|
normalize : bool
|
||||||
Ensure unit length of input vector. Defaults to True.
|
Ensure unit length of input vector. Defaults to True.
|
||||||
keepdims : bool
|
keepdims : bool
|
||||||
Maintain three-dimensional output coordinates. Defaults to False.
|
Maintain three-dimensional output coordinates.
|
||||||
Two-dimensional output uses right-handed frame spanned by
|
Defaults to False.
|
||||||
the next and next-next axis relative to the projection direction,
|
|
||||||
e.g. x-y when projecting along z and z-x when projecting along y.
|
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
coordinates : numpy.ndarray, shape (...,2 | 3)
|
coordinates : numpy.ndarray, shape (...,2 | 3)
|
||||||
Projected coordinates.
|
Projected coordinates.
|
||||||
|
|
||||||
|
Notes
|
||||||
|
-----
|
||||||
|
Two-dimensional output uses right-handed frame spanned by
|
||||||
|
the next and next-next axis relative to the projection direction,
|
||||||
|
e.g. x-y when projecting along z and z-x when projecting along y.
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
--------
|
--------
|
||||||
>>> import damask
|
>>> import damask
|
||||||
|
@ -345,16 +353,21 @@ def project_equal_area(vector: np.ndarray,
|
||||||
normalize : bool
|
normalize : bool
|
||||||
Ensure unit length of input vector. Defaults to True.
|
Ensure unit length of input vector. Defaults to True.
|
||||||
keepdims : bool
|
keepdims : bool
|
||||||
Maintain three-dimensional output coordinates. Defaults to False.
|
Maintain three-dimensional output coordinates.
|
||||||
Two-dimensional output uses right-handed frame spanned by
|
Defaults to False.
|
||||||
the next and next-next axis relative to the projection direction,
|
|
||||||
e.g. x-y when projecting along z and z-x when projecting along y.
|
|
||||||
|
|
||||||
Returns
|
Returns
|
||||||
-------
|
-------
|
||||||
coordinates : numpy.ndarray, shape (...,2 | 3)
|
coordinates : numpy.ndarray, shape (...,2 | 3)
|
||||||
Projected coordinates.
|
Projected coordinates.
|
||||||
|
|
||||||
|
Notes
|
||||||
|
-----
|
||||||
|
Two-dimensional output uses right-handed frame spanned by
|
||||||
|
the next and next-next axis relative to the projection direction,
|
||||||
|
e.g. x-y when projecting along z and z-x when projecting along y.
|
||||||
|
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
--------
|
--------
|
||||||
>>> import damask
|
>>> import damask
|
||||||
|
@ -373,14 +386,17 @@ def project_equal_area(vector: np.ndarray,
|
||||||
return np.roll(np.block([v[...,:2]/np.sqrt(1.0+np.abs(v[...,2:3])),np.zeros_like(v[...,2:3])]),
|
return np.roll(np.block([v[...,:2]/np.sqrt(1.0+np.abs(v[...,2:3])),np.zeros_like(v[...,2:3])]),
|
||||||
-shift if keepdims else 0,axis=-1)[...,:3 if keepdims else 2]
|
-shift if keepdims else 0,axis=-1)[...,:3 if keepdims else 2]
|
||||||
|
|
||||||
def execution_stamp(class_name: str, function_name: str = None) -> str:
|
def execution_stamp(class_name: str,
|
||||||
|
function_name: str = None) -> str:
|
||||||
"""Timestamp the execution of a (function within a) class."""
|
"""Timestamp the execution of a (function within a) class."""
|
||||||
now = datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S%z')
|
now = datetime.datetime.now().astimezone().strftime('%Y-%m-%d %H:%M:%S%z')
|
||||||
_function_name = '' if function_name is None else f'.{function_name}'
|
_function_name = '' if function_name is None else f'.{function_name}'
|
||||||
return f'damask.{class_name}{_function_name} v{version} ({now})'
|
return f'damask.{class_name}{_function_name} v{version} ({now})'
|
||||||
|
|
||||||
|
|
||||||
def hybrid_IA(dist: np.ndarray, N: int, rng_seed = None) -> np.ndarray:
|
def hybrid_IA(dist: np.ndarray,
|
||||||
|
N: int,
|
||||||
|
rng_seed: NumpyRngSeed = None) -> np.ndarray:
|
||||||
"""
|
"""
|
||||||
Hybrid integer approximation.
|
Hybrid integer approximation.
|
||||||
|
|
||||||
|
@ -411,7 +427,7 @@ def hybrid_IA(dist: np.ndarray, N: int, rng_seed = None) -> np.ndarray:
|
||||||
def shapeshifter(fro: Tuple[int, ...],
|
def shapeshifter(fro: Tuple[int, ...],
|
||||||
to: Tuple[int, ...],
|
to: Tuple[int, ...],
|
||||||
mode: Literal['left','right'] = 'left',
|
mode: Literal['left','right'] = 'left',
|
||||||
keep_ones: bool = False) -> Tuple[Optional[int], ...]:
|
keep_ones: bool = False) -> Sequence[SupportsIndex]:
|
||||||
"""
|
"""
|
||||||
Return dimensions that reshape 'fro' to become broadcastable to 'to'.
|
Return dimensions that reshape 'fro' to become broadcastable to 'to'.
|
||||||
|
|
||||||
|
@ -447,7 +463,7 @@ def shapeshifter(fro: Tuple[int, ...],
|
||||||
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if not len(fro) and not len(to): return ()
|
if len(fro) == 0 and len(to) == 0: return ()
|
||||||
|
|
||||||
beg = dict(left ='(^.*\\b)',
|
beg = dict(left ='(^.*\\b)',
|
||||||
right='(^.*?\\b)')
|
right='(^.*?\\b)')
|
||||||
|
@ -455,8 +471,8 @@ def shapeshifter(fro: Tuple[int, ...],
|
||||||
right='(.*?\\b)')
|
right='(.*?\\b)')
|
||||||
end = dict(left ='(.*?$)',
|
end = dict(left ='(.*?$)',
|
||||||
right='(.*$)')
|
right='(.*$)')
|
||||||
fro = (1,) if not len(fro) else fro
|
fro = (1,) if len(fro) == 0 else fro
|
||||||
to = (1,) if not len(to) else to
|
to = (1,) if len(to) == 0 else to
|
||||||
try:
|
try:
|
||||||
match = re.match(beg[mode]
|
match = re.match(beg[mode]
|
||||||
+f',{sep[mode]}'.join(map(lambda x: f'{x}'
|
+f',{sep[mode]}'.join(map(lambda x: f'{x}'
|
||||||
|
@ -467,13 +483,14 @@ def shapeshifter(fro: Tuple[int, ...],
|
||||||
grp = match.groups()
|
grp = match.groups()
|
||||||
except AssertionError:
|
except AssertionError:
|
||||||
raise ValueError(f'Shapes can not be shifted {fro} --> {to}')
|
raise ValueError(f'Shapes can not be shifted {fro} --> {to}')
|
||||||
fill: Tuple[Optional[int], ...] = ()
|
fill: Any = ()
|
||||||
for g,d in zip(grp,fro+(None,)):
|
for g,d in zip(grp,fro+(None,)):
|
||||||
fill += (1,)*g.count(',')+(d,)
|
fill += (1,)*g.count(',')+(d,)
|
||||||
return fill[:-1]
|
return fill[:-1]
|
||||||
|
|
||||||
|
|
||||||
def shapeblender(a: Tuple[int, ...], b: Tuple[int, ...]) -> Tuple[int, ...]:
|
def shapeblender(a: Tuple[int, ...],
|
||||||
|
b: Tuple[int, ...]) -> Sequence[SupportsIndex]:
|
||||||
"""
|
"""
|
||||||
Return a shape that overlaps the rightmost entries of 'a' with the leftmost of 'b'.
|
Return a shape that overlaps the rightmost entries of 'a' with the leftmost of 'b'.
|
||||||
|
|
||||||
|
@ -517,7 +534,8 @@ def extend_docstring(extra_docstring: str) -> Callable:
|
||||||
return _decorator
|
return _decorator
|
||||||
|
|
||||||
|
|
||||||
def extended_docstring(f: Callable, extra_docstring: str) -> Callable:
|
def extended_docstring(f: Callable,
|
||||||
|
extra_docstring: str) -> Callable:
|
||||||
"""
|
"""
|
||||||
Decorator: Combine another function's docstring with a given docstring.
|
Decorator: Combine another function's docstring with a given docstring.
|
||||||
|
|
||||||
|
@ -593,7 +611,9 @@ def DREAM3D_cell_data_group(fname: Union[str, Path]) -> str:
|
||||||
return cell_data_group
|
return cell_data_group
|
||||||
|
|
||||||
|
|
||||||
def Bravais_to_Miller(*, uvtw: np.ndarray = None, hkil: np.ndarray = None) -> np.ndarray:
|
def Bravais_to_Miller(*,
|
||||||
|
uvtw: np.ndarray = None,
|
||||||
|
hkil: np.ndarray = None) -> np.ndarray:
|
||||||
"""
|
"""
|
||||||
Transform 4 Miller–Bravais indices to 3 Miller indices of crystal direction [uvw] or plane normal (hkl).
|
Transform 4 Miller–Bravais indices to 3 Miller indices of crystal direction [uvw] or plane normal (hkl).
|
||||||
|
|
||||||
|
@ -620,7 +640,9 @@ def Bravais_to_Miller(*, uvtw: np.ndarray = None, hkil: np.ndarray = None) -> np
|
||||||
return np.einsum('il,...l',basis,axis)
|
return np.einsum('il,...l',basis,axis)
|
||||||
|
|
||||||
|
|
||||||
def Miller_to_Bravais(*, uvw: np.ndarray = None, hkl: np.ndarray = None) -> np.ndarray:
|
def Miller_to_Bravais(*,
|
||||||
|
uvw: np.ndarray = None,
|
||||||
|
hkl: np.ndarray = None) -> np.ndarray:
|
||||||
"""
|
"""
|
||||||
Transform 3 Miller indices to 4 Miller–Bravais indices of crystal direction [uvtw] or plane normal (hkil).
|
Transform 3 Miller indices to 4 Miller–Bravais indices of crystal direction [uvtw] or plane normal (hkil).
|
||||||
|
|
||||||
|
@ -710,7 +732,10 @@ class ProgressBar:
|
||||||
Works for 0-based loops, ETA is estimated by linear extrapolation.
|
Works for 0-based loops, ETA is estimated by linear extrapolation.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, total: int, prefix: str, bar_length: int):
|
def __init__(self,
|
||||||
|
total: int,
|
||||||
|
prefix: str,
|
||||||
|
bar_length: int):
|
||||||
"""
|
"""
|
||||||
Set current time as basis for ETA estimation.
|
Set current time as basis for ETA estimation.
|
||||||
|
|
||||||
|
@ -733,12 +758,12 @@ class ProgressBar:
|
||||||
sys.stderr.write(f"{self.prefix} {'░'*self.bar_length} 0% ETA n/a")
|
sys.stderr.write(f"{self.prefix} {'░'*self.bar_length} 0% ETA n/a")
|
||||||
sys.stderr.flush()
|
sys.stderr.flush()
|
||||||
|
|
||||||
def update(self, iteration: int) -> None:
|
def update(self,
|
||||||
|
iteration: int) -> None:
|
||||||
|
|
||||||
fraction = (iteration+1) / self.total
|
fraction = (iteration+1) / self.total
|
||||||
filled_length = int(self.bar_length * fraction)
|
|
||||||
|
|
||||||
if filled_length > int(self.bar_length * self.fraction_last) or \
|
if filled_length := int(self.bar_length * fraction) > int(self.bar_length * self.fraction_last) or \
|
||||||
datetime.datetime.now() - self.time_last_update > datetime.timedelta(seconds=10):
|
datetime.datetime.now() - self.time_last_update > datetime.timedelta(seconds=10):
|
||||||
self.time_last_update = datetime.datetime.now()
|
self.time_last_update = datetime.datetime.now()
|
||||||
bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
|
bar = '█' * filled_length + '░' * (self.bar_length - filled_length)
|
||||||
|
|
|
@ -1,3 +1,5 @@
|
||||||
|
[mypy]
|
||||||
|
warn_redundant_casts = True
|
||||||
[mypy-scipy.*]
|
[mypy-scipy.*]
|
||||||
ignore_missing_imports = True
|
ignore_missing_imports = True
|
||||||
[mypy-h5py.*]
|
[mypy-h5py.*]
|
||||||
|
|
|
@ -8,19 +8,19 @@ from damask import seeds
|
||||||
class TestGridFilters:
|
class TestGridFilters:
|
||||||
|
|
||||||
def test_coordinates0_point(self):
|
def test_coordinates0_point(self):
|
||||||
size = np.random.random(3)
|
size = np.random.random(3) # noqa
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
coord = grid_filters.coordinates0_point(cells,size)
|
coord = grid_filters.coordinates0_point(cells,size)
|
||||||
assert np.allclose(coord[0,0,0],size/cells*.5) and coord.shape == tuple(cells) + (3,)
|
assert np.allclose(coord[0,0,0],size/cells*.5) and coord.shape == tuple(cells) + (3,)
|
||||||
|
|
||||||
def test_coordinates0_node(self):
|
def test_coordinates0_node(self):
|
||||||
size = np.random.random(3)
|
size = np.random.random(3) # noqa
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
coord = grid_filters.coordinates0_node(cells,size)
|
coord = grid_filters.coordinates0_node(cells,size)
|
||||||
assert np.allclose(coord[-1,-1,-1],size) and coord.shape == tuple(cells+1) + (3,)
|
assert np.allclose(coord[-1,-1,-1],size) and coord.shape == tuple(cells+1) + (3,)
|
||||||
|
|
||||||
def test_coord0(self):
|
def test_coord0(self):
|
||||||
size = np.random.random(3)
|
size = np.random.random(3) # noqa
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
c = grid_filters.coordinates0_point(cells+1,size+size/cells)
|
c = grid_filters.coordinates0_point(cells+1,size+size/cells)
|
||||||
n = grid_filters.coordinates0_node(cells,size) + size/cells*.5
|
n = grid_filters.coordinates0_node(cells,size) + size/cells*.5
|
||||||
|
@ -28,7 +28,7 @@ class TestGridFilters:
|
||||||
|
|
||||||
@pytest.mark.parametrize('mode',['point','node'])
|
@pytest.mark.parametrize('mode',['point','node'])
|
||||||
def test_grid_DNA(self,mode):
|
def test_grid_DNA(self,mode):
|
||||||
"""Ensure that cellsSizeOrigin_coordinates0_xx is the inverse of coordinates0_xx."""
|
"""Ensure that cellsSizeOrigin_coordinates0_xx is the inverse of coordinates0_xx.""" # noqa
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
size = np.random.random(3)
|
size = np.random.random(3)
|
||||||
origin = np.random.random(3)
|
origin = np.random.random(3)
|
||||||
|
@ -37,7 +37,7 @@ class TestGridFilters:
|
||||||
assert np.allclose(cells,_cells) and np.allclose(size,_size) and np.allclose(origin,_origin)
|
assert np.allclose(cells,_cells) and np.allclose(size,_size) and np.allclose(origin,_origin)
|
||||||
|
|
||||||
def test_displacement_fluct_equivalence(self):
|
def test_displacement_fluct_equivalence(self):
|
||||||
"""Ensure that fluctuations are periodic."""
|
"""Ensure that fluctuations are periodic.""" # noqa
|
||||||
size = np.random.random(3)
|
size = np.random.random(3)
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
F = np.random.random(tuple(cells)+(3,3))
|
F = np.random.random(tuple(cells)+(3,3))
|
||||||
|
@ -45,14 +45,14 @@ class TestGridFilters:
|
||||||
grid_filters.point_to_node(grid_filters.displacement_fluct_point(size,F)))
|
grid_filters.point_to_node(grid_filters.displacement_fluct_point(size,F)))
|
||||||
|
|
||||||
def test_interpolation_to_node(self):
|
def test_interpolation_to_node(self):
|
||||||
size = np.random.random(3)
|
size = np.random.random(3) # noqa
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
F = np.random.random(tuple(cells)+(3,3))
|
F = np.random.random(tuple(cells)+(3,3))
|
||||||
assert np.allclose(grid_filters.coordinates_node(size,F) [1:-1,1:-1,1:-1],
|
assert np.allclose(grid_filters.coordinates_node(size,F) [1:-1,1:-1,1:-1],
|
||||||
grid_filters.point_to_node(grid_filters.coordinates_point(size,F))[1:-1,1:-1,1:-1])
|
grid_filters.point_to_node(grid_filters.coordinates_point(size,F))[1:-1,1:-1,1:-1])
|
||||||
|
|
||||||
def test_interpolation_to_cell(self):
|
def test_interpolation_to_cell(self):
|
||||||
cells = np.random.randint(1,30,(3))
|
cells = np.random.randint(1,30,(3)) # noqa
|
||||||
|
|
||||||
coordinates_node_x = np.linspace(0,np.pi*2,num=cells[0]+1)
|
coordinates_node_x = np.linspace(0,np.pi*2,num=cells[0]+1)
|
||||||
node_field_x = np.cos(coordinates_node_x)
|
node_field_x = np.cos(coordinates_node_x)
|
||||||
|
@ -66,7 +66,7 @@ class TestGridFilters:
|
||||||
|
|
||||||
@pytest.mark.parametrize('mode',['point','node'])
|
@pytest.mark.parametrize('mode',['point','node'])
|
||||||
def test_coordinates0_origin(self,mode):
|
def test_coordinates0_origin(self,mode):
|
||||||
origin= np.random.random(3)
|
origin= np.random.random(3) # noqa
|
||||||
size = np.random.random(3) # noqa
|
size = np.random.random(3) # noqa
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
shifted = eval(f'grid_filters.coordinates0_{mode}(cells,size,origin)')
|
shifted = eval(f'grid_filters.coordinates0_{mode}(cells,size,origin)')
|
||||||
|
@ -79,7 +79,7 @@ class TestGridFilters:
|
||||||
@pytest.mark.parametrize('function',[grid_filters.displacement_avg_point,
|
@pytest.mark.parametrize('function',[grid_filters.displacement_avg_point,
|
||||||
grid_filters.displacement_avg_node])
|
grid_filters.displacement_avg_node])
|
||||||
def test_displacement_avg_vanishes(self,function):
|
def test_displacement_avg_vanishes(self,function):
|
||||||
"""Ensure that random fluctuations in F do not result in average displacement."""
|
"""Ensure that random fluctuations in F do not result in average displacement.""" # noqa
|
||||||
size = np.random.random(3)
|
size = np.random.random(3)
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
F = np.random.random(tuple(cells)+(3,3))
|
F = np.random.random(tuple(cells)+(3,3))
|
||||||
|
@ -89,7 +89,7 @@ class TestGridFilters:
|
||||||
@pytest.mark.parametrize('function',[grid_filters.displacement_fluct_point,
|
@pytest.mark.parametrize('function',[grid_filters.displacement_fluct_point,
|
||||||
grid_filters.displacement_fluct_node])
|
grid_filters.displacement_fluct_node])
|
||||||
def test_displacement_fluct_vanishes(self,function):
|
def test_displacement_fluct_vanishes(self,function):
|
||||||
"""Ensure that constant F does not result in fluctuating displacement."""
|
"""Ensure that constant F does not result in fluctuating displacement.""" # noqa
|
||||||
size = np.random.random(3)
|
size = np.random.random(3)
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
F = np.broadcast_to(np.random.random((3,3)), tuple(cells)+(3,3))
|
F = np.broadcast_to(np.random.random((3,3)), tuple(cells)+(3,3))
|
||||||
|
@ -142,13 +142,13 @@ class TestGridFilters:
|
||||||
function(unordered,mode)
|
function(unordered,mode)
|
||||||
|
|
||||||
def test_regrid_identity(self):
|
def test_regrid_identity(self):
|
||||||
size = np.random.random(3)
|
size = np.random.random(3) # noqa
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
F = np.broadcast_to(np.eye(3), tuple(cells)+(3,3))
|
F = np.broadcast_to(np.eye(3), tuple(cells)+(3,3))
|
||||||
assert all(grid_filters.regrid(size,F,cells) == np.arange(cells.prod()))
|
assert all(grid_filters.regrid(size,F,cells) == np.arange(cells.prod()))
|
||||||
|
|
||||||
def test_regrid_double_cells(self):
|
def test_regrid_double_cells(self):
|
||||||
size = np.random.random(3)
|
size = np.random.random(3) # noqa
|
||||||
cells = np.random.randint(8,32,(3))
|
cells = np.random.randint(8,32,(3))
|
||||||
g = Grid.from_Voronoi_tessellation(cells,size,seeds.from_random(size,10))
|
g = Grid.from_Voronoi_tessellation(cells,size,seeds.from_random(size,10))
|
||||||
F = np.broadcast_to(np.eye(3), tuple(cells)+(3,3))
|
F = np.broadcast_to(np.eye(3), tuple(cells)+(3,3))
|
||||||
|
|
|
@ -14,6 +14,7 @@ module CPFEM
|
||||||
use config
|
use config
|
||||||
use math
|
use math
|
||||||
use rotations
|
use rotations
|
||||||
|
use polynomials
|
||||||
use lattice
|
use lattice
|
||||||
use material
|
use material
|
||||||
use phase
|
use phase
|
||||||
|
@ -83,6 +84,7 @@ subroutine CPFEM_initAll
|
||||||
call config_init
|
call config_init
|
||||||
call math_init
|
call math_init
|
||||||
call rotations_init
|
call rotations_init
|
||||||
|
call polynomials_init
|
||||||
call lattice_init
|
call lattice_init
|
||||||
call discretization_marc_init
|
call discretization_marc_init
|
||||||
call material_init(.false.)
|
call material_init(.false.)
|
||||||
|
|
|
@ -16,6 +16,7 @@ module CPFEM2
|
||||||
use config
|
use config
|
||||||
use math
|
use math
|
||||||
use rotations
|
use rotations
|
||||||
|
use polynomials
|
||||||
use lattice
|
use lattice
|
||||||
use material
|
use material
|
||||||
use phase
|
use phase
|
||||||
|
@ -57,6 +58,7 @@ subroutine CPFEM_initAll
|
||||||
call config_init
|
call config_init
|
||||||
call math_init
|
call math_init
|
||||||
call rotations_init
|
call rotations_init
|
||||||
|
call polynomials_init
|
||||||
call lattice_init
|
call lattice_init
|
||||||
#if defined(MESH)
|
#if defined(MESH)
|
||||||
call discretization_mesh_init(restart=interface_restartInc>0)
|
call discretization_mesh_init(restart=interface_restartInc>0)
|
||||||
|
|
|
@ -191,8 +191,10 @@ logical function isScalar(line)
|
||||||
|
|
||||||
character(len=*), intent(in) :: line
|
character(len=*), intent(in) :: line
|
||||||
|
|
||||||
isScalar = (.not.isKeyValue(line) .and. .not.isKey(line) .and. .not.isListItem(line) &
|
isScalar = (.not. isKeyValue(line) .and. &
|
||||||
.and. .not.isFlow(line))
|
.not. isKey(line) .and. &
|
||||||
|
.not. isListItem(line) .and. &
|
||||||
|
.not. isFlow(line))
|
||||||
|
|
||||||
end function isScalar
|
end function isScalar
|
||||||
|
|
||||||
|
|
|
@ -14,6 +14,7 @@
|
||||||
#include "LAPACK_interface.f90"
|
#include "LAPACK_interface.f90"
|
||||||
#include "math.f90"
|
#include "math.f90"
|
||||||
#include "rotations.f90"
|
#include "rotations.f90"
|
||||||
|
#include "polynomials.f90"
|
||||||
#include "lattice.f90"
|
#include "lattice.f90"
|
||||||
#include "element.f90"
|
#include "element.f90"
|
||||||
#include "geometry_plastic_nonlocal.f90"
|
#include "geometry_plastic_nonlocal.f90"
|
||||||
|
|
|
@ -466,7 +466,14 @@ program DAMASK_grid
|
||||||
call MPI_Allreduce(interface_SIGUSR2,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
|
call MPI_Allreduce(interface_SIGUSR2,signal,1_MPI_INTEGER_KIND,MPI_LOGICAL,MPI_LOR,MPI_COMM_WORLD,err_MPI)
|
||||||
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
if (err_MPI /= 0_MPI_INTEGER_KIND) error stop 'MPI error'
|
||||||
if (mod(inc,loadCases(l)%f_restart) == 0 .or. signal) then
|
if (mod(inc,loadCases(l)%f_restart) == 0 .or. signal) then
|
||||||
|
do field = 1, nActiveFields
|
||||||
|
select case (ID(field))
|
||||||
|
case(FIELD_MECH_ID)
|
||||||
call mechanical_restartWrite
|
call mechanical_restartWrite
|
||||||
|
case(FIELD_THERMAL_ID)
|
||||||
|
call grid_thermal_spectral_restartWrite
|
||||||
|
end select
|
||||||
|
end do
|
||||||
call CPFEM_restartWrite
|
call CPFEM_restartWrite
|
||||||
endif
|
endif
|
||||||
if (signal) call interface_setSIGUSR2(.false.)
|
if (signal) call interface_setSIGUSR2(.false.)
|
||||||
|
|
|
@ -16,6 +16,9 @@ module grid_thermal_spectral
|
||||||
use prec
|
use prec
|
||||||
use parallelization
|
use parallelization
|
||||||
use IO
|
use IO
|
||||||
|
use DAMASK_interface
|
||||||
|
use HDF5_utilities
|
||||||
|
use HDF5
|
||||||
use spectral_utilities
|
use spectral_utilities
|
||||||
use discretization_grid
|
use discretization_grid
|
||||||
use homogenization
|
use homogenization
|
||||||
|
@ -54,13 +57,13 @@ module grid_thermal_spectral
|
||||||
public :: &
|
public :: &
|
||||||
grid_thermal_spectral_init, &
|
grid_thermal_spectral_init, &
|
||||||
grid_thermal_spectral_solution, &
|
grid_thermal_spectral_solution, &
|
||||||
|
grid_thermal_spectral_restartWrite, &
|
||||||
grid_thermal_spectral_forward
|
grid_thermal_spectral_forward
|
||||||
|
|
||||||
contains
|
contains
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
!> @brief allocates all neccessary fields and fills them with data
|
!> @brief allocates all neccessary fields and fills them with data
|
||||||
! ToDo: Restart not implemented
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
subroutine grid_thermal_spectral_init(T_0)
|
subroutine grid_thermal_spectral_init(T_0)
|
||||||
|
|
||||||
|
@ -72,6 +75,7 @@ subroutine grid_thermal_spectral_init(T_0)
|
||||||
PetscScalar, dimension(:,:,:), pointer :: T_PETSc
|
PetscScalar, dimension(:,:,:), pointer :: T_PETSc
|
||||||
integer(MPI_INTEGER_KIND) :: err_MPI
|
integer(MPI_INTEGER_KIND) :: err_MPI
|
||||||
PetscErrorCode :: err_PETSc
|
PetscErrorCode :: err_PETSc
|
||||||
|
integer(HID_T) :: fileHandle, groupHandle
|
||||||
class(tNode), pointer :: &
|
class(tNode), pointer :: &
|
||||||
num_grid
|
num_grid
|
||||||
|
|
||||||
|
@ -105,12 +109,6 @@ subroutine grid_thermal_spectral_init(T_0)
|
||||||
allocate(T_lastInc(cells(1),cells(2),cells3), source=T_0)
|
allocate(T_lastInc(cells(1),cells(2),cells3), source=T_0)
|
||||||
allocate(T_stagInc(cells(1),cells(2),cells3), source=T_0)
|
allocate(T_stagInc(cells(1),cells(2),cells3), source=T_0)
|
||||||
|
|
||||||
ce = 0
|
|
||||||
do k = 1, cells3; do j = 1, cells(2); do i = 1,cells(1)
|
|
||||||
ce = ce + 1
|
|
||||||
call homogenization_thermal_setField(T_0,0.0_pReal,ce)
|
|
||||||
end do; end do; end do
|
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
! initialize solver specific parts of PETSc
|
! initialize solver specific parts of PETSc
|
||||||
call SNESCreate(PETSC_COMM_WORLD,SNES_thermal,err_PETSc)
|
call SNESCreate(PETSC_COMM_WORLD,SNES_thermal,err_PETSc)
|
||||||
|
@ -142,6 +140,24 @@ subroutine grid_thermal_spectral_init(T_0)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
call SNESSetFromOptions(SNES_thermal,err_PETSc) ! pull it all together with additional CLI arguments
|
call SNESSetFromOptions(SNES_thermal,err_PETSc) ! pull it all together with additional CLI arguments
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
|
|
||||||
|
restartRead: if (interface_restartInc > 0) then
|
||||||
|
print'(/,1x,a,i0,a)', 'reading restart data of increment ', interface_restartInc, ' from file'
|
||||||
|
|
||||||
|
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','r')
|
||||||
|
groupHandle = HDF5_openGroup(fileHandle,'solver')
|
||||||
|
|
||||||
|
call HDF5_read(T_current,groupHandle,'T',.false.)
|
||||||
|
call HDF5_read(T_lastInc,groupHandle,'T_lastInc',.false.)
|
||||||
|
end if restartRead
|
||||||
|
|
||||||
|
ce = 0
|
||||||
|
do k = 1, cells3; do j = 1, cells(2); do i = 1, cells(1)
|
||||||
|
ce = ce + 1
|
||||||
|
call homogenization_thermal_setField(T_current(i,j,k),0.0_pReal,ce)
|
||||||
|
end do; end do; end do
|
||||||
|
|
||||||
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
|
call DMDAVecGetArrayF90(thermal_grid,solution_vec,T_PETSc,err_PETSc)
|
||||||
CHKERRQ(err_PETSc)
|
CHKERRQ(err_PETSc)
|
||||||
T_PETSc = T_current
|
T_PETSc = T_current
|
||||||
|
@ -253,6 +269,37 @@ subroutine grid_thermal_spectral_forward(cutBack)
|
||||||
end subroutine grid_thermal_spectral_forward
|
end subroutine grid_thermal_spectral_forward
|
||||||
|
|
||||||
|
|
||||||
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
!> @brief Write current solver and constitutive data for restart to file
|
||||||
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
subroutine grid_thermal_spectral_restartWrite
|
||||||
|
|
||||||
|
PetscErrorCode :: err_PETSc
|
||||||
|
DM :: dm_local
|
||||||
|
integer(HID_T) :: fileHandle, groupHandle
|
||||||
|
PetscScalar, dimension(:,:,:), pointer :: T
|
||||||
|
|
||||||
|
call SNESGetDM(SNES_thermal,dm_local,err_PETSc);
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
call DMDAVecGetArrayF90(dm_local,solution_vec,T,err_PETSc);
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
|
print'(1x,a)', 'writing thermal solver data required for restart to file'; flush(IO_STDOUT)
|
||||||
|
|
||||||
|
fileHandle = HDF5_openFile(getSolverJobName()//'_restart.hdf5','a')
|
||||||
|
groupHandle = HDF5_openGroup(fileHandle,'solver')
|
||||||
|
call HDF5_write(T,groupHandle,'T')
|
||||||
|
call HDF5_write(T_lastInc,groupHandle,'T_lastInc')
|
||||||
|
call HDF5_closeGroup(groupHandle)
|
||||||
|
call HDF5_closeFile(fileHandle)
|
||||||
|
|
||||||
|
call DMDAVecRestoreArrayF90(dm_local,solution_vec,T,err_PETSc);
|
||||||
|
CHKERRQ(err_PETSc)
|
||||||
|
|
||||||
|
end subroutine grid_thermal_spectral_restartWrite
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
!> @brief forms the spectral thermal residual vector
|
!> @brief forms the spectral thermal residual vector
|
||||||
!--------------------------------------------------------------------------------------------------
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
|
|
@ -414,7 +414,7 @@ subroutine homogenization_restartWrite(fileHandle)
|
||||||
|
|
||||||
groupHandle(2) = HDF5_addGroup(groupHandle(1),material_name_homogenization(ho))
|
groupHandle(2) = HDF5_addGroup(groupHandle(1),material_name_homogenization(ho))
|
||||||
|
|
||||||
call HDF5_write(homogState(ho)%state,groupHandle(2),'omega') ! ToDo: should be done by mech
|
call HDF5_write(homogState(ho)%state,groupHandle(2),'omega_mechanical') ! ToDo: should be done by mech
|
||||||
|
|
||||||
call HDF5_closeGroup(groupHandle(2))
|
call HDF5_closeGroup(groupHandle(2))
|
||||||
|
|
||||||
|
@ -441,7 +441,7 @@ subroutine homogenization_restartRead(fileHandle)
|
||||||
|
|
||||||
groupHandle(2) = HDF5_openGroup(groupHandle(1),material_name_homogenization(ho))
|
groupHandle(2) = HDF5_openGroup(groupHandle(1),material_name_homogenization(ho))
|
||||||
|
|
||||||
call HDF5_read(homogState(ho)%state0,groupHandle(2),'omega') ! ToDo: should be done by mech
|
call HDF5_read(homogState(ho)%state0,groupHandle(2),'omega_mechanical') ! ToDo: should be done by mech
|
||||||
|
|
||||||
call HDF5_closeGroup(groupHandle(2))
|
call HDF5_closeGroup(groupHandle(2))
|
||||||
|
|
||||||
|
|
|
@ -66,7 +66,7 @@ subroutine material_init(restart)
|
||||||
print'(/,1x,a)', '<<<+- material init -+>>>'; flush(IO_STDOUT)
|
print'(/,1x,a)', '<<<+- material init -+>>>'; flush(IO_STDOUT)
|
||||||
|
|
||||||
|
|
||||||
call parse
|
call parse()
|
||||||
print'(/,1x,a)', 'parsed material.yaml'
|
print'(/,1x,a)', 'parsed material.yaml'
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -8,6 +8,7 @@ module phase
|
||||||
use constants
|
use constants
|
||||||
use math
|
use math
|
||||||
use rotations
|
use rotations
|
||||||
|
use polynomials
|
||||||
use IO
|
use IO
|
||||||
use config
|
use config
|
||||||
use material
|
use material
|
||||||
|
@ -123,11 +124,20 @@ module phase
|
||||||
integer, intent(in) :: ph
|
integer, intent(in) :: ph
|
||||||
end subroutine mechanical_restartWrite
|
end subroutine mechanical_restartWrite
|
||||||
|
|
||||||
|
module subroutine thermal_restartWrite(groupHandle,ph)
|
||||||
|
integer(HID_T), intent(in) :: groupHandle
|
||||||
|
integer, intent(in) :: ph
|
||||||
|
end subroutine thermal_restartWrite
|
||||||
|
|
||||||
module subroutine mechanical_restartRead(groupHandle,ph)
|
module subroutine mechanical_restartRead(groupHandle,ph)
|
||||||
integer(HID_T), intent(in) :: groupHandle
|
integer(HID_T), intent(in) :: groupHandle
|
||||||
integer, intent(in) :: ph
|
integer, intent(in) :: ph
|
||||||
end subroutine mechanical_restartRead
|
end subroutine mechanical_restartRead
|
||||||
|
|
||||||
|
module subroutine thermal_restartRead(groupHandle,ph)
|
||||||
|
integer(HID_T), intent(in) :: groupHandle
|
||||||
|
integer, intent(in) :: ph
|
||||||
|
end subroutine thermal_restartRead
|
||||||
|
|
||||||
module function mechanical_S(ph,en) result(S)
|
module function mechanical_S(ph,en) result(S)
|
||||||
integer, intent(in) :: ph,en
|
integer, intent(in) :: ph,en
|
||||||
|
@ -640,6 +650,7 @@ subroutine phase_restartWrite(fileHandle)
|
||||||
groupHandle(2) = HDF5_addGroup(groupHandle(1),material_name_phase(ph))
|
groupHandle(2) = HDF5_addGroup(groupHandle(1),material_name_phase(ph))
|
||||||
|
|
||||||
call mechanical_restartWrite(groupHandle(2),ph)
|
call mechanical_restartWrite(groupHandle(2),ph)
|
||||||
|
call thermal_restartWrite(groupHandle(2),ph)
|
||||||
|
|
||||||
call HDF5_closeGroup(groupHandle(2))
|
call HDF5_closeGroup(groupHandle(2))
|
||||||
|
|
||||||
|
@ -668,6 +679,7 @@ subroutine phase_restartRead(fileHandle)
|
||||||
groupHandle(2) = HDF5_openGroup(groupHandle(1),material_name_phase(ph))
|
groupHandle(2) = HDF5_openGroup(groupHandle(1),material_name_phase(ph))
|
||||||
|
|
||||||
call mechanical_restartRead(groupHandle(2),ph)
|
call mechanical_restartRead(groupHandle(2),ph)
|
||||||
|
call thermal_restartRead(groupHandle(2),ph)
|
||||||
|
|
||||||
call HDF5_closeGroup(groupHandle(2))
|
call HDF5_closeGroup(groupHandle(2))
|
||||||
|
|
||||||
|
|
|
@ -1256,7 +1256,7 @@ module subroutine mechanical_restartWrite(groupHandle,ph)
|
||||||
integer, intent(in) :: ph
|
integer, intent(in) :: ph
|
||||||
|
|
||||||
|
|
||||||
call HDF5_write(plasticState(ph)%state,groupHandle,'omega')
|
call HDF5_write(plasticState(ph)%state,groupHandle,'omega_plastic')
|
||||||
call HDF5_write(phase_mechanical_Fi(ph)%data,groupHandle,'F_i')
|
call HDF5_write(phase_mechanical_Fi(ph)%data,groupHandle,'F_i')
|
||||||
call HDF5_write(phase_mechanical_Li(ph)%data,groupHandle,'L_i')
|
call HDF5_write(phase_mechanical_Li(ph)%data,groupHandle,'L_i')
|
||||||
call HDF5_write(phase_mechanical_Lp(ph)%data,groupHandle,'L_p')
|
call HDF5_write(phase_mechanical_Lp(ph)%data,groupHandle,'L_p')
|
||||||
|
@ -1273,7 +1273,7 @@ module subroutine mechanical_restartRead(groupHandle,ph)
|
||||||
integer, intent(in) :: ph
|
integer, intent(in) :: ph
|
||||||
|
|
||||||
|
|
||||||
call HDF5_read(plasticState(ph)%state0,groupHandle,'omega')
|
call HDF5_read(plasticState(ph)%state0,groupHandle,'omega_plastic')
|
||||||
call HDF5_read(phase_mechanical_Fi0(ph)%data,groupHandle,'F_i')
|
call HDF5_read(phase_mechanical_Fi0(ph)%data,groupHandle,'F_i')
|
||||||
call HDF5_read(phase_mechanical_Li0(ph)%data,groupHandle,'L_i')
|
call HDF5_read(phase_mechanical_Li0(ph)%data,groupHandle,'L_i')
|
||||||
call HDF5_read(phase_mechanical_Lp0(ph)%data,groupHandle,'L_p')
|
call HDF5_read(phase_mechanical_Lp0(ph)%data,groupHandle,'L_p')
|
||||||
|
|
|
@ -8,10 +8,9 @@ submodule(phase:eigen) thermalexpansion
|
||||||
integer, dimension(:), allocatable :: kinematics_thermal_expansion_instance
|
integer, dimension(:), allocatable :: kinematics_thermal_expansion_instance
|
||||||
|
|
||||||
type :: tParameters
|
type :: tParameters
|
||||||
real(pReal) :: &
|
type(tPolynomial) :: &
|
||||||
T_ref
|
A_11, &
|
||||||
real(pReal), dimension(3,3,3) :: &
|
A_33
|
||||||
A = 0.0_pReal
|
|
||||||
end type tParameters
|
end type tParameters
|
||||||
|
|
||||||
type(tParameters), dimension(:), allocatable :: param
|
type(tParameters), dimension(:), allocatable :: param
|
||||||
|
@ -34,7 +33,7 @@ module function thermalexpansion_init(kinematics_length) result(myKinematics)
|
||||||
phase, &
|
phase, &
|
||||||
mech, &
|
mech, &
|
||||||
kinematics, &
|
kinematics, &
|
||||||
kinematic_type
|
myConfig
|
||||||
|
|
||||||
print'(/,1x,a)', '<<<+- phase:mechanical:eigen:thermalexpansion init -+>>>'
|
print'(/,1x,a)', '<<<+- phase:mechanical:eigen:thermalexpansion init -+>>>'
|
||||||
|
|
||||||
|
@ -56,21 +55,13 @@ module function thermalexpansion_init(kinematics_length) result(myKinematics)
|
||||||
do k = 1, kinematics%length
|
do k = 1, kinematics%length
|
||||||
if (myKinematics(k,p)) then
|
if (myKinematics(k,p)) then
|
||||||
associate(prm => param(kinematics_thermal_expansion_instance(p)))
|
associate(prm => param(kinematics_thermal_expansion_instance(p)))
|
||||||
kinematic_type => kinematics%get(k)
|
|
||||||
|
|
||||||
prm%T_ref = kinematic_type%get_asFloat('T_ref', defaultVal=T_ROOM)
|
myConfig => kinematics%get(k)
|
||||||
|
|
||||||
|
prm%A_11 = polynomial(myConfig%asDict(),'A_11','T')
|
||||||
|
if (any(phase_lattice(p) == ['hP','tI'])) &
|
||||||
|
prm%A_33 = polynomial(myConfig%asDict(),'A_33','T')
|
||||||
|
|
||||||
prm%A(1,1,1) = kinematic_type%get_asFloat('A_11')
|
|
||||||
prm%A(1,1,2) = kinematic_type%get_asFloat('A_11,T', defaultVal=0.0_pReal)
|
|
||||||
prm%A(1,1,3) = kinematic_type%get_asFloat('A_11,T^2',defaultVal=0.0_pReal)
|
|
||||||
if (any(phase_lattice(p) == ['hP','tI'])) then
|
|
||||||
prm%A(3,3,1) = kinematic_type%get_asFloat('A_33')
|
|
||||||
prm%A(3,3,2) = kinematic_type%get_asFloat('A_33,T', defaultVal=0.0_pReal)
|
|
||||||
prm%A(3,3,3) = kinematic_type%get_asFloat('A_33,T^2',defaultVal=0.0_pReal)
|
|
||||||
end if
|
|
||||||
do i=1, size(prm%A,3)
|
|
||||||
prm%A(1:3,1:3,i) = lattice_symmetrize_33(prm%A(1:3,1:3,i),phase_lattice(p))
|
|
||||||
end do
|
|
||||||
end associate
|
end associate
|
||||||
end if
|
end if
|
||||||
end do
|
end do
|
||||||
|
@ -91,22 +82,20 @@ module subroutine thermalexpansion_LiAndItsTangent(Li, dLi_dTstar, ph,me)
|
||||||
dLi_dTstar !< derivative of Li with respect to Tstar (4th-order tensor defined to be zero)
|
dLi_dTstar !< derivative of Li with respect to Tstar (4th-order tensor defined to be zero)
|
||||||
|
|
||||||
real(pReal) :: T, dot_T
|
real(pReal) :: T, dot_T
|
||||||
|
real(pReal), dimension(3,3) :: A
|
||||||
|
|
||||||
|
|
||||||
T = thermal_T(ph,me)
|
T = thermal_T(ph,me)
|
||||||
dot_T = thermal_dot_T(ph,me)
|
dot_T = thermal_dot_T(ph,me)
|
||||||
|
|
||||||
associate(prm => param(kinematics_thermal_expansion_instance(ph)))
|
associate(prm => param(kinematics_thermal_expansion_instance(ph)))
|
||||||
Li = dot_T * ( &
|
|
||||||
prm%A(1:3,1:3,1) & ! constant coefficient
|
A = 0.0_pReal
|
||||||
+ prm%A(1:3,1:3,2)*(T - prm%T_ref) & ! linear coefficient
|
A(1,1) = prm%A_11%at(T)
|
||||||
+ prm%A(1:3,1:3,3)*(T - prm%T_ref)**2 & ! quadratic coefficient
|
if (any(phase_lattice(ph) == ['hP','tI'])) A(3,3) = prm%A_33%at(T)
|
||||||
) / &
|
A = lattice_symmetrize_33(A,phase_lattice(ph))
|
||||||
(1.0_pReal &
|
Li = dot_T * A
|
||||||
+ prm%A(1:3,1:3,1)*(T - prm%T_ref) / 1.0_pReal &
|
|
||||||
+ prm%A(1:3,1:3,2)*(T - prm%T_ref)**2 / 2.0_pReal &
|
|
||||||
+ prm%A(1:3,1:3,3)*(T - prm%T_ref)**3 / 3.0_pReal &
|
|
||||||
)
|
|
||||||
end associate
|
end associate
|
||||||
dLi_dTstar = 0.0_pReal
|
dLi_dTstar = 0.0_pReal
|
||||||
|
|
||||||
|
|
|
@ -1,15 +1,13 @@
|
||||||
submodule(phase:mechanical) elastic
|
submodule(phase:mechanical) elastic
|
||||||
|
|
||||||
type :: tParameters
|
type :: tParameters
|
||||||
real(pReal),dimension(3) :: &
|
type(tPolynomial) :: &
|
||||||
C_11 = 0.0_pReal, &
|
C_11, &
|
||||||
C_12 = 0.0_pReal, &
|
C_12, &
|
||||||
C_13 = 0.0_pReal, &
|
C_13, &
|
||||||
C_33 = 0.0_pReal, &
|
C_33, &
|
||||||
C_44 = 0.0_pReal, &
|
C_44, &
|
||||||
C_66 = 0.0_pReal
|
C_66
|
||||||
real(pReal) :: &
|
|
||||||
T_ref
|
|
||||||
end type tParameters
|
end type tParameters
|
||||||
|
|
||||||
type(tParameters), allocatable, dimension(:) :: param
|
type(tParameters), allocatable, dimension(:) :: param
|
||||||
|
@ -47,35 +45,17 @@ module subroutine elastic_init(phases)
|
||||||
|
|
||||||
associate(prm => param(ph))
|
associate(prm => param(ph))
|
||||||
|
|
||||||
prm%T_ref = elastic%get_asFloat('T_ref', defaultVal=T_ROOM)
|
prm%C_11 = polynomial(elastic%asDict(),'C_11','T')
|
||||||
|
prm%C_12 = polynomial(elastic%asDict(),'C_12','T')
|
||||||
prm%C_11(1) = elastic%get_asFloat('C_11')
|
prm%C_44 = polynomial(elastic%asDict(),'C_44','T')
|
||||||
prm%C_11(2) = elastic%get_asFloat('C_11,T', defaultVal=0.0_pReal)
|
|
||||||
prm%C_11(3) = elastic%get_asFloat('C_11,T^2',defaultVal=0.0_pReal)
|
|
||||||
|
|
||||||
prm%C_12(1) = elastic%get_asFloat('C_12')
|
|
||||||
prm%C_12(2) = elastic%get_asFloat('C_12,T', defaultVal=0.0_pReal)
|
|
||||||
prm%C_12(3) = elastic%get_asFloat('C_12,T^2',defaultVal=0.0_pReal)
|
|
||||||
|
|
||||||
prm%C_44(1) = elastic%get_asFloat('C_44')
|
|
||||||
prm%C_44(2) = elastic%get_asFloat('C_44,T', defaultVal=0.0_pReal)
|
|
||||||
prm%C_44(3) = elastic%get_asFloat('C_44,T^2',defaultVal=0.0_pReal)
|
|
||||||
|
|
||||||
if (any(phase_lattice(ph) == ['hP','tI'])) then
|
if (any(phase_lattice(ph) == ['hP','tI'])) then
|
||||||
prm%C_13(1) = elastic%get_asFloat('C_13')
|
prm%C_13 = polynomial(elastic%asDict(),'C_13','T')
|
||||||
prm%C_13(2) = elastic%get_asFloat('C_13,T', defaultVal=0.0_pReal)
|
prm%C_33 = polynomial(elastic%asDict(),'C_33','T')
|
||||||
prm%C_13(3) = elastic%get_asFloat('C_13,T^2',defaultVal=0.0_pReal)
|
|
||||||
|
|
||||||
prm%C_33(1) = elastic%get_asFloat('C_33')
|
|
||||||
prm%C_33(2) = elastic%get_asFloat('C_33,T', defaultVal=0.0_pReal)
|
|
||||||
prm%C_33(3) = elastic%get_asFloat('C_33,T^2',defaultVal=0.0_pReal)
|
|
||||||
end if
|
end if
|
||||||
|
|
||||||
if (phase_lattice(ph) == 'tI') then
|
if (phase_lattice(ph) == 'tI') &
|
||||||
prm%C_66(1) = elastic%get_asFloat('C_66')
|
prm%C_66 = polynomial(elastic%asDict(),'C_66','T')
|
||||||
prm%C_66(2) = elastic%get_asFloat('C_66,T', defaultVal=0.0_pReal)
|
|
||||||
prm%C_66(3) = elastic%get_asFloat('C_66,T^2',defaultVal=0.0_pReal)
|
|
||||||
end if
|
|
||||||
|
|
||||||
end associate
|
end associate
|
||||||
end do
|
end do
|
||||||
|
@ -97,38 +77,20 @@ pure module function elastic_C66(ph,en) result(C66)
|
||||||
|
|
||||||
|
|
||||||
associate(prm => param(ph))
|
associate(prm => param(ph))
|
||||||
|
|
||||||
C66 = 0.0_pReal
|
C66 = 0.0_pReal
|
||||||
T = thermal_T(ph,en)
|
T = thermal_T(ph,en)
|
||||||
|
|
||||||
C66(1,1) = prm%C_11(1) &
|
C66(1,1) = prm%C_11%at(T)
|
||||||
+ prm%C_11(2)*(T - prm%T_ref) &
|
C66(1,2) = prm%C_12%at(T)
|
||||||
+ prm%C_11(3)*(T - prm%T_ref)**2
|
C66(4,4) = prm%C_44%at(T)
|
||||||
|
|
||||||
C66(1,2) = prm%C_12(1) &
|
|
||||||
+ prm%C_12(2)*(T - prm%T_ref) &
|
|
||||||
+ prm%C_12(3)*(T - prm%T_ref)**2
|
|
||||||
|
|
||||||
C66(4,4) = prm%C_44(1) &
|
|
||||||
+ prm%C_44(2)*(T - prm%T_ref) &
|
|
||||||
+ prm%C_44(3)*(T - prm%T_ref)**2
|
|
||||||
|
|
||||||
|
|
||||||
if (any(phase_lattice(ph) == ['hP','tI'])) then
|
if (any(phase_lattice(ph) == ['hP','tI'])) then
|
||||||
C66(1,3) = prm%C_13(1) &
|
C66(1,3) = prm%C_13%at(T)
|
||||||
+ prm%C_13(2)*(T - prm%T_ref) &
|
C66(3,3) = prm%C_33%at(T)
|
||||||
+ prm%C_13(3)*(T - prm%T_ref)**2
|
|
||||||
|
|
||||||
C66(3,3) = prm%C_33(1) &
|
|
||||||
+ prm%C_33(2)*(T - prm%T_ref) &
|
|
||||||
+ prm%C_33(3)*(T - prm%T_ref)**2
|
|
||||||
|
|
||||||
end if
|
end if
|
||||||
|
|
||||||
if (phase_lattice(ph) == 'tI') then
|
if (phase_lattice(ph) == 'tI') C66(6,6) = prm%C_66%at(T)
|
||||||
C66(6,6) = prm%C_66(1) &
|
|
||||||
+ prm%C_66(2)*(T - prm%T_ref) &
|
|
||||||
+ prm%C_66(3)*(T - prm%T_ref)**2
|
|
||||||
end if
|
|
||||||
|
|
||||||
C66 = lattice_symmetrize_C66(C66,phase_lattice(ph))
|
C66 = lattice_symmetrize_C66(C66,phase_lattice(ph))
|
||||||
|
|
||||||
|
|
|
@ -254,6 +254,36 @@ function integrateThermalState(Delta_t, ph,en) result(broken)
|
||||||
end function integrateThermalState
|
end function integrateThermalState
|
||||||
|
|
||||||
|
|
||||||
|
module subroutine thermal_restartWrite(groupHandle,ph)
|
||||||
|
|
||||||
|
integer(HID_T), intent(in) :: groupHandle
|
||||||
|
integer, intent(in) :: ph
|
||||||
|
|
||||||
|
integer :: so
|
||||||
|
|
||||||
|
|
||||||
|
do so = 1,thermal_Nsources(ph)
|
||||||
|
call HDF5_write(thermalState(ph)%p(so)%state,groupHandle,'omega_thermal')
|
||||||
|
enddo
|
||||||
|
|
||||||
|
end subroutine thermal_restartWrite
|
||||||
|
|
||||||
|
|
||||||
|
module subroutine thermal_restartRead(groupHandle,ph)
|
||||||
|
|
||||||
|
integer(HID_T), intent(in) :: groupHandle
|
||||||
|
integer, intent(in) :: ph
|
||||||
|
|
||||||
|
integer :: so
|
||||||
|
|
||||||
|
|
||||||
|
do so = 1,thermal_Nsources(ph)
|
||||||
|
call HDF5_read(thermalState(ph)%p(so)%state0,groupHandle,'omega_thermal')
|
||||||
|
enddo
|
||||||
|
|
||||||
|
end subroutine thermal_restartRead
|
||||||
|
|
||||||
|
|
||||||
module subroutine thermal_forward()
|
module subroutine thermal_forward()
|
||||||
|
|
||||||
integer :: ph, so
|
integer :: ph, so
|
||||||
|
|
|
@ -0,0 +1,179 @@
|
||||||
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
!> @author Martin Diehl, KU Leuven
|
||||||
|
!> @brief Polynomial representation for variable data
|
||||||
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
module polynomials
|
||||||
|
use prec
|
||||||
|
use IO
|
||||||
|
use YAML_parse
|
||||||
|
use YAML_types
|
||||||
|
|
||||||
|
implicit none
|
||||||
|
private
|
||||||
|
|
||||||
|
type, public :: tPolynomial
|
||||||
|
real(pReal), dimension(:), allocatable :: coef
|
||||||
|
real(pReal) :: x_ref
|
||||||
|
contains
|
||||||
|
procedure, public :: at => eval
|
||||||
|
procedure, public :: der1_at => eval_der1
|
||||||
|
end type tPolynomial
|
||||||
|
|
||||||
|
interface polynomial
|
||||||
|
module procedure polynomial_from_dict
|
||||||
|
module procedure polynomial_from_coef
|
||||||
|
end interface polynomial
|
||||||
|
|
||||||
|
public :: &
|
||||||
|
polynomial, &
|
||||||
|
polynomials_init
|
||||||
|
|
||||||
|
contains
|
||||||
|
|
||||||
|
|
||||||
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
!> @brief Run self-test.
|
||||||
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
subroutine polynomials_init()
|
||||||
|
|
||||||
|
print'(/,1x,a)', '<<<+- polynomials init -+>>>'; flush(IO_STDOUT)
|
||||||
|
|
||||||
|
call selfTest()
|
||||||
|
|
||||||
|
end subroutine polynomials_init
|
||||||
|
|
||||||
|
|
||||||
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
!> @brief Initialize a Polynomial from Coefficients.
|
||||||
|
!--------------------------------------------------------------------------------------------------
|
||||||
|
function polynomial_from_coef(coef,x_ref) result(p)
|
||||||
|
|
||||||
|
real(pReal), dimension(:), intent(in) :: coef
|
||||||
|
real(pReal), intent(in) :: x_ref
|
||||||
|
type(tPolynomial) :: p
|
||||||
|
|
||||||
|
|
||||||
|
allocate(p%coef(0:size(coef)-1),source=coef) ! should be zero based
|
||||||
|
p%x_ref = x_ref
|
||||||
|
|
||||||
|
end function polynomial_from_coef
|
||||||
|
|
||||||
|
|
||||||
|
!--------------------------------------------------------------------------------------------------
!> @brief Initialize a polynomial from a dictionary with coefficients.
!> @details The constant term is expected under key 'y', the linear one under 'y,x', and
!> higher orders under 'y,x^N'. Coefficients must be given contiguously in ascending order;
!> the reference position 'x_ref' is required whenever any x-dependence is present.
!--------------------------------------------------------------------------------------------------
function polynomial_from_dict(dict,y,x) result(p)

  type(tDict), intent(in) :: dict                                                                   !< dictionary holding the coefficients
  character(len=*), intent(in) :: y, x                                                              !< names of the property and of its argument

  type(tPolynomial) :: p

  real(pReal), dimension(:), allocatable :: coef
  real(pReal) :: x_ref
  integer :: o
  character(len=2) :: o_s


  allocate(coef(1),source=dict%get_asFloat(y))                                                      ! constant term is mandatory

  if (dict%contains(y//','//x)) then
    x_ref = dict%get_asFloat(x//'_ref')                                                             ! reference point required once any x-dependence exists
    coef = [coef,dict%get_asFloat(y//','//x)]
    do o = 2, 10                                                                                    ! generalized: powers beyond ^2 (previously hard-coded limit)
      write(o_s,'(I0)') o
      if (dict%contains(y//','//x//'^'//trim(o_s))) then
        coef = [coef,dict%get_asFloat(y//','//x//'^'//trim(o_s))]
      else
        exit                                                                                        ! coefficients must be contiguous (unchanged contract)
      end if
    end do
  else
    x_ref = huge(0.0_pReal)                                                                         ! never used in this case; extreme value simplifies debugging
  end if

  p = polynomial(coef,x_ref)

end function polynomial_from_dict
|
||||||
|
|
||||||
|
|
||||||
|
!--------------------------------------------------------------------------------------------------
!> @brief Evaluate a polynomial at position x.
!--------------------------------------------------------------------------------------------------
pure function eval(self,x) result(y)

  class(tPolynomial), intent(in) :: self
  real(pReal), intent(in) :: x                                                                      !< position at which to evaluate
  real(pReal) :: y

  real(pReal) :: dx
  integer :: o


  dx = x - self%x_ref                                                                               ! loop-invariant shift to the reference position
  y = self%coef(0)
  do o = 1, ubound(self%coef,1)                                                                     ! coef is zero-based, see polynomial_from_coef
    y = y + self%coef(o)*dx**o
  end do

end function eval
|
||||||
|
|
||||||
|
|
||||||
|
!--------------------------------------------------------------------------------------------------
!> @brief Evaluate the first derivative of a polynomial at position x.
!--------------------------------------------------------------------------------------------------
pure function eval_der1(self,x) result(y)

  class(tPolynomial), intent(in) :: self
  real(pReal), intent(in) :: x                                                                      !< position at which to evaluate
  real(pReal) :: y

  real(pReal) :: dx
  integer :: o


  dx = x - self%x_ref                                                                               ! loop-invariant shift to the reference position
  y = 0.0_pReal                                                                                     ! derivative of the constant term vanishes
  do o = 1, ubound(self%coef,1)
    y = y + real(o,pReal)*self%coef(o)*dx**(o-1)
  end do

end function eval_der1
|
||||||
|
|
||||||
|
|
||||||
|
!--------------------------------------------------------------------------------------------------
!> @brief Check correctness of polynomial functionality.
!--------------------------------------------------------------------------------------------------
subroutine selfTest

  type(tPolynomial) :: p1, p2
  real(pReal), dimension(3) :: coef
  real(pReal) :: x_ref, x
  class(tNode), pointer :: dict
  character(len=pStringLen), dimension(3) :: coef_s
  character(len=pStringLen) :: x_ref_s, x_s, YAML_s


  call random_number(coef)
  call random_number(x_ref)
  call random_number(x)

  ! Map [0,1) random numbers to [-0.5,9.5). Fix: the scale factor was written as
  ! '10_pReal', i.e. an INTEGER literal with the REAL kind parameter pReal -- it only
  ! compiled because an integer kind of that value happens to exist. Use a real literal.
  coef  = coef*10.0_pReal  - 0.5_pReal
  x_ref = x_ref*10.0_pReal - 0.5_pReal
  x     = x*10.0_pReal     - 0.5_pReal

  p1 = polynomial(coef,x_ref)
  if (dNeq(p1%at(x_ref),coef(1))) error stop 'polynomial: @ref'                                     ! at x_ref only the constant term survives

  ! Build the same polynomial via the dictionary constructor and compare.
  write(coef_s(1),*) coef(1)
  write(coef_s(2),*) coef(2)
  write(coef_s(3),*) coef(3)
  write(x_ref_s,*) x_ref
  write(x_s,*) x
  YAML_s = 'C: '//trim(adjustl(coef_s(1)))//IO_EOL//&
           'C,T: '//trim(adjustl(coef_s(2)))//IO_EOL//&
           'C,T^2: '//trim(adjustl(coef_s(3)))//IO_EOL//&
           'T_ref: '//trim(adjustl(x_ref_s))//IO_EOL
  dict => YAML_parse_str(trim(YAML_s))
  p2 = polynomial(dict%asDict(),'C','T')
  if (dNeq(p1%at(x),p2%at(x),1.0e-10_pReal)) error stop 'polynomials: init'

  ! Odd polynomial (linear term only): antisymmetric around x_ref, constant derivative.
  p1 = polynomial(coef*[0.0_pReal,1.0_pReal,0.0_pReal],x_ref)
  if (dNeq(p1%at(x_ref+x),-p1%at(x_ref-x),1.0e-10_pReal)) error stop 'polynomials: eval(odd)'
  if (dNeq(p1%der1_at(x),p1%der1_at(5.0_pReal*x),1.0e-10_pReal)) error stop 'polynomials: eval_der(odd)'

  ! Even polynomial (quadratic term only): symmetric around x_ref, antisymmetric derivative.
  p1 = polynomial(coef*[0.0_pReal,0.0_pReal,1.0_pReal],x_ref)
  if (dNeq(p1%at(x_ref+x),p1%at(x_ref-x),1.0e-10_pReal)) error stop 'polynomials: eval(even)'
  if (dNeq(p1%der1_at(x_ref+x),-p1%der1_at(x_ref-x),1.0e-10_pReal)) error stop 'polynomials: eval_der(even)'

end subroutine selfTest
|
||||||
|
|
||||||
|
end module polynomials
|
Loading…
Reference in New Issue