Merge branch 'hybridIA-and-ODFsampling' into 'development'

Hybrid IA and ODF sampling

See merge request damask/DAMASK!239
Martin Diehl 2020-09-30 06:10:39 +02:00
commit e1d459aab8
10 changed files with 386827 additions and 48 deletions


@@ -1,6 +1,8 @@
import numpy as np
from . import mechanics
from . import util
from . import grid_filters
_P = -1
@@ -647,13 +649,63 @@ class Rotation:
        return Rotation(q.reshape(r.shape[:-1]+(4,)) if shape is not None else q)._standardize()

    # for compatibility (old names do not follow convention)
    fromEulers = from_Eulers
    fromQuaternion = from_quaternion
    asAxisAngle = as_axis_angle
    # for compatibility
    __mul__ = __matmul__

    @staticmethod
    def from_ODF(weights,Eulers,N=500,degrees=True,fractions=True,seed=None):
        """
        Sample discrete values from a binned ODF.

        Parameters
        ----------
        weights : numpy.ndarray of shape (n)
            Texture intensity values (probability density or volume fraction) at Euler grid points.
        Eulers : numpy.ndarray of shape (n,3)
            Grid coordinates in Euler space at which weights are defined.
        N : integer, optional
            Number of discrete orientations to be sampled from the given ODF.
            Defaults to 500.
        degrees : boolean, optional
            Euler grid values are in degrees. Defaults to True.
        fractions : boolean, optional
            ODF values correspond to volume fractions, not probability density.
            Defaults to True.
        seed : {None, int, array_like[ints], SeedSequence, BitGenerator, Generator}, optional
            A seed to initialize the BitGenerator. Defaults to None, i.e. unpredictable entropy
            will be pulled from the OS.

        Returns
        -------
        samples : damask.Rotation of shape (N)
            Array of sampled rotations closely representing the input ODF.

        Notes
        -----
        Due to the distortion of Euler space in the vicinity of ϕ = 0, probability densities, p,
        defined on grid points with ϕ = 0 will never result in reconstructed orientations as
        their dV/V = p dγ = p × 0. Hence, it is recommended to transform any such dataset to
        cell centers that avoid grid points at ϕ = 0.

        References
        ----------
        P. Eisenlohr, F. Roters, Computational Materials Science 42(4), 670-678, 2008
        https://doi.org/10.1016/j.commatsci.2007.09.015

        """
        def _dg(eu,deg):
            """Return infinitesimal Euler space volume of bin(s)."""
            Eulers_sorted = eu[np.lexsort((eu[:,0],eu[:,1],eu[:,2]))]
            steps,size,_ = grid_filters.cell_coord0_gridSizeOrigin(Eulers_sorted)
            delta = np.radians(size/steps) if deg else size/steps
            return delta[0]*2.0*np.sin(delta[1]/2.0)*delta[2] / 8.0 / np.pi**2 * np.sin(np.radians(eu[:,1]) if deg else eu[:,1])

        dg = 1.0 if fractions else _dg(Eulers,degrees)
        dV_V = dg * np.maximum(0.0,weights.squeeze())

        return Rotation.from_Eulers(Eulers[util.hybrid_IA(dV_V,N,seed)],degrees)

    @staticmethod
    def from_spherical_component(center,sigma,N=500,degrees=True,seed=None):
        """

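The new from_ODF entry point weights every Euler-space bin by its volume fraction dV/V and hands the actual sampling to util.hybrid_IA; for probability densities, _dg evaluates Δφ1 · 2 sin(ΔΦ/2) · Δφ2 · sin(Φ) / (8π²), the per-bin integral of dV/V = sin(Φ) dφ1 dΦ dφ2 / (8π²). A minimal usage sketch of the API added above (the 72×18×18 grid and the random intensities are made-up stand-ins for measured ODF data):

import numpy as np
from damask import Rotation, grid_filters

steps  = np.array([72,18,18])                        # hypothetical 5-degree binning of Euler space
limits = np.array([360.,90.,90.])
Eulers = grid_filters.cell_coord0(steps,limits).reshape(-1,3,order='F')

weights = np.random.rand(Eulers.shape[0])            # made-up intensities standing in for measured ODF values

orientations = Rotation.from_ODF(weights,Eulers,N=2**14,degrees=True,fractions=True,seed=0)
eu = orientations.as_Eulers(True)                    # (16384,3) Bunge Euler angles in degrees

With fractions=True (the default) the weights are used directly as volume fractions; with fractions=False they are treated as probability densities and multiplied by the bin volumes from _dg first.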

@@ -20,6 +20,7 @@ __all__=[
         'execute',
         'show_progress',
         'scale_to_coprime',
         'hybrid_IA',
         'return_message',
         'extendableOption',
         'execution_stamp'
@@ -187,6 +188,20 @@ def execution_stamp(class_name,function_name=None):
    return f'damask.{class_name}{_function_name} v{version} ({now})'


def hybrid_IA(dist,N,seed=None):
    N_opt_samples,N_inv_samples = (max(np.count_nonzero(dist),N),0)     # random subsampling if too few samples are requested

    scale_,scale,inc_factor = (0.0,float(N_opt_samples),1.0)
    while (not np.isclose(scale, scale_)) and (N_inv_samples != N_opt_samples):
        repeats = np.rint(scale*dist).astype(int)
        N_inv_samples = np.sum(repeats)
        scale_,scale,inc_factor = (scale,scale+inc_factor*0.5*(scale - scale_), inc_factor*2.0) \
                                  if N_inv_samples < N_opt_samples else \
                                  (scale_,0.5*(scale_ + scale), 1.0)

    return np.repeat(np.arange(len(dist)),repeats)[np.random.default_rng(seed).permutation(N_inv_samples)[:N]]

####################################################################################################
# Classes
####################################################################################################

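hybrid_IA implements the hybrid integer approximation of Eisenlohr and Roters (the reference cited in from_ODF above): the distribution is scaled so that rounding each bin to an integer repeat count yields at least max(number of non-zero bins, N) candidate indices, the scale being found by an expanding search that switches to bisection once it overshoots; the candidates are then randomly permuted and truncated to exactly N. A small sketch of the resulting behaviour, using a made-up five-bin distribution:

import numpy as np
from damask import util

dist = np.array([0.1, 0.4, 0.2, 0.0, 0.3])           # made-up bin weights (need not be normalized)
idx  = util.hybrid_IA(dist, 1000, seed=0)            # 1000 bin indices, frequencies proportional to dist

counts = np.bincount(idx, minlength=dist.size)
print(idx.shape, counts/counts.sum())                # (1000,) and approximately dist/dist.sum(); bin 3 stays empty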
File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -77,17 +77,17 @@ class TestColormap:
    @pytest.mark.parametrize('format',['ASCII','paraview','GOM','gmsh'])
    @pytest.mark.parametrize('model',['rgb','hsv','hsl','xyz','lab','msh'])
    def test_from_range(self,model,format,tmpdir):
    def test_from_range(self,model,format,tmp_path):
        N = np.random.randint(2,256)
        c = Colormap.from_range(np.random.rand(3),np.random.rand(3),model=model,N=N) # noqa
        eval(f'c.save_{format}(tmpdir/"color_out")')
        eval(f'c.save_{format}(tmp_path/"color_out")')

    @pytest.mark.parametrize('format',['ASCII','paraview','GOM','gmsh'])
    @pytest.mark.parametrize('name',['strain','gnuplot','Greys','PRGn','viridis'])
    def test_from_predefined(self,name,format,tmpdir):
    def test_from_predefined(self,name,format,tmp_path):
        N = np.random.randint(2,256)
        c = Colormap.from_predefined(name,N) # noqa
        os.chdir(tmpdir)
        os.chdir(tmp_path)
        eval(f'c.save_{format}()')

    @pytest.mark.parametrize('format,name',[('ASCII','test.txt'),
@@ -95,9 +95,9 @@ class TestColormap:
                                            ('GOM','test.legend'),
                                            ('gmsh','test.msh')
                                           ])
    def test_write_filehandle(self,format,name,tmpdir):
    def test_write_filehandle(self,format,name,tmp_path):
        c = Colormap.from_predefined('Dark2') # noqa
        fname = tmpdir/name
        fname = tmp_path/name
        with open(fname,'w') as f: # noqa
            eval(f'c.save_{format}(f)')
        for i in range(10):
@@ -146,14 +146,14 @@ class TestColormap:
                                            ('GOM','.legend'),
                                            ('gmsh','.msh')
                                           ])
    def test_compare_reference(self,format,ext,tmpdir,reference_dir,update):
    def test_compare_reference(self,format,ext,tmp_path,reference_dir,update):
        name = 'binary'
        c = Colormap.from_predefined(name) # noqa
        if update:
            os.chdir(reference_dir)
            eval(f'c.save_{format}()')
        else:
            os.chdir(tmpdir)
            os.chdir(tmp_path)
            eval(f'c.save_{format}()')
            time.sleep(.5)
            assert filecmp.cmp(tmpdir/(name+ext),reference_dir/(name+ext))
            assert filecmp.cmp(tmp_path/(name+ext),reference_dir/(name+ext))


@@ -42,46 +42,46 @@ class TestGeom:
        assert str(default.diff(new)) != ''

    def test_write_read_str(self,default,tmpdir):
        default.save_ASCII(str(tmpdir/'default.geom'))
        new = Geom.load_ASCII(str(tmpdir/'default.geom'))
    def test_write_read_str(self,default,tmp_path):
        default.save_ASCII(str(tmp_path/'default.geom'))
        new = Geom.load_ASCII(str(tmp_path/'default.geom'))
        assert geom_equal(default,new)

    def test_write_read_file(self,default,tmpdir):
        with open(tmpdir/'default.geom','w') as f:
    def test_write_read_file(self,default,tmp_path):
        with open(tmp_path/'default.geom','w') as f:
            default.save_ASCII(f,compress=True)
        with open(tmpdir/'default.geom') as f:
        with open(tmp_path/'default.geom') as f:
            new = Geom.load_ASCII(f)
        assert geom_equal(default,new)

    def test_read_write_vtr(self,default,tmpdir):
        default.save(tmpdir/'default')
    def test_read_write_vtr(self,default,tmp_path):
        default.save(tmp_path/'default')
        for _ in range(10):
            time.sleep(.2)
            if os.path.exists(tmpdir/'default.vtr'): break
            if os.path.exists(tmp_path/'default.vtr'): break
        new = Geom.load(tmpdir/'default.vtr')
        new = Geom.load(tmp_path/'default.vtr')
        assert geom_equal(new,default)

    def test_invalid_geom(self,tmpdir):
        with open('invalid_file','w') as f:
    def test_invalid_geom(self,tmp_path):
        with open(tmp_path/'invalid_file','w') as f:
            f.write('this is not a valid header')
        with open('invalid_file','r') as f:
        with open(tmp_path/'invalid_file','r') as f:
            with pytest.raises(TypeError):
                Geom.load_ASCII(f)

    def test_invalid_vtr(self,tmpdir):
    def test_invalid_vtr(self,tmp_path):
        v = VTK.from_rectilinearGrid(np.random.randint(5,10,3)*2,np.random.random(3) + 1.0)
        v.save(tmpdir/'no_materialpoint.vtr')
        v.save(tmp_path/'no_materialpoint.vtr')
        for _ in range(10):
            time.sleep(.2)
            if os.path.exists(tmpdir/'no_materialpoint.vtr'): break
            if os.path.exists(tmp_path/'no_materialpoint.vtr'): break
        with pytest.raises(ValueError):
            Geom.load(tmpdir/'no_materialpoint.vtr')
            Geom.load(tmp_path/'no_materialpoint.vtr')

    def test_invalid_material(self):
        with pytest.raises(TypeError):

@@ -92,9 +92,9 @@ class TestGeom:
        assert g.material.dtype in np.sctypes['int']

    @pytest.mark.parametrize('compress',[True,False])
    def test_compress(self,default,tmpdir,compress):
        default.save_ASCII(tmpdir/'default.geom',compress=compress)
        new = Geom.load_ASCII(tmpdir/'default.geom')
    def test_compress(self,default,tmp_path,compress):
        default.save_ASCII(tmp_path/'default.geom',compress=compress)
        new = Geom.load_ASCII(tmp_path/'default.geom')
        assert geom_equal(new,default)


@@ -1,11 +1,11 @@
import os
import pytest
import numpy as np
from scipy import stats
from damask import Rotation
from damask import Table
from damask import _rotation
from damask import grid_filters
n = 1000
atol=1.e-4
@@ -13,7 +13,7 @@ atol=1.e-4
@pytest.fixture
def reference_dir(reference_dir_base):
    """Directory containing reference results."""
    return os.path.join(reference_dir_base,'Rotation')
    return reference_dir_base/'Rotation'
@pytest.fixture
def set_of_rotations(set_of_quaternions):
@@ -943,3 +943,39 @@ class TestRotation:
        sigma_out = np.degrees(np.std(dist))
        p = np.average(p)
        assert (.9 < sigma/sigma_out < 1.1) and p > 1e-2, f'{sigma/sigma_out},{p}'

    @pytest.mark.parametrize('fractions',[True,False])
    @pytest.mark.parametrize('degrees',[True,False])
    @pytest.mark.parametrize('N',[2**13,2**14,2**15])
    def test_ODF_cell(self,reference_dir,fractions,degrees,N):
        steps = np.array([144,36,36])
        limits = np.array([360.,90.,90.])
        rng = tuple(zip(np.zeros(3),limits))

        weights = Table.load(reference_dir/'ODF_experimental_cell.txt').get('intensity').flatten()
        Eulers = grid_filters.cell_coord0(steps,limits)
        Eulers = np.radians(Eulers) if not degrees else Eulers

        Eulers_r = Rotation.from_ODF(weights,Eulers.reshape(-1,3,order='F'),N,degrees,fractions).as_Eulers(True)
        weights_r = np.histogramdd(Eulers_r,steps,rng)[0].flatten(order='F')/N * np.sum(weights)

        if fractions: assert np.sqrt(((weights_r - weights) ** 2).mean()) < 4

    @pytest.mark.parametrize('degrees',[True,False])
    @pytest.mark.parametrize('N',[2**13,2**14,2**15])
    def test_ODF_node(self,reference_dir,degrees,N):
        steps = np.array([144,36,36])
        limits = np.array([360.,90.,90.])
        rng = tuple(zip(-limits/steps*.5,limits-limits/steps*.5))

        weights = Table.load(reference_dir/'ODF_experimental.txt').get('intensity')
        weights = weights.reshape(steps+1,order='F')[:-1,:-1,:-1].reshape(-1,order='F')
        Eulers = grid_filters.node_coord0(steps,limits)[:-1,:-1,:-1]
        Eulers = np.radians(Eulers) if not degrees else Eulers

        Eulers_r = Rotation.from_ODF(weights,Eulers.reshape(-1,3,order='F'),N,degrees).as_Eulers(True)
        weights_r = np.histogramdd(Eulers_r,steps,rng)[0].flatten(order='F')/N * np.sum(weights)

        assert np.sqrt(((weights_r - weights) ** 2).mean()) < 5

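Both ODF tests apply the same acceptance criterion: the sampled orientations are binned back onto the original Euler grid, the histogram is rescaled to the total weight of the input, and the RMS deviation from the input intensities must stay below a small tolerance. A sketch of that criterion as a standalone helper (the function name is made up for illustration; the arguments mirror the variables used in the tests above):

import numpy as np

def rms_reconstruction_error(weights, Eulers_sampled, steps, rng):
    # bin the N sampled Euler triplets back onto the original grid and rescale to the input's total weight
    N = Eulers_sampled.shape[0]
    weights_r = np.histogramdd(Eulers_sampled, steps, rng)[0].flatten(order='F')/N * np.sum(weights)
    return np.sqrt(((weights_r - weights)**2).mean())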

@@ -34,31 +34,31 @@ class TestTable:
        assert np.allclose(d,1.0) and d.shape[1:] == (1,)

    @pytest.mark.parametrize('mode',['str','path'])
    def test_write_read(self,default,tmpdir,mode):
        default.save(tmpdir/'default.txt')
    def test_write_read(self,default,tmp_path,mode):
        default.save(tmp_path/'default.txt')
        if mode == 'path':
            new = Table.load(tmpdir/'default.txt')
            new = Table.load(tmp_path/'default.txt')
        elif mode == 'str':
            new = Table.load(str(tmpdir/'default.txt'))
            new = Table.load(str(tmp_path/'default.txt'))
        assert all(default.data==new.data) and default.shapes == new.shapes

    def test_write_read_file(self,default,tmpdir):
        with open(tmpdir/'default.txt','w') as f:
    def test_write_read_file(self,default,tmp_path):
        with open(tmp_path/'default.txt','w') as f:
            default.save(f)
        with open(tmpdir/'default.txt') as f:
        with open(tmp_path/'default.txt') as f:
            new = Table.load(f)
        assert all(default.data==new.data) and default.shapes == new.shapes

    def test_write_read_legacy_style(self,default,tmpdir):
        with open(tmpdir/'legacy.txt','w') as f:
    def test_write_read_legacy_style(self,default,tmp_path):
        with open(tmp_path/'legacy.txt','w') as f:
            default.save(f,legacy=True)
        with open(tmpdir/'legacy.txt') as f:
        with open(tmp_path/'legacy.txt') as f:
            new = Table.load(f)
        assert all(default.data==new.data) and default.shapes == new.shapes

    def test_write_invalid_format(self,default,tmpdir):
    def test_write_invalid_format(self,default,tmp_path):
        with pytest.raises(TypeError):
            default.save(tmpdir/'shouldnotbethere.txt',format='invalid')
            default.save(tmp_path/'shouldnotbethere.txt',format='invalid')

    @pytest.mark.parametrize('mode',['str','path'])
    def test_read_ang(self,reference_dir,mode):


@@ -1,5 +1,7 @@
import pytest
import numpy as np
from scipy import stats
from damask import util
@@ -31,3 +33,14 @@ class TestUtil:
    def test_lackofprecision(self):
        with pytest.raises(ValueError):
            util.scale_to_coprime(np.array([1/333.333,1,1]))

    @pytest.mark.parametrize('rv',[stats.rayleigh(),stats.weibull_min(1.2),stats.halfnorm(),stats.pareto(2.62)])
    def test_hybridIA(self,rv):
        bins = np.linspace(0,10,100000)
        centers = (bins[1:]+bins[:-1])/2
        N_samples = bins.shape[0]-1000
        dist = rv.pdf(centers)
        selected = util.hybrid_IA(dist,N_samples)
        dist_sampled = np.histogram(centers[selected],bins)[0]/N_samples*np.sum(dist)
        assert np.sqrt(((dist - dist_sampled) ** 2).mean()) < .025 and selected.shape[0]==N_samples