[skip ci] added parameter description; shortened hybrid_IA pre-loop logic
parent 8d204ea445
commit 31f86c28f4
@@ -665,9 +665,10 @@ class Rotation:
         Parameters
         ----------
-        fname : file, str, or pathlib.Path
-            ODF file containing normalized probability (labeled 'intensity')
-            on a grid in Euler space (labeled 'euler').
+        weights : numpy.ndarray of shape (n)
+            Texture intensity values (probability density or volume fraction) at Euler grid points.
+        Eulers : numpy.ndarray of shape (n,3)
+            Grid coordinates in Euler space at which weights are defined.
         N : integer, optional
             Number of discrete orientations to be sampled from the given ODF.
             Defaults to 500.
@@ -680,10 +681,6 @@ class Rotation:
             A seed to initialize the BitGenerator. Defaults to None, i.e. unpredictable entropy
             will be pulled from the OS.
 
-        Notes
-        -----
-        Explain here the different things that need to be considered
-
         """
         def _dg(eu,deg):
             """Return infinitesimal Euler space volume of bin(s)."""
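The rewritten docstring documents sampling from in-memory arrays rather than from an ODF file. A minimal usage sketch, assuming the surrounding package imports as damask and that the documented method is the ODF-sampling constructor Rotation.from_ODF (the method name is not visible in these hunks; the degrees keyword is taken from the call to Rotation.from_Eulers in the next hunk):

import numpy as np
from damask import Rotation                                 # package name assumed

# regular 10-degree grid in Bunge Euler space, flattened to shape (n,3)
phi1,Phi,phi2 = np.meshgrid(np.arange(0.,360.,10.),
                            np.arange(0.,180.,10.),
                            np.arange(0.,360.,10.),indexing='ij')
Eulers  = np.stack([phi1,Phi,phi2],axis=-1).reshape(-1,3)
weights = np.random.default_rng().random(len(Eulers))       # stand-in for measured intensities

# method name 'from_ODF' assumed; parameters follow the docstring above
orientations = Rotation.from_ODF(weights,Eulers,N=500,degrees=True,seed=42)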
@@ -694,9 +691,8 @@ class Rotation:
 
         dg = 1.0 if fractions else _dg(Eulers,degrees)
         dV_V = dg * np.maximum(0.0,weights.squeeze())
-        orientations = Rotation.from_Eulers(Eulers[util.hybrid_IA(dV_V,N,seed)],degrees)
 
-        return orientations
+        return Rotation.from_Eulers(Eulers[util.hybrid_IA(dV_V,N,seed)],degrees)
 
 
     @staticmethod
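Here the intensity at each grid point is weighted by the infinitesimal Euler-space bin volume dg from the nested helper _dg before util.hybrid_IA converts dV_V into repeat counts. For reference, the invariant volume element in Bunge Euler angles is dg = sin(Phi) dphi1 dPhi dphi2 / (8 pi^2); a rough, purely illustrative stand-in for _dg on a regular grid (the real helper's body is not part of this diff) could read:

import numpy as np

def _dg_sketch(eu,deg):
    """Illustrative bin volume sin(Phi)*dphi1*dPhi*dphi2/(8 pi^2) on a regular Euler grid."""
    eu_r  = np.radians(eu) if deg else eu
    delta = np.array([np.diff(np.unique(eu_r[:,i])).mean() for i in range(3)])   # grid spacing per angle
    return np.prod(delta)*np.sin(eu_r[:,1])/(8.0*np.pi**2)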
@@ -189,20 +189,17 @@ def execution_stamp(class_name,function_name=None):
 
 
 def hybrid_IA(dist,N,seed=None):
-    rng = np.random.default_rng(seed)
-    N_opt_samples = max(np.count_nonzero(dist),N)                    # random subsampling if too little samples requested
+    N_opt_samples,N_inv_samples = (max(np.count_nonzero(dist),N),0)  # random subsampling if too little samples requested
 
     scale_,scale,inc_factor = (0.0,float(N_opt_samples),1.0)
-    repeats = np.rint(scale*dist).astype(int)
-    N_inv_samples = np.sum(repeats)
     while (not np.isclose(scale, scale_)) and (N_inv_samples != N_opt_samples):
+        repeats = np.rint(scale*dist).astype(int)
+        N_inv_samples = np.sum(repeats)
         scale_,scale,inc_factor = (scale,scale+inc_factor*0.5*(scale - scale_), inc_factor*2.0) \
                                   if N_inv_samples < N_opt_samples else \
                                   (scale_,0.5*(scale_ + scale), 1.0)
-        repeats = np.rint(scale*dist).astype(int)
-        N_inv_samples = np.sum(repeats)
 
-    return np.repeat(np.arange(len(dist)),repeats)[rng.permutation(N_inv_samples)[:N]]
+    return np.repeat(np.arange(len(dist)),repeats)[np.random.default_rng(seed).permutation(N_inv_samples)[:N]]
 
 
 ####################################################################################################
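As a stand-alone check of the shortened pre-loop logic, the post-commit hybrid_IA (repeated from the + lines above, minus the inline comment; NumPy only) reproduces a small target distribution; the scale search stops once the rounded per-bin repeat counts sum to the requested sample size:

import numpy as np

def hybrid_IA(dist,N,seed=None):
    N_opt_samples,N_inv_samples = (max(np.count_nonzero(dist),N),0)
    scale_,scale,inc_factor = (0.0,float(N_opt_samples),1.0)
    while (not np.isclose(scale, scale_)) and (N_inv_samples != N_opt_samples):
        repeats = np.rint(scale*dist).astype(int)                   # per-bin repeat count at current scale
        N_inv_samples = np.sum(repeats)
        scale_,scale,inc_factor = (scale,scale+inc_factor*0.5*(scale - scale_), inc_factor*2.0) \
                                  if N_inv_samples < N_opt_samples else \
                                  (scale_,0.5*(scale_ + scale), 1.0)
    return np.repeat(np.arange(len(dist)),repeats)[np.random.default_rng(seed).permutation(N_inv_samples)[:N]]

dist = np.array([0.1,0.4,0.3,0.2])                                  # relative weight of four bins
idx  = hybrid_IA(dist,1000,seed=0)                                  # 1000 sampled bin indices
print(np.bincount(idx,minlength=len(dist))/len(idx))                # ~ [0.1 0.4 0.3 0.2]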