[skip ci] added parameter description; shortened hybrid_IA pre-loop logic
parent 8d204ea445
commit 31f86c28f4
@@ -665,9 +665,10 @@ class Rotation:
 
         Parameters
         ----------
-        fname : file, str, or pathlib.Path
-            ODF file containing normalized probability (labeled 'intensity')
-            on a grid in Euler space (labeled 'euler').
+        weights : numpy.ndarray of shape (n)
+            Texture intensity values (probability density or volume fraction) at Euler grid points.
+        Eulers : numpy.ndarray of shape (n,3)
+            Grid coordinates in Euler space at which weights are defined.
         N : integer, optional
             Number of discrete orientations to be sampled from the given ODF.
             Defaults to 500.
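The new parameter block implies flattened arrays over a regular grid in Euler space. A minimal sketch of inputs matching these shapes (the 5-degree spacing, the angle ranges, and the random placeholder intensities are illustrative assumptions, not part of this commit):

import numpy as np

# hypothetical regular 5-degree grid in Euler space (phi1, Phi, phi2), in degrees
phi1,Phi,phi2 = np.meshgrid(np.arange(0.0,360.0,5.0),
                            np.arange(0.0, 90.0,5.0),
                            np.arange(0.0, 90.0,5.0),
                            indexing='ij')
Eulers  = np.column_stack([phi1.ravel(),Phi.ravel(),phi2.ravel()])      # grid coordinates, shape (n,3)
weights = np.random.default_rng().random(Eulers.shape[0])               # placeholder intensities, shape (n)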
@@ -680,10 +681,6 @@ class Rotation:
             A seed to initialize the BitGenerator. Defaults to None, i.e. unpredictable entropy
             will be pulled from the OS.
 
-        Notes
-        -----
-        Explain here the different things that need to be considered
-
         """
         def _dg(eu,deg):
             """Return infinitesimal Euler space volume of bin(s)."""
@@ -694,9 +691,8 @@ class Rotation:
 
         dg = 1.0 if fractions else _dg(Eulers,degrees)
         dV_V = dg * np.maximum(0.0,weights.squeeze())
-        orientations = Rotation.from_Eulers(Eulers[util.hybrid_IA(dV_V,N,seed)],degrees)
 
-        return orientations
+        return Rotation.from_Eulers(Eulers[util.hybrid_IA(dV_V,N,seed)],degrees)
 
 
     @staticmethod
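With the temporary variable gone, the sampling method hands the result of Rotation.from_Eulers straight back to the caller. A hedged usage sketch (the method name Rotation.from_ODF, the keyword defaults, and the seed value are assumptions, since the signature is not part of this excerpt; weights/Eulers are assumed to be prepared as sketched above):

import damask

# sample 500 discrete orientations from the binned ODF defined by weights/Eulers;
# a fixed seed makes the hybrid-IA subsampling reproducible
orientations = damask.Rotation.from_ODF(weights,Eulers,N=500,degrees=True,seed=20191102)

Because seed is forwarded to util.hybrid_IA, calling the method twice with the same seed and the same inputs should reproduce the identical set of orientations.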
@@ -189,20 +189,17 @@ def execution_stamp(class_name,function_name=None):


def hybrid_IA(dist,N,seed=None):
    rng = np.random.default_rng(seed)
    N_opt_samples = max(np.count_nonzero(dist),N)                                                  # random subsampling if too little samples requested
    N_opt_samples,N_inv_samples = (max(np.count_nonzero(dist),N),0)                                # random subsampling if too little samples requested

    scale_,scale,inc_factor = (0.0,float(N_opt_samples),1.0)
    while (not np.isclose(scale, scale_)) and (N_inv_samples != N_opt_samples):
        repeats = np.rint(scale*dist).astype(int)
        N_inv_samples = np.sum(repeats)
    while (not np.isclose(scale, scale_)) and (N_inv_samples != N_opt_samples):
        scale_,scale,inc_factor = (scale,scale+inc_factor*0.5*(scale - scale_), inc_factor*2.0) \
                                  if N_inv_samples < N_opt_samples else \
                                  (scale_,0.5*(scale_ + scale), 1.0)
        repeats = np.rint(scale*dist).astype(int)
        N_inv_samples = np.sum(repeats)

    return np.repeat(np.arange(len(dist)),repeats)[rng.permutation(N_inv_samples)[:N]]
    return np.repeat(np.arange(len(dist)),repeats)[np.random.default_rng(seed).permutation(N_inv_samples)[:N]]


####################################################################################################
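The hunk above interleaves the old and new bodies of hybrid_IA. Read together with the commit message, the helper after this commit presumably assembles as follows (a sketch: every statement appears in the hunk, but the exact placement of the repeats/N_inv_samples lines inside the loop is inferred rather than shown with +/- markers here):

import numpy as np

def hybrid_IA(dist,N,seed=None):
    # hybrid integer approximation (IA): return N indices into dist whose frequencies follow dist
    N_opt_samples,N_inv_samples = (max(np.count_nonzero(dist),N),0)                                # random subsampling if too few samples requested

    scale_,scale,inc_factor = (0.0,float(N_opt_samples),1.0)
    while (not np.isclose(scale, scale_)) and (N_inv_samples != N_opt_samples):
        repeats = np.rint(scale*dist).astype(int)                                                  # integer repeat count per bin at the current scale
        N_inv_samples = np.sum(repeats)
        scale_,scale,inc_factor = (scale,scale+inc_factor*0.5*(scale - scale_), inc_factor*2.0) \
                                  if N_inv_samples < N_opt_samples else \
                                  (scale_,0.5*(scale_ + scale), 1.0)                               # grow the search interval while undershooting, bisect once overshooting

    return np.repeat(np.arange(len(dist)),repeats)[np.random.default_rng(seed).permutation(N_inv_samples)[:N]]

Initializing N_inv_samples to 0 in the tuple assignment is what makes the old pre-loop computation of repeats and N_inv_samples unnecessary: for any positive number of requested samples the loop condition holds on entry, and the first iteration computes both values before they are used. Creating the Generator only inside the return statement likewise removes the separate rng variable. For example, hybrid_IA(np.array([0.2,0.5,0.3]),10,seed=0) should yield a shuffled index array containing 0 twice, 1 five times, and 2 three times.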