updated hybridIA sampling to work with new format

cropLinearODF is not working for the new format, but filterTable should be able to do the task
Martin Diehl 2015-04-27 05:30:29 +00:00
parent 97aba96ff8
commit 906c3f63a1
3 changed files with 76 additions and 187 deletions
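As the commit message notes, cropLinearODF is dropped and filterTable is expected to take over cropping of the binned ODF tables. In the new phi1/Phi/phi2/intensity format that job is a plain row filter: keep the '#' comment header and drop every data row whose Euler angles lie outside the desired limits. The snippet below is a minimal, hypothetical sketch of that operation (it is not the filterTable script; the 90° limits, the stdin/stdout handling, and the helper itself are assumptions for illustration only).

#!/usr/bin/env python
# hypothetical sketch: crop a binned ODF table (phi1 Phi phi2 intensity) to Euler-angle limits
import sys

limits = {'phi1': 90.0, 'Phi': 90.0, 'phi2': 90.0}            # crop limits in degrees (example values)

lines  = sys.stdin.readlines()
header = [l for l in lines if l.startswith('#')]              # keep '#' comment header unchanged
body   = [l for l in lines if not l.startswith('#') and l.strip()]
labels = body[0].split()                                      # column labels: phi1 Phi phi2 intensity
col    = {name: labels.index(name) for name in limits}

sys.stdout.writelines(header)
sys.stdout.write(body[0])                                     # re-emit the label line
for line in body[1:]:
  values = line.split()
  if all(float(values[col[k]]) <= limits[k] for k in limits): # keep rows inside the crop box
    sys.stdout.write(line)

Used as a stdin/stdout filter it leaves the header and label line untouched, which is all the updated sampling script below needs.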

View File

@@ -2,7 +2,7 @@
 # Header: Project1::test1000::All data::Binned: Size=5.0, HW=5.0 4/15/2015
 # Bin Size: 5.0º
 # Gaussian Smoothing: 5.0º
-phi1 PHI phi2 intensity
+phi1 Phi phi2 intensity
 2.50 2.50 2.50 0.980476
 2.50 2.50 7.50 0.773179
 2.50 2.50 12.50 0.393272

View File

@@ -1,105 +0,0 @@
#!/usr/bin/env python
import os,sys,math,re
# --- helper functions ---
def binAsBins(bin,intervals):
""" explode compound bin into 3D bins list """
bins = [0]*3
bins[0] = (bin//(intervals[1] * intervals[2])) % intervals[0]
bins[1] = (bin//intervals[2]) % intervals[1]
bins[2] = bin % intervals[2]
return bins
def binsAsBin(bins,intervals):
""" implode 3D bins into compound bin """
return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]
def EulersAsBins(Eulers,intervals,deltas,center):
""" return list of Eulers translated into 3D bins list """
return [\
int((euler+(0.5-center)*delta)//delta)%interval \
for euler,delta,interval in zip(Eulers,deltas,intervals) \
]
def binAsEulers(bin,intervals,deltas,center):
""" compound bin number translated into list of Eulers """
Eulers = [0.0]*3
Eulers[2] = (bin%intervals[2] + center)*deltas[2]
Eulers[1] = (bin//intervals[2]%intervals[1] + center)*deltas[1]
Eulers[0] = (bin//(intervals[2]*intervals[1]) + center)*deltas[0]
return Eulers
# check usage
try:
inName = sys.argv[1]
outLimits = sys.argv[2:5]
except:
print "usage:",sys.argv[0],"nameLinearODF limitPhi1 limitPHI limitPhi2"
sys.exit(1);
#open binned ODF
try:
inFile = open(inName,'r')
except:
print 'unable to open binnedODF:', inName;
sys.exit(1);
# process header info
ODF = {}
inLimits = [math.radians(int(float(limit))) for limit in inFile.readline().split()]
outLimits = [math.radians(int(float(limit))) for limit in outLimits]
deltas = [math.radians(float(delta)) for delta in inFile.readline().split()]
inIntervals = [int(limit/delta) for limit,delta in zip(inLimits,deltas)]
outIntervals = [int(limit/delta) for limit,delta in zip(outLimits,deltas)]
inBins = inIntervals[0]*inIntervals[1]*inIntervals[2]
print 'Limit:', [math.degrees(limit) for limit in inLimits]
print 'Crop:', [math.degrees(limit) for limit in outLimits]
print 'Delta:', [math.degrees(delta) for delta in deltas]
print 'Interval:', inIntervals
print 'Interval:', outIntervals
centering = inFile.readline()
if re.search('cell',centering.lower()):
ODF['center'] = 0.5
print 'cell-centered data (offset %g)'%ODF['center']
else:
ODF['center'] = 0.0
print 'vertex-centered data (offset %g)'%ODF['center']
inFile.readline() # skip blank delimiter
# read linear binned data
inODF = map(float,inFile.readlines())
inFile.close()
if len(inODF) != inBins:
print 'expecting', inBins, 'values but got', len(inODF)
sys.exit(1)
try:
outName = os.path.splitext(inName)[0]+'_%ix%ix%i'%(outIntervals[0],outIntervals[1],outIntervals[2])+'.linearODF'
outFile = open(outName,'w')
except:
print 'unable to write:',outName
sys.exit(1)
outFile.write('%g\t%g\t%g\n'%(\
math.degrees(outIntervals[0]*deltas[0]),\
math.degrees(outIntervals[1]*deltas[1]),\
math.degrees(outIntervals[2]*deltas[2]) ))
outFile.write('%i\t%i\t%i\n'%(math.degrees(deltas[0]),math.degrees(deltas[1]),math.degrees(deltas[2])))
outFile.write('%s-centered data\n'%{True:'vertex',False:'cell'}[ODF['center']==0.0])
outFile.write('\n')
for phi1 in range(outIntervals[0]):
for Phi in range(outIntervals[1]):
for phi2 in range(outIntervals[2]):
outFile.write('%g\n'%(inODF[((phi1*inIntervals[1])+Phi)*inIntervals[2]+phi2]))
outFile.close()

View File

@@ -8,8 +8,6 @@ import numpy as np
 scriptID = string.replace('$Id$','\n','\\n')
 scriptName = scriptID.split()[1]
-random.seed(1)
 # --- helper functions ---
 def binAsBins(bin,intervals):
@@ -78,10 +76,12 @@ def directInversion (ODF,nSamples):
 incFactor = 1.0
 nInvSamplesUpper = directInvRepetitions(ODF['dV_V'],scaleUpper)
 nIter += 1
-print '%i:(%12.11f,%12.11f) %i <= %i <= %i'%(nIter,scaleLower,scaleUpper,nInvSamplesLower,nOptSamples,nInvSamplesUpper)
+file['croak'].write('%i:(%12.11f,%12.11f) %i <= %i <= %i\n'\
+%(nIter,scaleLower,scaleUpper,nInvSamplesLower,nOptSamples,nInvSamplesUpper))
 nInvSamples = nInvSamplesUpper
 scale = scaleUpper
-print 'created set of',nInvSamples,'samples (',float(nInvSamples)/nOptSamples-1.0,') with scaling',scale,'delivering',nSamples
+file['croak'].write('created set of %i samples (%12.11f) with scaling %12.11f delivering %i\n'\
+%(nInvSamples,float(nInvSamples)/nOptSamples-1.0,scale,nSamples))
 repetition = [None]*ODF['nBins'] # preallocate and clear
 for bin in range(ODF['nBins']): # loop over bins
@@ -181,8 +181,7 @@ def TothVanHoutteSTAT (ODF,nSamples):
 reconstructedODF[bin] += unitInc
 countSamples += 1
 indexSelector += 1
-print 'created set of',countSamples,'when asked to deliver',nSamples
+file['croak'].write('created set of %i when asked to deliver %i\n'%(countSamples,nSamples))
 return orientations, reconstructedODF
@@ -190,35 +189,23 @@ def TothVanHoutteSTAT (ODF,nSamples):
 # --------------------------------------------------------------------
 # MAIN
 # --------------------------------------------------------------------
-identifiers = {
-'limit': ['phi1','phi','phi2'],
-'delta': ['phi1','phi','phi2'],
-}
-mappings = {
-'limit': lambda x: math.radians(float(x)),
-'delta': lambda x: math.radians(float(x)),
-'origin': lambda x: str(x),
-}
 parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
 Transform linear binned data into Euler angles.
 """, version = scriptID)
 parser.add_option('-n', '--number', dest='number', type='int', metavar = 'int',
-help='the number of orientations needed to be generated, the default is [%default]')
+help='number of orientations to be generated [%default]')
 parser.add_option('-a','--algorithm', dest='algorithm', type='string', metavar = 'string',
-help='''The algorithm adopted, three algorithms are provided,
-that is:
-[IA]: direct inversion,
-[STAT]: Van Houtte,
-[MC]: Monte Carlo.
-the default is [%default].''') #make (multiple) choice
+help='sampling algorithm. IA: direct inversion, STAT: Van Houtte, MC: Monte Carlo. [%default].') #make choice
 parser.add_option('-p','--phase', dest='phase', type='int', metavar = 'int',
 help='phase index to be used [%default]')
 parser.add_option('--crystallite', dest='crystallite', type='int', metavar = 'int',
 help='crystallite index to be used [%default]')
+parser.add_option('-r', '--rnd', dest='randomSeed', type='int', metavar='int', \
+help='seed of random number generator [%default]')
+parser.set_defaults(randomSeed = None)
 parser.set_defaults(number = 500)
 parser.set_defaults(algorithm = 'IA')
 parser.set_defaults(phase = 1)
@@ -228,80 +215,86 @@ parser.set_defaults(crystallite = 1)
 nSamples = options.number
 methods = [options.algorithm]
+if options.randomSeed == None:
+options.randomSeed = int(os.urandom(4).encode('hex'), 16)
+random.seed(options.randomSeed)
 #--- setup file handles ---------------------------------------------------------------------------
 files = []
 if filenames == []:
-files.append({'name':'STDIN',
-'input':sys.stdin,
-'output':sys.stdout,
-'croak':sys.stderr,
-})
+files.append({'name':'STDIN','input':sys.stdin,'output':sys.stdout,'croak':sys.stderr})
 else:
 for name in filenames:
 if os.path.exists(name):
-files.append({'name':name,
-'input':open(name),
-'output':open(name+'_tmp','w'),
-'croak':sys.stdout,
-})
+files.append({'name':name,'input':open(name),'output':open(name+'_tmp','w'),'croak':sys.stdout})
 #--- loop over input files ------------------------------------------------------------------------
 for file in files:
 file['croak'].write('\033[1m' + scriptName + '\033[0m: ' + (file['name'] if file['name'] != 'STDIN' else '') + '\n')
-ODF = {
-'limit': np.empty(3,'d'),
-'delta': np.empty(3,'d'),
-'interval':np.empty(3,'i'),
-'origin': ''
-}
 table = damask.ASCIItable(file['input'],file['output'],buffered = False)
 table.head_read()
-for header in table.info:
-headitems = map(str.lower,header.split())
-if len(headitems) == 0: continue
-if headitems[0] in mappings.keys():
-if headitems[0] in identifiers.keys():
-for i in xrange(len(identifiers[headitems[0]])):
-ODF[headitems[0]][i] = \
-mappings[headitems[0]](headitems[headitems.index(identifiers[headitems[0]][i])+1])
-else:
-ODF[headitems[0]] = mappings[headitems[0]](headitems[1])
-ODF['interval'] = np.array([int(round(limit/delta)) for limit,delta in zip(ODF['limit'],ODF['delta'])])
-ODF['nBins'] = ODF['interval'].prod()
-if re.search('boundary',ODF['origin'].lower()):
-ODF['center'] = 0.5
-else:
-ODF['center'] = 0.0
-binnedODF=table.data_readArray([table.labels.index('intensity')])
+# --------------- figure out columns in table ------------------------------------------------------
+column = {}
+pos = 0
+keys = ['phi1','Phi','phi2','intensity']
+for key in keys:
+if key not in table.labels:
+file['croak'].write('column %s not found...\n'%key)
+else:
+column[key] = pos
+pos+=1
+if pos != 4: continue
+binnedODF = table.data_readArray(keys)
+# --------------- figure out limits (left/right), delta, and interval -----------------------------
+ODF = {}
+limits = np.array([[np.min(table.data[:,column['phi1']]),\
+np.min(table.data[:,column['Phi']]),\
+np.min(table.data[:,column['phi2']])],\
+[np.max(table.data[:,column['phi1']]),\
+np.max(table.data[:,column['Phi']]),\
+np.max(table.data[:,column['phi2']])]])
+ODF['limit'] = np.radians(limits[1,:])
+if all(limits[0,:]<1e-8): # vertex centered
+ODF['center'] = 0.0
+else: # cell centered
+ODF['center'] = 0.5
+eulers = [{},{},{}]
+for i in xrange(table.data.shape[0]):
+for j in xrange(3):
+eulers[j][str(table.data[i,column[keys[j]]])] = True # remember eulers along phi1, Phi, and phi2
+ODF['interval'] = np.array([len(eulers[0]),len(eulers[1]),len(eulers[2]),],'i') # steps are number of distict values
+ODF['nBins'] = ODF['interval'].prod()
+ODF['delta'] = np.radians(np.array(limits[1,0:3]-limits[0,0:3])/(ODF['interval']-1))
 if binnedODF[0] != ODF['nBins']:
-print 'expecting', ODF['nBins'], 'values but got', len(linesBinnedODF)
-sys.exit(1)
+file['croak'].write('expecting %i values but got %i'%(ODF['nBins'],len(linesBinnedODF)))
+continue
 # build binnedODF array
 sumdV_V = 0.0
 ODF['dV_V'] = [None]*ODF['nBins']
 ODF['nNonZero'] = 0
-dg = ODF['delta'][0]*2*math.sin(ODF['delta'][1]/2.0)*ODF['delta'][2]
-for bin in range(ODF['nBins']):
-ODF['dV_V'][bin] = \
-max(0.0,table.data[bin,0]) * dg * \
-math.sin(((bin//ODF['interval'][2])%ODF['interval'][1]+ODF['center'])*ODF['delta'][1])
-if ODF['dV_V'][bin] > 0.0:
-sumdV_V += ODF['dV_V'][bin]
-ODF['nNonZero'] += 1
-for bin in range(ODF['nBins']): ODF['dV_V'][bin] /= sumdV_V # normalize dV/V
-print 'non-zero fraction:', float(ODF['nNonZero'])/ODF['nBins'],'(%i/%i)'%(ODF['nNonZero'],ODF['nBins'])
-print 'Volume integral of ODF:', sumdV_V
-print 'Reference Integral:', ODF['limit'][0]*ODF['limit'][2]*(1-math.cos(ODF['limit'][1]))
+dg = ODF['delta'][0]*2.0*math.sin(ODF['delta'][1]/2.0)*ODF['delta'][2]
+for b in range(ODF['nBins']):
+ODF['dV_V'][b] = \
+max(0.0,table.data[b,column['intensity']]) * dg * \
+math.sin(((b//ODF['interval'][2])%ODF['interval'][1]+ODF['center'])*ODF['delta'][1])
+if ODF['dV_V'][b] > 0.0:
+sumdV_V += ODF['dV_V'][b]
+ODF['nNonZero'] += 1
+for b in range(ODF['nBins']): ODF['dV_V'][b] /= sumdV_V # normalize dV/V
+file['croak'].write('non-zero fraction: %12.11f (%i/%i)\n'\
+%(float(ODF['nNonZero'])/ODF['nBins'],ODF['nNonZero'],ODF['nBins']))
+file['croak'].write('Volume integral of ODF: %12.11f\n'%sumdV_V)
+file['croak'].write('Reference Integral: %12.11f\n'\
+%(ODF['limit'][0]*ODF['limit'][2]*(1-math.cos(ODF['limit'][1]))))
 # call methods
 Functions = {'IA': 'directInversion', 'STAT': 'TothVanHoutteSTAT', 'MC': 'MonteCarloBins'}
@@ -326,15 +319,16 @@ for file in files:
 indivSum['orig'] += ODF['dV_V'][bin]
 indivSquaredSum['orig'] += ODF['dV_V'][bin]**2
-print 'sqrt(N*)RMSD of ODFs:\t', math.sqrt(nSamples*squaredDiff[method])
-print 'RMSrD of ODFs:\t', math.sqrt(squaredRelDiff[method])
-print 'rMSD of ODFs:\t', squaredDiff[method]/indivSquaredSum['orig']
-print 'nNonZero correlation slope:\t', (ODF['nNonZero']*mutualProd[method]-indivSum['orig']*indivSum[method])/\
-(ODF['nNonZero']*indivSquaredSum['orig']-indivSum['orig']**2)
-print 'nNonZero correlation confidence:\t',\
-(mutualProd[method]-indivSum['orig']*indivSum[method]/ODF['nNonZero'])/\
-(ODF['nNonZero']*math.sqrt((indivSquaredSum['orig']/ODF['nNonZero']-(indivSum['orig']/ODF['nNonZero'])**2)*\
-(indivSquaredSum[method]/ODF['nNonZero']-(indivSum[method]/ODF['nNonZero'])**2)))
+file['croak'].write('sqrt(N*)RMSD of ODFs:\t %12.11f\n'% math.sqrt(nSamples*squaredDiff[method]))
+file['croak'].write('RMSrD of ODFs:\t %12.11f\n'%math.sqrt(squaredRelDiff[method]))
+file['croak'].write('rMSD of ODFs:\t %12.11f\n'%(squaredDiff[method]/indivSquaredSum['orig']))
+file['croak'].write('nNonZero correlation slope:\t %12.11f\n'\
+%((ODF['nNonZero']*mutualProd[method]-indivSum['orig']*indivSum[method])/\
+(ODF['nNonZero']*indivSquaredSum['orig']-indivSum['orig']**2)))
+file['croak'].write( 'nNonZero correlation confidence:\t %12.11f\n'\
+%((mutualProd[method]-indivSum['orig']*indivSum[method]/ODF['nNonZero'])/\
+(ODF['nNonZero']*math.sqrt((indivSquaredSum['orig']/ODF['nNonZero']-(indivSum['orig']/ODF['nNonZero'])**2)*\
+(indivSquaredSum[method]/ODF['nNonZero']-(indivSum[method]/ODF['nNonZero'])**2)))))
 if method == 'IA' and nSamples < ODF['nNonZero']:
 strOpt = '(%i)'%ODF['nNonZero']
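The main change in the hunks above is that the sampling script no longer reads 'limit'/'delta' keywords from the table header but infers the grid from the data itself: the number of distinct values per Euler angle gives the interval, the span divided by (interval - 1) gives the step size, and a minimum of (near) zero marks vertex-centered rather than cell-centered data. A self-contained sketch of that inference (a hypothetical helper, not the DAMASK script itself; the 360°/90°/90° extent in the example is assumed for illustration):

import numpy as np

def infer_grid(phi1, Phi, phi2):
  """Recover (interval, delta in degrees, centering offset) from binned Euler-angle columns."""
  eulers   = np.column_stack((phi1, Phi, phi2)).astype(float)
  lo, hi   = eulers.min(axis=0), eulers.max(axis=0)
  interval = np.array([len(np.unique(eulers[:, j])) for j in range(3)])  # distinct values per angle
  delta    = (hi - lo) / (interval - 1)                                  # grid spacing
  center   = 0.0 if np.all(lo < 1e-8) else 0.5                          # vertex- vs cell-centered
  return interval, delta, center

# example: a 5-degree, cell-centered grid like the test file above
phi1, Phi, phi2 = np.meshgrid(np.arange(2.5, 360.0, 5.0),
                              np.arange(2.5,  90.0, 5.0),
                              np.arange(2.5,  90.0, 5.0), indexing='ij')
print(infer_grid(phi1.ravel(), Phi.ravel(), phi2.ravel()))              # interval [72 18 18], delta [5. 5. 5.], center 0.5

Each bin's weight dV/V is then built from intensity * dphi1 * 2*sin(dPhi/2) * dphi2 * sin(Phi) and normalized, exactly as in the dV_V loop of the hunk above.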