fixed bug in ASCIItable when updating labels for data_readArray, fixed check for Euler angle presence in tessellation, improved help
parent 7ea350b90e
commit fa1368204d
@@ -258,12 +258,14 @@ class ASCIItable():
     read whole data of all (given) labels as numpy array
     '''

-    if labels == []: indices = range(self.__IO__['validReadSize'])          # use all columns
-    else:
-      indices = self.labels_index(labels)                                   # use specified columns
-      dictionary = dict(zip(indices, labels))
-      self.labels_index = range(len(dictionary))
-      self.labels = [dictionary[label] for label in sorted(dictionary)]
+    if labels != []:                                                         # read only some labels
+      indices = self.labels_index(labels)                                    # get indices to read
+      tempDict = dict(zip(indices, labels))                                  # column <> label connections
+      self.labels = [tempDict[label] for label in sorted(tempDict)]          # sort labels to reflect order from np.readtxt
+      self.__IO__['validReadSize'] = len(labels)
+    else:
+      indices = range(self.__IO__['validReadSize'])                          # use all columns

     try:
       self.data_rewind()                                                    # try to wind back to start of data
     except:
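The removed branch rebound self.labels_index to a plain range, clobbering the lookup method; the new branch keeps it intact and instead sorts the requested labels by their column index so they match the column order of the array returned by np.loadtxt. A minimal standalone sketch of that reordering logic, using a toy header and illustrative names rather than the ASCIItable API:

import numpy as np

def read_selected_columns(lines, header, wanted):
  '''read only the columns named in wanted, keeping the file's column order'''
  indices        = [header.index(label) for label in wanted]       # column index per requested label
  by_column      = dict(zip(indices, wanted))                      # column <> label connections
  ordered_labels = [by_column[i] for i in sorted(by_column)]       # labels in the order np.loadtxt delivers them
  data           = np.loadtxt(lines, usecols=sorted(indices))
  return ordered_labels, data

header = ['x', 'y', 'z', 'phi1', 'Phi', 'phi2']
rows   = ['0 1 2 10 20 30', '3 4 5 40 50 60']
labels, data = read_selected_columns(rows, header, ['Phi', 'x'])
print(labels)                                                      # ['x', 'Phi'] -- sorted by column position
print(data)                                                        # columns come back in that same order
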
@@ -40,19 +40,11 @@ parser.set_defaults(data = True)
 #--- setup file handles --------------------------------------------------------------------------
 files = []
 if filenames == []:
-  files.append({'name':'STDIN',
-                'input':sys.stdin,
-                'output':sys.stdout,
-                'croak':sys.stderr,
-               })
+  files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr, })
 else:
   for name in filenames:
     if os.path.exists(name):
-      files.append({'name':name,
-                    'input':open(name),
-                    'output':sys.stdout,
-                    'croak':sys.stdout,
-                   })
+      files.append({'name':name, 'input':open(name), 'output':sys.stdout, 'croak':sys.stdout, })

 #--- loop over input files ------------------------------------------------------------------------
 for file in files:
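For context, the same handle-dictionary pattern in isolation: every input, whether a named file or STDIN, is wrapped in a dict with 'input', 'output', and 'croak' handles so the processing loop never has to distinguish the two cases. A runnable sketch, with argument handling reduced to sys.argv instead of the script's option parser:

import os, sys

filenames = sys.argv[1:]
files = []
if filenames == []:                                          # no arguments: read from STDIN
  files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr, })
else:
  for name in filenames:
    if os.path.exists(name):
      files.append({'name':name, 'input':open(name), 'output':sys.stdout, 'croak':sys.stdout, })

for file in files:
  file['croak'].write('processing %s\n'%file['name'])        # diagnostics go to the 'croak' handle
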
@@ -31,43 +31,43 @@ def meshgrid2(*arrs):
   return tuple(ans)

def laguerreTessellation(undeformed, coords):

  bestdist = np.ones(len(undeformed)) * np.finfo('d').max
  bestseed = np.zeros(len(undeformed))

  for i,seed in enumerate(coords):
    for copy in np.array([[1, 0, 0, ],
                          [0, 1, 0, ],
                          [0, 0, 1, ],
                          [-1, 0, 0, ],
                          [0, -1, 0, ],
                          [0, 0, -1, ],
                          [1, 1, 0, ],
                          [1, 0, 1, ],
                          [0, 1, 1, ],
                          [-1, 1, 0, ],
                          [-1, 0, 1, ],
                          [0, -1, 1, ],
                          [-1, -1, 0, ],
                          [-1, 0, -1, ],
                          [0, -1, -1, ],
                          [1, -1, 0, ],
                          [1, 0, -1, ],
                          [0, 1, -1, ],
                          [1, 1, 1, ],
                          [-1, 1, 1, ],
                          [1, -1, 1, ],
                          [1, 1, -1, ],
                          [-1, -1, -1, ],
                          [1, -1, -1, ],
                          [-1, 1, -1, ],
                          [-1, -1, 1, ]]).astype(float):

      diff = undeformed - np.repeat((seed+info['size']*copy).reshape(3,1),len(undeformed),axis=1).T
      dist = np.sum(diff*diff,axis=1) - weights[i]

      bestseed = np.where(dist < bestdist, np.ones(len(undeformed))*(i+1), bestseed)
      bestdist = np.where(dist < bestdist, dist, bestdist)

  return bestseed
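laguerreTessellation assigns every undeformed grid point to the seed that minimizes the power distance |x - (seed + size*copy)|^2 - weight over the periodic images of the seed. A self-contained numerical sketch of that assignment; the box size, seeds, weights, and points below are made-up example data, not the script's globals, and unlike the loop above it also visits the unshifted copy [0,0,0]:

import numpy as np
from itertools import product

size    = np.array([1.0, 1.0, 1.0])                           # periodic box size (example)
seeds   = np.random.rand(5, 3) * size                         # example seed positions
weights = np.random.rand(5) * 0.01                            # example Laguerre weights
points  = np.random.rand(20, 3) * size                        # example query points, one per row

bestdist = np.full(len(points), np.finfo('d').max)
bestseed = np.zeros(len(points), dtype=int)

for i, seed in enumerate(seeds):
  for copy in product([-1, 0, 1], repeat=3):                  # all 27 periodic images
    diff = points - (seed + size*np.array(copy))
    dist = np.sum(diff*diff, axis=1) - weights[i]             # power distance of a Laguerre tessellation
    bestseed = np.where(dist < bestdist, i+1, bestseed)       # grain IDs are 1-based, as in the script
    bestdist = np.where(dist < bestdist, dist, bestdist)

print(bestseed)                                               # winning grain ID per point
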
# --------------------------------------------------------------------
@@ -109,8 +109,6 @@ parser.add_option('--secondphase', type='float', dest='secondphase', metavar= 'f
                   help='volume fraction of randomly distribute second phase [%default]')
 parser.add_option('--laguerre', dest='laguerre', action='store_true',
                   help='for weighted voronoi (Laguerre) tessellation [%default]')
-
-
 parser.set_defaults(grid = (0,0,0))
 parser.set_defaults(size = (0.0,0.0,0.0))
 parser.set_defaults(origin = (0.0,0.0,0.0))
@@ -159,7 +157,7 @@ for file in files:
     file['croak'].write('no coordinate data (1_coords/x) found ...')
     continue

-  hasEulers = np.any(np.asarray(table.labels_index(['phi1','Phi','phi2'])) == -1)
+  hasEulers = np.all(np.asarray(table.labels_index(['phi1','Phi','phi2'])) != -1)
   if hasEulers:
     labels += ['phi1','Phi','phi2']
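The replaced expression was true exactly when at least one of the three Euler labels was missing (labels_index apparently returns -1 for an absent label), i.e. the opposite of "Euler angles are present"; the new expression requires all three indices to be valid. A tiny check with hypothetical index lookups:

import numpy as np

all_present  = np.asarray([4, 5, 6])      # hypothetical column indices: every label found
phi2_missing = np.asarray([4, 5, -1])     # hypothetical lookup with 'phi2' absent

print(np.any(all_present == -1), np.any(phi2_missing == -1))  # False True  -- old check: "something is missing"
print(np.all(all_present != -1), np.all(phi2_missing != -1))  # True False  -- new check: "everything was found"
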
@@ -174,8 +172,8 @@ for file in files:
   table.data_readArray(labels)
   coords = table.data[:,table.labels_index(coords)]
   eulers = table.data[:,table.labels_index(['phi1','Phi','phi2'])] if hasEulers else np.zeros(3*len(coords))
-  grain = table.data[:,table.labels_index('microstructure')] if hasGrains else 1+np.arange(len(coords))
-  weights = table.data[:,table.labels_index('weight')] if hasWeight else np.zeros(len(coords))
+  grain = table.data[:,table.labels.index('microstructure')] if hasGrains else 1+np.arange(len(coords))
+  weights = table.data[:,table.labels.index('weight')] if hasWeight else np.zeros(len(coords))
   grainIDs = np.unique(grain).astype('i')
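The lookup switches from labels_index (a position in the original file header) to labels.index (a position in the reduced label list) because data_readArray with an explicit label list now stores only those columns, ordered as in the updated self.labels. A toy illustration of the difference, not using the ASCIItable class:

import numpy as np

file_labels  = ['x', 'y', 'z', 'microstructure', 'weight']      # columns as they appear in the file
wanted       = ['microstructure', 'weight']                     # labels passed to data_readArray
file_columns = sorted(file_labels.index(l) for l in wanted)     # columns actually read: [3, 4]
labels       = [file_labels[c] for c in file_columns]           # label order of the reduced array

data = np.array([[7, 0.1],
                 [9, 0.4]])                                     # reduced array holds only those two columns

print(file_labels.index('microstructure'))                      # 3 -- valid in the full file, out of range here
print(labels.index('microstructure'))                           # 0 -- matches the reduced array's columns
print(data[:, labels.index('microstructure')])                  # [ 7.  9.]
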
@@ -308,6 +306,4 @@ for file in files:
   if file['name'] != 'STDIN':
     table.input_close()
     table.output_close()
-    os.rename(file['name']+'_tmp',os.path.splitext(file['name'])[0] + \
-              {True: '_material.config',
-               False:'.geom'}[options.config])
+    os.rename(file['name']+'_tmp',os.path.splitext(file['name'])[0] + '_material.config' if options.config else '.geom')
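One thing to note about the one-line form: a conditional expression binds less tightly than +, so without parentheses the whole sum becomes the True branch and the filename stem is dropped in the False case. A quick check with a made-up filename (not taken from the diff):

import os

name   = 'example.seeds'                                             # made-up input filename
config = False                                                       # i.e. options.config not set

print(os.path.splitext(name)[0] + '_material.config' if config else '.geom')
# '.geom'        -- the stem is lost, because the + is swallowed by the True branch

print(os.path.splitext(name)[0] + ('_material.config' if config else '.geom'))
# 'example.geom' -- parenthesizing the conditional keeps the stem in both cases
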
@@ -36,22 +36,18 @@ Examples:

""", version = scriptID)

-parser.add_option('-p', '--positions', dest = 'pos', type = 'string',
-                  help = 'coordinate label')
+parser.add_option('-p', '--positions', dest = 'pos', metavar='string',
+                  help = 'coordinate label')
 parser.add_option('--boundingbox', dest = 'box', type = 'float', nargs = 6,
                   help = 'min (x,y,z) and max (x,y,z) to specify bounding box [auto]')
 parser.add_option('-i', '--index', dest = 'index', type = 'string',
                   help = 'microstructure index label')
-parser.add_option('-w','--white', dest = 'whitelist', action = 'extend', type = 'string', \
-                  help = 'white list of microstructure indices', metavar = '<LIST>')
-parser.add_option('-b','--black', dest = 'blacklist', action = 'extend', type = 'string', \
-                  help = 'black list of microstructure indices', metavar = '<LIST>')
-
+parser.add_option('-w','--white', dest = 'whitelist', action = 'extend',\
+                  help = 'white list of microstructure indices', metavar = '<LIST>')
+parser.add_option('-b','--black', dest = 'blacklist', action = 'extend',\
+                  help = 'black list of microstructure indices', metavar = '<LIST>')
 parser.set_defaults(pos = 'pos')
 parser.set_defaults(index = 'microstructure')
 parser.set_defaults(box = ())
-parser.set_defaults(whitelist = [])
-parser.set_defaults(blacklist = [])

 (options,filenames) = parser.parse_args()
@@ -62,10 +58,10 @@ datainfo = { # lis
                      'label':[]},
            }

-if options.pos != None: datainfo['vector']['label'] += [options.pos]
-if options.index != None: datainfo['scalar']['label'] += [options.index]
-options.whitelist = map(int,options.whitelist)
-options.blacklist = map(int,options.blacklist)
+datainfo['vector']['label'] += [options.pos]
+datainfo['scalar']['label'] += [options.index]
+if options.whitelist != None: options.whitelist = map(int,options.whitelist)
+if options.blacklist != None: options.blacklist = map(int,options.blacklist)

 #--- setup file handles --------------------------------------------------------------------------
 files = []
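The guard matters once whitelist/blacklist no longer default to empty lists: optparse leaves an option that was not given at None, and map(int, None) raises a TypeError. A minimal illustration:

blacklist = None                          # option not given, no list default any more
try:
  blacklist = map(int, blacklist)         # the old, unguarded call
except TypeError:
  print('map() rejects None')             # so the conversion must be skipped in that case

if blacklist != None:                     # the guarded version from the diff
  blacklist = map(int, blacklist)
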
@@ -126,10 +122,10 @@ for file in files:
 #--- filtering of grain voxels ------------------------------------------------------------------------------------
   mask = np.logical_and(\
          np.ones_like(table.data[:,3],bool) \
-          if options.whitelist == [] \
+          if options.whitelist == None \
          else np.in1d(table.data[:,3].ravel(), options.whitelist).reshape(table.data[:,3].shape),
          np.ones_like(table.data[:,3],bool) \
-          if options.blacklist == [] \
+          if options.blacklist == None \
          else np.invert(np.in1d(table.data[:,3].ravel(), options.blacklist).reshape(table.data[:,3].shape))
          )
   table.data = table.data[mask]
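A self-contained sketch of this whitelist/blacklist mask with a toy microstructure column standing in for table.data[:,3]; the None checks mirror the new option defaults:

import numpy as np

microstructure = np.array([1., 2., 3., 2., 4.])    # toy stand-in for table.data[:,3]
whitelist      = [2, 3]                            # keep only these indices (None = keep all)
blacklist      = None                              # drop these indices (None = drop none)

keep = np.ones_like(microstructure, bool) if whitelist == None \
       else np.in1d(microstructure.ravel(), whitelist).reshape(microstructure.shape)
drop = np.ones_like(microstructure, bool) if blacklist == None \
       else np.invert(np.in1d(microstructure.ravel(), blacklist).reshape(microstructure.shape))

mask = np.logical_and(keep, drop)
print(microstructure[mask])                        # [ 2.  3.  2.]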