more improved scripts

Martin Diehl 2016-03-01 21:35:59 +01:00
parent 9cfc27d295
commit 98476ea29c
10 changed files with 21 additions and 22 deletions

View File

@@ -102,7 +102,7 @@ for name in filenames:
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords),'i')
 size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)           # size from edge to edge = dim * n/(n-1)
-size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                # spacing for grid==1 equal to smallest among other spacings
+size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                # spacing for grid==1 set to smallest among other spacings
 delta = size/np.maximum(np.ones(3,'d'), grid)
 origin = mincorner - 0.5*delta                                                     # shift from cell center to corner
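The np.where line above only matters for degenerate axes: an axis with a single grid point has zero extent, so it borrows the smallest spacing found among the other axes. A minimal standalone sketch of that logic (the coordinates below are invented, not taken from the script):

  import numpy as np

  # hypothetical cell-center coordinates: 4 x 3 x 1 points, z being the degenerate axis
  coords = [np.linspace(0.125,0.875,4),
            np.linspace(0.25, 0.75, 3),
            np.array([0.5])]

  mincorner = np.array([min(c) for c in coords])
  maxcorner = np.array([max(c) for c in coords])
  grid = np.array([len(c) for c in coords],'i')
  size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)    # edge to edge = dim * n/(n-1)
  size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))         # z borrows spacing 0.25
  delta = size/np.maximum(np.ones(3,'d'), grid)                               # cell spacing per axis
  origin = mincorner - 0.5*delta                                              # shift cell center to corner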

View File

@@ -28,7 +28,7 @@ parser.add_option('-l','--label',
 (options,filenames) = parser.parse_args()
-if options.label == None:
+if options.label is None:
   parser.error('no grouping column specified.')
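The "== None" to "is None" substitution recurs throughout this commit. Beyond PEP 8 style, equality with None is fragile for objects that overload __eq__ (numpy arrays, for instance, compare element-wise), while "is" always tests identity. A small illustration, independent of the scripts above:

  import numpy as np

  a = np.arange(3)
  print(a == None)                   # element-wise: [False False False], not a single bool
  print(a is None)                   # identity: False
  # if a == None: ...                # would raise "truth value of an array ... is ambiguous"
  print(None == None, None is None)  # both True, but "is None" states the intent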

View File

@@ -77,11 +77,11 @@ minmax = np.array([np.array(options.xrange),
 grid = np.zeros(options.bins,'f')
 result = np.zeros((options.bins[0],options.bins[1],3),'f')
-if options.data == None: parser.error('no data columns specified.')
+if options.data is None: parser.error('no data columns specified.')
 labels = options.data
-if options.weight != None: labels += [options.weight]                              # prevent character splitting of single string value
+if options.weight is not None: labels += [options.weight]                          # prevent character splitting of single string value
 # --- loop over input files -------------------------------------------------------------------------
@@ -124,7 +124,7 @@ for name in filenames:
   x = int(options.bins[0]*(table.data[i,0]-minmax[0,0])/delta[0])
   y = int(options.bins[1]*(table.data[i,1]-minmax[1,0])/delta[1])
   if x >= 0 and x < options.bins[0] and y >= 0 and y < options.bins[1]:
-    grid[x,y] += 1. if options.weight == None else table.data[i,2]                 # count (weighted) occurrences
+    grid[x,y] += 1. if options.weight is None else table.data[i,2]                 # count (weighted) occurrences
 if options.normCol:
   for x in xrange(options.bins[0]):
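For reference, the second hunk above is part of a 2-D binning loop: each row is mapped to a cell of an options.bins-sized grid and counted, optionally weighted by a third column. Stripped of the ASCIItable plumbing, the core looks roughly like this (data, bin counts, and ranges are made up):

  import numpy as np

  data   = np.random.rand(1000,3)                   # columns: x, y, weight
  bins   = (10,10)
  minmax = np.array([[0.,1.],[0.,1.]])              # value range per axis
  delta  = minmax[:,1] - minmax[:,0]

  grid = np.zeros(bins,'f')
  for xval,yval,w in data:
      x = int(bins[0]*(xval - minmax[0,0])/delta[0])
      y = int(bins[1]*(yval - minmax[1,0])/delta[1])
      if 0 <= x < bins[0] and 0 <= y < bins[1]:
          grid[x,y] += w                            # or += 1. for plain counts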

View File

@ -86,7 +86,7 @@ for name in filenames:
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\ max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
],'d') # size from bounding box, corrected for cell-centeredness ],'d') # size from bounding box, corrected for cell-centeredness
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
packing = np.array(options.packing,'i') packing = np.array(options.packing,'i')

View File

@@ -43,7 +43,7 @@ parser.set_defaults(condition = '',
 (options,filenames) = parser.parse_args()
-if options.labels == None or options.formulae == None:
+if options.labels is None or options.formulae is None:
   parser.error('no formulae specified.')
 if len(options.labels) != len(options.formulae):
   parser.error('number of labels ({}) and formulae ({}) do not match.'.format(len(options.labels),len(options.formulae)))

View File

@@ -80,14 +80,14 @@ for name in filenames:
   positions = []
   for position,label in enumerate(table.labels):
-    if (options.whitelist == None or any([ position in table.label_indexrange(needle) \
+    if (options.whitelist is None or any([ position in table.label_indexrange(needle) \
                                            or fnmatch.fnmatch(label,needle) for needle in options.whitelist])) \
-    and (options.blacklist == None or not any([ position in table.label_indexrange(needle) \
+    and (options.blacklist is None or not any([ position in table.label_indexrange(needle) \
                                                 or fnmatch.fnmatch(label,needle) for needle in options.blacklist])):  # a label to keep?
       labels.append(label)                                                         # remember name...
       positions.append(position)                                                   # ...and position
-  if len(labels) > 0 and options.whitelist != None and options.blacklist == None:  # check whether reordering is possible
+  if len(labels) > 0 and options.whitelist is not None and options.blacklist is None:  # check whether reordering is possible
     whitelistitem = np.zeros(len(labels),dtype=int)
     for i,label in enumerate(labels):                                              # check each selected label
       match = [ positions[i] in table.label_indexrange(needle) \
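The whitelist/blacklist test above keeps a column when it matches at least one whitelist pattern and no blacklist pattern, where a pattern may be either a column index range or an fnmatch-style name. Reduced to the name matching alone, the selection reads roughly as follows (labels and patterns invented for illustration):

  import fnmatch

  labels    = ['pos.x','pos.y','pos.z','f','p','texture']
  whitelist = ['pos.*','texture']                   # keep what matches here ...
  blacklist = ['pos.z']                             # ... unless it also matches here

  keep = [label for label in labels
          if  (whitelist is None or     any(fnmatch.fnmatch(label,needle) for needle in whitelist))
          and (blacklist is None or not any(fnmatch.fnmatch(label,needle) for needle in blacklist))]
  print(keep)                                       # ['pos.x', 'pos.y', 'texture']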
@@ -119,7 +119,7 @@ for name in filenames:
 # ------------------------------------------ assemble header ---------------------------------------
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))                      # read ASCII header info
+  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
   table.labels_clear()
   table.labels_append(np.array(labels)[order])                                     # update with new label set
   table.head_write()

View File

@@ -112,7 +112,7 @@ for name in filenames:
   try:
     table = damask.ASCIItable(name = name,
                               buffered = False,
-                              labeled = options.label != None,
+                              labeled = options.label is not None,
                               readonly = True)
   except: continue
   table.report_name(scriptName,name)
@@ -162,8 +162,8 @@ for name in filenames:
   nodes -= boundingBox[0].repeat(np.prod(options.dimension+1)).reshape([3]+list(options.dimension+1))
   nodes *= (options.pixelsize*options.dimension/options.size).repeat(np.prod(options.dimension+1)).reshape([3]+list(options.dimension+1))
-  imagesize = (options.pixelsize*(boundingBox[1]-boundingBox[0])*options.dimension\
-               /options.size)[:2].astype('i')                                      # determine image size from number of cells in overall bounding box
+  imagesize = (options.pixelsize*(boundingBox[1]-boundingBox[0])*                  # determine image size from number of
+               options.dimension/options.size)[:2].astype('i')                     # cells in overall bounding box
   im = Image.new('RGBA',imagesize)
   draw = ImageDraw.Draw(im)
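The reformatted pair of lines only splits a long expression so the comment fits; the computation itself converts the physical bounding box into a canvas size in pixels. A reduced sketch of that step with invented pixelsize, dimension, and size values (Pillow's Image.new wants a plain tuple of ints):

  import numpy as np
  from PIL import Image, ImageDraw

  pixelsize   = 1
  dimension   = np.array([64,32,1])                 # cells per direction
  size        = np.array([2.0,1.0,0.1])             # physical extent
  boundingBox = np.array([[0.0,0.0,0.0],
                          [2.0,1.0,0.1]])

  imagesize = (pixelsize*(boundingBox[1]-boundingBox[0])*           # pixels covered by the
               dimension/size)[:2].astype('i')                      # bounding box, x and y only
  im   = Image.new('RGBA',tuple(int(v) for v in imagesize))         # 64 x 32 canvas
  draw = ImageDraw.Draw(im)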

View File

@@ -80,7 +80,7 @@ for name in filenames:
   try:
     table = damask.ASCIItable(name = name,
                               buffered = False,
-                              labeled = options.label != None,
+                              labeled = options.label is not None,
                               readonly = True)
   except: continue
   damask.util.report(scriptName,name)
@@ -104,7 +104,6 @@ for name in filenames:
     damask.util.croak(errors)
     table.close(dismiss = True)                                                    # close ASCII table file handles and delete output file
     continue
 # convert data to shape and arrange according to given options
   if options.dimension != []: table.data = table.data.reshape(options.dimension[1],options.dimension[0],3)
   if options.flipLR: table.data = np.fliplr(table.data)
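The last two lines turn a flat table of RGB triples into a rows x columns x 3 image array and optionally mirror it; the reshape convention (rows come from dimension[1], columns from dimension[0]) is easy to get backwards, so here is a tiny sketch with invented numbers:

  import numpy as np

  dimension = [3,2]                                               # columns, rows (as given on the command line)
  flat = np.arange(dimension[0]*dimension[1]*3).reshape(-1,3)     # one RGB triple per pixel

  img = flat.reshape(dimension[1],dimension[0],3)                 # rows x columns x RGB
  img = np.fliplr(img)                                            # mirror left-right (the flipLR option)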

View File

@@ -36,7 +36,7 @@ parser.set_defaults(key = [],
 (options,filenames) = parser.parse_args()
-if options.keys == None:
+if options.keys is None:
   parser.error('No sorting column(s) specified.')
 options.keys.reverse()                                                             # numpy sorts with most significant column as last
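The reversal in the last line matches how numpy's lexsort orders its keys: the last key passed is the most significant. A short self-contained illustration with invented data (whether the script calls np.lexsort downstream is not shown in this hunk):

  import numpy as np

  data = np.array([[2,1],
                   [1,3],
                   [2,0],
                   [1,2]])
  keys = [0,1]                                      # requested precedence: column 0, then column 1
  keys.reverse()                                    # lexsort wants the primary key last

  order = np.lexsort([data[:,k] for k in keys])     # sorts by column 0 first, column 1 second
  print(data[order])                                # [[1 2] [1 3] [2 0] [2 1]]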

View File

@@ -49,8 +49,8 @@ Polydata = reader.GetOutput()
 if Npoints != Ncells or Npoints != Nvertices:
   parser.error('Number of points, cells, and vertices in VTK differ from each other'); sys.exit()
-if options.scalar != None: datainfo['scalar']['label'] += options.scalar
-if options.color != None: datainfo['color']['label'] += options.color
+if options.scalar is not None: datainfo['scalar']['label'] += options.scalar
+if options.color is not None: datainfo['color']['label'] += options.color
 # ------------------------------------------ setup file handles ---------------------------------------