Simplified processing of ASCIItables by utilizing the improved class methods.

Modernized file looping.
Updated help.
This commit is contained in:
Philip Eisenlohr 2015-06-16 05:42:11 +00:00
parent c089ff9256
commit d57c7568dd
1 changed file with 53 additions and 66 deletions

View File

@@ -36,78 +36,67 @@ Examples:
""", version = scriptID)
parser.add_option('-p', '--positions', dest = 'pos', metavar='string',
parser.add_option('-p', '--positions', dest = 'pos', metavar = 'string',
help = 'coordinate label')
parser.add_option('--boundingbox', dest = 'box', type = 'float', nargs = 6,
help = 'min (x,y,z) and max (x,y,z) to specify bounding box [auto]')
parser.add_option('-i', '--index', dest = 'index', type = 'string',
parser.add_option('--boundingbox', dest = 'box', type = 'float', nargs = 6, metavar = ' '.join(['float']*6),
help = 'min (x,y,z) and max (x,y,z) coordinates of bounding box [auto]')
parser.add_option('-i', '--index', dest = 'index', type = 'string', metavar = 'string',
help = 'microstructure index label')
parser.add_option('-w','--white', dest = 'whitelist', action = 'extend',\
parser.add_option('-w','--white', dest = 'whitelist', action = 'extend',
help = 'white list of microstructure indices', metavar = '<LIST>')
parser.add_option('-b','--black', dest = 'blacklist', action = 'extend',\
parser.add_option('-b','--black', dest = 'blacklist', action = 'extend',
help = 'black list of microstructure indices', metavar = '<LIST>')
parser.set_defaults(pos = 'pos')
parser.set_defaults(index = 'microstructure')
parser.set_defaults(pos = 'pos',
index ='microstructure',
)
(options,filenames) = parser.parse_args()
datainfo = { # list of requested labels per datatype
'scalar': {'len':1,
'label':[]},
'vector': {'len':3,
'label':[]},
}
datainfo['vector']['label'] += [options.pos]
datainfo['scalar']['label'] += [options.index]
if options.whitelist != None: options.whitelist = map(int,options.whitelist)
if options.blacklist != None: options.blacklist = map(int,options.blacklist)
#--- setup file handles --------------------------------------------------------------------------
files = []
# --- loop over input files -------------------------------------------------------------------------
if filenames == []:
files.append({'name':'STDIN',
'input':sys.stdin,
'output':sys.stdout,
'croak':sys.stderr,
})
else:
for name in filenames:
if os.path.exists(name):
files.append({'name':name,
'input':open(name),
'output':open(os.path.splitext(name)[0]+'.seeds','w'),
'croak':sys.stdout,
})
filenames = ['STDIN']
#--- loop over input files ------------------------------------------------------------------------
for file in files:
file['croak'].write('\033[1m' + scriptName + '\033[0m: ' + (file['name'] if file['name'] != 'STDIN' else '') + '\n')
for name in filenames:
if name == 'STDIN':
file = {'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr}
file['croak'].write('\033[1m'+scriptName+'\033[0m\n')
else:
if not os.path.exists(name): continue
file = {'name':name,
'input':open(name),
'output':open(os.path.splitext(name)[0]+ \
('' if options.label == None else '_'+options.label)+ \
'.png','w'),
'croak':sys.stderr}
file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
table = damask.ASCIItable(file['input'],file['output'],
buffered = False) # make unbuffered ASCII_table
table.head_read() # read ASCII header info
table = damask.ASCIItable(file['input'],file['output'],buffered = False)
table.head_read()
# --------------- figure out columns to process
active = defaultdict(list)
column = defaultdict(dict)
for datatype,info in datainfo.items():
for label in info['label']:
foundIt = False
for key in ['1_'+label,label]:
if key in table.labels:
foundIt = True
active[datatype].append(label)
column[datatype][label] = table.labels.index(key) # remember columns of requested data
if not foundIt:
file['croak'].write('column %s not found...\n'%label)
break
# ------------------------------------------ process data ---------------------------------------
table.data_readArray(list(itertools.chain.from_iterable(map(lambda x:[x+i for i in range(datainfo['vector']['len'])],
[column['vector'][label] for label in active['vector']]))) +
[column['scalar'][label] for label in active['scalar']])
# ------------------------------------------ process data ------------------------------------------
errors = []
missing_labels = table.data_readArray(options.pos,options.label)
if len(missing_labels) > 0:
errors.append('column%s %s not found'%('s' if len(missing_labels) > 1 else '',
', '.join(missing_labels)))
for label, dim in {options.pos: 3,
options.label: 1}.iteritems():
if table.label_dimension(label) != dim:
errors.append('column %s has wrong dimension'%label)
if errors != []:
file['croak'].write('\n'.join(errors))
table.close(dismiss = True) # close ASCII table file handles and delete output file
continue
#--- finding bounding box ------------------------------------------------------------------------------------
boundingBox = np.array((np.amin(table.data[:,0:3],axis = 0),np.amax(table.data[:,0:3],axis = 0)))
if options.box:
@@ -126,7 +115,7 @@ for file in files:
else np.in1d(table.data[:,3].ravel(), options.whitelist).reshape(table.data[:,3].shape),
np.ones_like(table.data[:,3],bool) \
if options.blacklist == None \
else np.invert(np.in1d(table.data[:,3].ravel(), options.blacklist).reshape(table.data[:,3].shape))
else np.invert(np.in1d(table.data[:,3].ravel(), options.blacklist).reshape(table.data[:,3].shape))
)
table.data = table.data[mask]
@@ -135,10 +124,10 @@ for file in files:
# ------------------------------------------ assemble header ---------------------------------------
table.info = [
scriptID,
'size %s'%(' '.join(list(itertools.chain.from_iterable(zip(['x','y','z'],
map(str,boundingBox[1,:]-boundingBox[0,:])))))),
]
scriptID,
'size %s'%(' '.join(list(itertools.chain.from_iterable(zip(['x','y','z'],
map(str,boundingBox[1,:]-boundingBox[0,:])))))),
]
table.labels_clear()
table.labels_append(['1_coords','2_coords','3_coords','microstructure']) # implicitly switching label processing/writing on
table.head_write()
@@ -146,6 +135,4 @@ for file in files:
table.data_writeArray()
table.output_flush()
table.input_close() # close input ASCII table
if file['name'] != 'STDIN':
table.output_close() # close output ASCII table
table.close() # close ASCII tables