From deb2bd7dca07a42a5e2882526b738766579e9a43 Mon Sep 17 00:00:00 2001
From: Christoph Kords
Date: Mon, 13 May 2013 10:06:40 +0000
Subject: [PATCH] added option to process only IP- or node-based geometry/data

---
 processing/post/marc_deformedGeometry.py | 33 ++++---------
 processing/post/marc_extractData.py      | 63 +++++++++++++-----------
 2 files changed, 45 insertions(+), 51 deletions(-)

diff --git a/processing/post/marc_deformedGeometry.py b/processing/post/marc_deformedGeometry.py
index 1f0cfdf0d..45cceb801 100755
--- a/processing/post/marc_deformedGeometry.py
+++ b/processing/post/marc_deformedGeometry.py
@@ -2,36 +2,17 @@
 
 import os, sys, math, string, numpy, shutil
 import damask
-from optparse import OptionParser, Option
+from optparse import OptionParser
 
-# -----------------------------
-class MyOption(Option):
-# -----------------------------
-# used for definition of new option parser action 'extend', which enables to take multiple option arguments
-# taken from online tutorial http://docs.python.org/library/optparse.html
-
-  ACTIONS = Option.ACTIONS + ("extend",)
-  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
-  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
-  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
-
-  def take_action(self, action, dest, opt, value, values, parser):
-    if action == "extend":
-      lvalue = value.split(",")
-      values.ensure_value(dest, []).extend(lvalue)
-    else:
-      Option.take_action(self, action, dest, opt, value, values, parser)
-
-
 
 
 # -----------------------------
 # MAIN FUNCTION STARTS HERE
 # -----------------------------
 
 # --- input parsing
 
-parser = OptionParser(option_class=MyOption, usage='%prog [options] resultfile', description = """
+parser = OptionParser(usage='%prog [options] resultfile', description = """
 Extract data from a .t16 (MSC.Marc) results file.
 """ + string.replace('$Id$','\n','\\n') )
 
@@ -42,7 +23,8 @@ parser.add_option('-r','--range', dest='range', type='int', nargs=3, \
                   help='range of positions (or increments) to output (start, end, step) [all]')
 parser.add_option('--increments', action='store_true', dest='getIncrements', \
                   help='switch to increment range [%default]')
-
+parser.add_option('-t','--type', dest='type', type='choice', choices=['ipbased','nodebased'], \
+                  help='processed geometry type [ipbased and nodebased]')
 
 parser.set_defaults(dir = 'vtk')
 parser.set_defaults(getIncrements= False)
@@ -60,6 +42,11 @@ if not os.path.exists(filename+'.t16'):
   parser.print_help()
   parser.error('invalid file "%s" specified...'%filename+'.t16')
 
+if not options.type :
+  options.type = ['nodebased', 'ipbased']
+else:
+  options.type = [options.type]
+
 
 
 # --- more sanity checks
@@ -136,7 +123,7 @@ for incCount,position in enumerate(locations): # walk through locations
 
   # --- append displacements to corresponding files
 
-  for geomtype in ['nodebased', 'ipbased']:
+  for geomtype in options.type:
     outFilename = eval('"'+eval("'%%s_%%s_inc%%0%ii.vtk'%(math.log10(max(increments+[1]))+1)")+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
     print outFilename
     shutil.copyfile('%s_%s.vtk'%(filename,geomtype),outFilename)
diff --git a/processing/post/marc_extractData.py b/processing/post/marc_extractData.py
index 4654b5432..014b65d93 100755
--- a/processing/post/marc_extractData.py
+++ b/processing/post/marc_extractData.py
@@ -299,6 +299,8 @@ parser.add_option('-r','--range', dest='range', type='int', nargs=3, \
                   help='range of positions (or increments) to output (start, end, step) [all]')
 parser.add_option('--increments', action='store_true', dest='getIncrements', \
                   help='switch to increment range [%default]')
+parser.add_option('-t','--type', dest='type', type='choice', choices=['ipbased','nodebased'], \
+                  help='processed geometry type [ipbased and nodebased]')
 
 
 group_material = OptionGroup(parser,'Material identifier')
@@ -340,6 +342,11 @@ except:
   print('error: no valid Mentat release found')
   sys.exit(-1)
 
+if not options.type :
+  options.type = ['nodebased', 'ipbased']
+else:
+  options.type = [options.type]
+
 
 
 # --- initialize mesh data
@@ -403,38 +410,38 @@ for incCount,position in enumerate(locations): # walk through locations
   # --- write header
 
   outFilename = {}
-  for geomtype in ['nodebased','ipbased']:
+  for geomtype in options.type:
     outFilename[geomtype] = eval('"'+eval("'%%s_%%s_inc%%0%ii.txt'%(math.log10(max(increments+[1]))+1)")+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
     with open(outFilename[geomtype],'w') as myfile:
       writeHeader(myfile,stat,geomtype)
-
-  # --- write node based data
+
+      # --- write node based data
+
+      if geomtype == 'nodebased':
+        for n in range(stat['NumberOfNodes']):
+          myfile.write(str(n))
+          for l in range(stat['NumberOfNodalScalars']):
+            myfile.write('\t'+str(p.node_scalar(n,l)))
+          myfile.write('\n')
 
-  with open(outFilename['nodebased'],'a') as myfile:
-    for n in range(stat['NumberOfNodes']):
-      myfile.write(str(n))
-      for l in range(stat['NumberOfNodalScalars']):
-        myfile.write('\t'+str(p.node_scalar(n,l)))
-      myfile.write('\n')
-
-  # --- write ip based data
-
-  with open(outFilename['ipbased'],'a') as myfile:
-    for e in range(stat['NumberOfElements']):
-      if asciiFile:
-        print 'ascii postfile not yet supported'
-        sys.exit(-1)
-      else:
-        ipData = [[]]
-        for l in range(stat['NumberOfElementalScalars']):
-          data = p.element_scalar(e,l)
-          for i in range(len(data)):                              # at least as many nodes as ips
-            node = damask.core.mesh.mesh_get_nodeAtIP(str(p.element(e).type),i+1)   # fortran indexing starts at 1
-            if not node: break                                    # no more ips
-            while i >= len(ipData): ipData.append([])
-            ipData[i].extend([data[node-1].value])                # python indexing starts at 0
-        for i in range(len(ipData)):
-          myfile.write('\t'.join(map(str,[e,i]+ipData[i]))+'\n')
+      # --- write ip based data
+
+      elif geomtype == 'ipbased':
+        for e in range(stat['NumberOfElements']):
+          if asciiFile:
+            print 'ascii postfile not yet supported'
+            sys.exit(-1)
+          else:
+            ipData = [[]]
+            for l in range(stat['NumberOfElementalScalars']):
+              data = p.element_scalar(e,l)
+              for i in range(len(data)):                          # at least as many nodes as ips
+                node = damask.core.mesh.mesh_get_nodeAtIP(str(p.element(e).type),i+1)   # fortran indexing starts at 1
+                if not node: break                                # no more ips
+                while i >= len(ipData): ipData.append([])
+                ipData[i].extend([data[node-1].value])            # python indexing starts at 0
+            for i in range(len(ipData)):
+              myfile.write('\t'.join(map(str,[e,i]+ipData[i]))+'\n')
 
 p.close()
 sys.stdout.write("\n")
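Usage note on the new '-t/--type' switch (illustration only, not part of the patch): omitting the option keeps the old behaviour and processes both geometry types, while e.g. '-t ipbased' restricts output to the integration-point based files. The defaulting pattern shared by both scripts boils down to the following self-contained Python 2 sketch; the argument list handed to parse_args and the names 'script.py'/'result' merely stand in for a real command line:

    from optparse import OptionParser

    parser = OptionParser(usage='%prog [options] resultfile')
    parser.add_option('-t','--type', dest='type', type='choice', choices=['ipbased','nodebased'],
                      help='processed geometry type [ipbased and nodebased]')

    (options, args) = parser.parse_args(['-t','ipbased','result'])   # e.g. "script.py -t ipbased result"

    if not options.type:                 # option omitted: fall back to processing both geometry types
      options.type = ['nodebased', 'ipbased']
    else:                                # option given: wrap the single choice in a one-element list
      options.type = [options.type]

    for geomtype in options.type:        # downstream loops iterate over this selection only
      print geomtype                     # prints just 'ipbased' in this example

Because the option is a single 'choice' rather than a comma-separated list, at most one geometry type can be selected per run; requesting both still means simply omitting '-t'.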