more and improved tests for add...

commit 7954c9a4a3
parent 38048a09f2
Author: Martin Diehl
Date:   2014-07-24 20:21:18 +00:00
7 changed files with 186 additions and 289 deletions

View File

@@ -103,10 +103,8 @@ for file in files:
     if labelLen[label] == 0:
       brokenFormula[label] = True
     if label not in brokenFormula:
-      if labelLen[label] == 1:
-        table.labels_append(label)
-      else:
-        table.labels_append(['%i_%s'%(i+1,label) for i in xrange(labelLen[label])])
+      table.labels_append({True:['%i_%s'%(i+1,label) for i in xrange(labelLen[label])],
+                           False:label}[labelLen[label]>1] )
   table.head_write()
   firstLine = False
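The hunks in this commit repeatedly replace an if/else with a dict indexed by a boolean when extending the ASCII header. A minimal standalone sketch of that idiom (the helper name and values are hypothetical, not part of the commit):

def header_labels(label, length):
    # multi-component columns get a 1-based prefix, scalar columns keep their plain name
    return {True:  ['%i_%s'%(i+1,label) for i in range(length)],
            False: label}[length > 1]

print(header_labels('f', 9))    # ['1_f', '2_f', ..., '9_f']
print(header_labels('phi', 1))  # phi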

View File

@@ -124,10 +124,8 @@ for file in files:
   for datatype,labels in active.items():                                   # loop over vector,tensor
     for label in labels:
       for accuracy in options.accuracy:
-        if datatype == 'vector':                                           # extend ASCII header with new labels
-          table.labels_append(['div%s(%s)'%(accuracy,label)])
-        if datatype == 'tensor':
-          table.labels_append(['%i_div%s(%s)'%(i+1,accuracy,label) for i in xrange(3)])
+        table.labels_append({True: ['%i_div%s(%s)'%(i+1,accuracy,label) for i in xrange(3)],
+                             False:['div%s(%s)'%(accuracy,label)]}[datatype == 'tensor'])  # extend ASCII header with new labels
   table.head_write()
 # ------------------------------------------ read value field --------------------------------------

View File

@@ -20,25 +20,25 @@ Add RGB color value corresponding to TSL-OIM scheme for inverse pole figures.
 """, version = string.replace(scriptID,'\n','\\n')
 )
-parser.add_option('-p', '--pole', dest='pole', type='float', nargs=3, metavar='X Y Z',
+parser.add_option('-p', '--pole', dest='pole', action='store', type='float', nargs=3, metavar='float float float',
                   help = 'lab frame direction for inverse pole figure %default')
-parser.add_option('-s', '--symmetry', dest='symmetry', type='string',
-                  help = 'crystal symmetry [%default]')
-parser.add_option('-e', '--eulers', dest='eulers', type='string', metavar='LABEL',
+parser.add_option('-s', '--symmetry', dest='symmetry', action='store', type='choice',
+                  choices=damask.Symmetry.lattices[1:], metavar='string',
+                  help = 'crystal symmetry (%s) [cubic]'%(', '.join(damask.Symmetry.lattices[1:])))
+parser.add_option('-e', '--eulers', dest='eulers', action='store', type='string', metavar='string',
                   help = 'Euler angles label')
 parser.add_option('-d', '--degrees', dest='degrees', action='store_true',
                   help = 'Euler angles are given in degrees [%default]')
-parser.add_option('-m', '--matrix', dest='matrix', type='string', metavar='LABEL',
+parser.add_option('-m', '--matrix', dest='matrix', action='store', type='string', metavar='string',
                   help = 'orientation matrix label')
-parser.add_option('-a', dest='a', type='string', metavar='LABEL',
+parser.add_option('-a', dest='a', action='store', type='string', metavar='string',
                   help = 'crystal frame a vector label')
-parser.add_option('-b', dest='b', type='string', metavar='LABEL',
+parser.add_option('-b', dest='b', action='store', type='string', metavar='string',
                   help = 'crystal frame b vector label')
-parser.add_option('-c', dest='c', type='string', metavar='LABEL',
+parser.add_option('-c', dest='c', action='store', type='string', metavar='string',
                   help = 'crystal frame c vector label')
-parser.add_option('-q', '--quaternion', dest='quaternion', type='string', metavar='LABEL',
+parser.add_option('-q', '--quaternion', dest='quaternion', action='store', type='string', metavar='string',
                   help = 'quaternion label')
 parser.set_defaults(pole = [0.0,0.0,1.0])
 parser.set_defaults(symmetry = 'cubic')
 parser.set_defaults(degrees = False)
@@ -65,8 +65,7 @@ toRadians = math.pi/180.0 if options.degrees else 1.0
 pole = np.array(options.pole)
 pole /= np.linalg.norm(pole)
-# ------------------------------------------ setup file handles ---------------------------------------
+# ------------------------------------------ setup file handles -----------------------------------
 files = []
 if filenames == []:
   files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr})
@@ -75,7 +74,7 @@ else:
   if os.path.exists(name):
     files.append({'name':name, 'input':open(name), 'output':open(name+'_tmp','w'), 'croak':sys.stderr})
-#--- loop over input files ------------------------------------------------------------------------
+# ------------------------------------------ loop over input files ----------------------------------
 for file in files:
   if file['name'] != 'STDIN': file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
   else: file['croak'].write('\033[1m'+scriptName+'\033[0m\n')
@@ -84,7 +83,6 @@ for file in files:
   table.head_read()                                                         # read ASCII header info
   table.info_append(string.replace(scriptID,'\n','\\n') + '\t' + ' '.join(sys.argv[1:]))
-# --------------- figure out columns to process
   active = defaultdict(list)
   column = defaultdict(dict)
@@ -100,16 +98,13 @@ for file in files:
         file['croak'].write('column %s not found...\n'%label)
         break
-  table.labels_append(['%i_IPF_%g%g%g'%(i+1,options.pole[0],options.pole[1],options.pole[2]) for i in xrange(3)])
 # ------------------------------------------ assemble header ---------------------------------------
+  table.labels_append(['%i_IPF_%g%g%g'%(i+1,options.pole[0],options.pole[1],options.pole[2]) for i in xrange(3)])
   table.head_write()
 # ------------------------------------------ process data ----------------------------------------
-  while table.data_read():                                                  # read next data line of ASCII table
+  outputAlive = True
+  while outputAlive and table.data_read():                                  # read next data line of ASCII table
     if input == 'eulers':
       o = damask.Orientation(Eulers=toRadians*np.array(map(float,table.data[column['vector'][options.eulers]:\
                                                             column['vector'][options.eulers]+datainfo['vector']['len']])),
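The reworked -s/--symmetry option above switches from a free-form string to optparse's 'choice' type, so invalid symmetries are rejected at parse time. A small self-contained sketch of that mechanism (the lattice list here is a stand-in, not necessarily damask.Symmetry.lattices[1:]):

from optparse import OptionParser

lattices = ['cubic','hexagonal']                       # assumed stand-in for damask.Symmetry.lattices[1:]
parser = OptionParser()
parser.add_option('-s','--symmetry', dest='symmetry', action='store', type='choice',
                  choices=lattices, metavar='string',
                  help='crystal symmetry (%s) [cubic]'%(', '.join(lattices)))
parser.set_defaults(symmetry = 'cubic')

(options,args) = parser.parse_args(['-s','hexagonal'])
print(options.symmetry)                                # hexagonal
# parser.parse_args(['-s','bogus']) would exit with: option -s: invalid choice: 'bogus'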

View File

@@ -1,57 +1,37 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
-import os,re,sys,math,string,damask
-from optparse import OptionParser, Option
+import os,re,sys,math,string
+from optparse import OptionParser
+import damask
-scriptID = '$Id: addNorm.py 3167 2014-06-06 09:43:28Z p.eisenlohr $'
+scriptID = '$Id: addCauchy.py 3301 2014-07-22 14:21:49Z MPIE\m.diehl $'
 scriptName = scriptID.split()[1]
-# -----------------------------
-class extendableOption(Option):
-# -----------------------------
-# used for definition of new option parser action 'extend', which enables to take multiple option arguments
-# taken from online tutorial http://docs.python.org/library/optparse.html
-  ACTIONS = Option.ACTIONS + ("extend",)
-  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
-  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
-  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
-  def take_action(self, action, dest, opt, value, values, parser):
-    if action == "extend":
-      lvalue = value.split(",")
-      values.ensure_value(dest, []).extend(lvalue)
-    else:
-      Option.take_action(self, action, dest, opt, value, values, parser)
 # --------------------------------------------------------------------
 # MAIN
 # --------------------------------------------------------------------
-parser = OptionParser(option_class=extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
 Add data in column(s) of second ASCIItable selected from row that is given by the value in a mapping column.
-""" + string.replace(scriptID,'\n','\\n')
+""", version = string.replace(scriptID,'\n','\\n')
 )
-parser.add_option('-a','--asciitable', dest='asciitable', type='string', metavar='FILE',
-                  help='mapped ASCIItable')
-parser.add_option('-c','--map', dest='map', type='string', metavar='LABEL',
-                  help='heading of column containing row mapping')
-parser.add_option('-o','--offset', dest='offset', type='int', metavar='n',
-                  help='offset between mapped column value and row')
-parser.add_option('-v','--vector', dest='vector', action='extend', type='string', metavar='LABEL',
-                  help='heading of columns containing vector field values')
-parser.add_option('-t','--tensor', dest='tensor', action='extend', type='string', metavar='LABEL',
-                  help='heading of columns containing tensor field values')
-parser.add_option('-s','--special', dest='special', action='extend', type='string', metavar='LABEL',
-                  help='heading of columns containing field values of special dimension')
-parser.add_option('-d','--dimension', dest='N', action='store', type='int', \
-                  help='dimension of special field values [%default]')
+parser.add_option('-a','--asciitable', dest='asciitable', action='store', type='string', metavar='string',
+                  help='mapped ASCIItable')
+parser.add_option('-c','--map', dest='map', action='store', type='string', metavar='string',
+                  help='heading of column containing row mapping')
+parser.add_option('-o','--offset', dest='offset', action='store', type='int', metavar='int',
+                  help='offset between mapped column value and row')
+parser.add_option('-v','--vector', dest='vector', action='extend', type='string', metavar='<string LIST>',
+                  help='heading of columns containing vector field values')
+parser.add_option('-t','--tensor', dest='tensor', action='extend', type='string', metavar='<string LIST>',
+                  help='heading of columns containing tensor field values')
+parser.add_option('-s','--special', dest='special', action='extend', type='string', metavar='<string LIST>',
+                  help='heading of columns containing field values of special dimension')
+parser.add_option('-d','--dimension', dest='N', action='store', type='int', metavar='int',
+                  help='dimension of special field values [%default]')
 parser.set_defaults(vector = [])
 parser.set_defaults(tensor = [])
 parser.set_defaults(special = [])
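The local extendableOption class removed above is what provides the 'extend' action used by -v/-t/-s; the scripts now rely on the (assumed equivalent) damask.extendableOption instead. A standalone sketch of what that action does, lifted from the removed class:

from optparse import Option, OptionParser

class extendableOption(Option):
    # new option parser action 'extend': comma-separated values accumulate into one list
    ACTIONS = Option.ACTIONS + ("extend",)
    STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
    TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
    ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

    def take_action(self, action, dest, opt, value, values, parser):
        if action == "extend":
            values.ensure_value(dest, []).extend(value.split(","))
        else:
            Option.take_action(self, action, dest, opt, value, values, parser)

parser = OptionParser(option_class=extendableOption)
parser.add_option('-v','--vector', dest='vector', action='extend', type='string')
(options,args) = parser.parse_args(['-v','f,p', '--vector','Cauchy'])
print(options.vector)   # ['f', 'p', 'Cauchy']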
@@ -62,6 +42,8 @@ parser.set_defaults(N = 1)
 if len(options.vector) + len(options.tensor) + len(options.special) == 0:
   parser.error('no data column specified...')
+if options.map == None:
+  parser.error('missing mapping column...')
 datainfo = {                                                                # list of requested labels per datatype
              'vector':  {'len':3,
@@ -72,13 +54,13 @@ datainfo = {                                                               # list of requested labels per datatype
              'label':[]},
            }
 if options.vector  != None: datainfo['vector']['label']  += options.vector
 if options.tensor  != None: datainfo['tensor']['label']  += options.tensor
 if options.special != None: datainfo['special']['label'] += options.special
+# ------------------------------------------ processing mapping ASCIItable ---------------------------
 if options.asciitable != None and os.path.isfile(options.asciitable):
-  mappedTable = damask.ASCIItable(open(options.asciitable),open(options.asciitable),False)
+  mappedTable = damask.ASCIItable(open(options.asciitable),None,False)
   mappedTable.head_read()                                                   # read ASCII header info of mapped table
   labels = []
@@ -98,18 +80,11 @@ if options.asciitable != None and os.path.isfile(options.asciitable):
       break
   mappedTable.data_readArray(indices)
-  mappedTable.input_close()                                                 # close mapped input ASCII table
-  mappedTable.output_close()                                                # close mapped output (same as input) ASCII table
+  mappedTable.__IO__['in'].close()                                          # close mapped input ASCII table
 else:
-  parser.error("Missing mapped ASCIItable")
-if options.map == None:
-  parser.error("Missing mapping column")
+  parser.error('missing mapped ASCIItable...')
 # ------------------------------------------ setup file handles ---------------------------------------
 files = []
 if filenames == []:
   files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr})
@@ -118,7 +93,7 @@ else:
   if os.path.exists(name):
     files.append({'name':name, 'input':open(name), 'output':open(name+'_tmp','w'), 'croak':sys.stderr})
-#--- loop over input files ------------------------------------------------------------------------
+# ------------------------------------------ loop over input files ---------------------------------------
 for file in files:
   if file['name'] != 'STDIN': file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
   else: file['croak'].write('\033[1m'+scriptName+'\033[0m\n')
@@ -127,34 +102,28 @@ for file in files:
   table.head_read()                                                         # read ASCII header info
   table.info_append(string.replace(scriptID,'\n','\\n') + '\t' + ' '.join(sys.argv[1:]))
-# --------------- figure out columns to process
   if options.map not in table.labels:
+    file['croak'].write('column %s not found...\n'%options.map)
     continue
-  mappedColumn = table.labels.index(options.map)
-  for label in labels:
-    table.labels_append(label)                                              # extend ASCII header of current table with new labels
-# ------------------------------------------ assemble header ---------------------------------------
+# ------------------------------------------ assemble header ------------------------------------
+  for datatype,info in datainfo.items():
+    for label in info['label']:
+      table.labels_append({True:['%i_%s'%(i+1,label) for i in xrange(info['len'])],
+                           False:label}[info['len']>1] )                    # extend ASCII header of current table with new labels
   table.head_write()
 # ------------------------------------------ process data ----------------------------------------
+  mappedColumn = table.labels.index(options.map)
   outputAlive = True
   while outputAlive and table.data_read():                                  # read next data line of ASCII table
-#   file['croak'].write('%i\n'%(int(table.data[mappedColumn])+options.offset-1))
     table.data_append(mappedTable.data[int(table.data[mappedColumn])+options.offset-1])  # add all mapped data types
     outputAlive = table.data_write()                                        # output processed line
 # ------------------------------------------ output result ---------------------------------------
   outputAlive and table.output_flush()                                      # just in case of buffered ASCII table
-  table.input_close()                                                       # close input ASCII table
+  file['input'].close()                                                     # close input ASCII table (works for stdin)
+  file['output'].close()                                                    # close output ASCII table (works for stdout)
   if file['name'] != 'STDIN':
-    table.output_close()                                                    # close output ASCII table
     os.rename(file['name']+'_tmp',file['name'])                             # overwrite old one with tmp new
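For reference, the data pass of this script keeps only one line of logic: the value in the mapping column (shifted by --offset) selects a row of the mapped table whose columns are appended. A tiny sketch with made-up data (the 1-based row numbering and the offset value are assumptions, not taken from the script):

mappedData = [['10.0','0.1'], ['20.0','0.2'], ['30.0','0.3']]   # stand-in for mappedTable.data
offset = 0                                                      # would come from --offset

def mappedRow(mappingValue):
    # interpret the mapping column as a 1-based row number into the mapped table
    return mappedData[int(mappingValue) + offset - 1]

print(mappedRow('2'))   # ['20.0', '0.2']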

View File

@@ -3,6 +3,7 @@
 import os,re,sys,math,string
 import numpy as np
+from collections import defaultdict
 from optparse import OptionParser
 import damask
@@ -26,15 +27,13 @@ def Mises(what,tensor):
 parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
 Add vonMises equivalent values for symmetric part of requested strains and/or stresses.
-""" + string.replace(scriptID,'\n','\\n')
+""", version = string.replace(scriptID,'\n','\\n')
 )
-parser.add_option('-e','--strain', dest='strain', action='extend', type='string', \
+parser.add_option('-e','--strain', dest='strain', action='extend', type='string', metavar='<string LIST>',
                   help='heading(s) of columns containing strain tensors')
-parser.add_option('-s','--stress', dest='stress', action='extend', type='string', \
+parser.add_option('-s','--stress', dest='stress', action='extend', type='string', metavar='<string LIST>',
                   help='heading(s) of columns containing stress tensors')
 parser.set_defaults(strain = [])
 parser.set_defaults(stress = [])
@@ -50,12 +49,10 @@ datainfo = {                                                               # list of requested labels per datatype
              'label':[]},
            }
 if options.strain != None: datainfo['strain']['label'] += options.strain
 if options.stress != None: datainfo['stress']['label'] += options.stress
 # ------------------------------------------ setup file handles ---------------------------------------
 files = []
 if filenames == []:
   files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr})
@@ -65,7 +62,6 @@ else:
     files.append({'name':name, 'input':open(name), 'output':open(name+'_tmp','w'), 'croak':sys.stderr})
 # ------------------------------------------ loop over input files ---------------------------------------
 for file in files:
   if file['name'] != 'STDIN': file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
   else: file['croak'].write('\033[1m'+scriptName+'\033[0m\n')
@@ -74,45 +70,39 @@ for file in files:
   table.head_read()                                                         # read ASCII header info
   table.info_append(string.replace(scriptID,'\n','\\n') + '\t' + ' '.join(sys.argv[1:]))
-  active = {}
-  column = {}
-  head = []
+  active = defaultdict(list)
+  column = defaultdict(dict)
   for datatype,info in datainfo.items():
     for label in info['label']:
       key = {True :'1_%s',
              False:'%s' }[info['len']>1]%label
       if key not in table.labels:
-        sys.stderr.write('column %s not found...\n'%key)
+        file['croak'].write('column %s not found...\n'%key)
       else:
-        if datatype not in active: active[datatype] = []
-        if datatype not in column: column[datatype] = {}
         active[datatype].append(label)
         column[datatype][label] = table.labels.index(key)                   # remember columns of requested data
+        table.labels_append('Mises(%s)'%label)                              # extend ASCII header with new labels
 # ------------------------------------------ assemble header ---------------------------------------
-  for datatype,labels in active.items():                                    # loop over vector,tensor
-    for label in labels:                                                    # loop over all requested determinants
-      table.labels_append('Mises(%s)'%label)                                # extend ASCII header with new labels
   table.head_write()
 # ------------------------------------------ process data ----------------------------------------
-  while table.data_read():                                                  # read next data line of ASCII table
+  outputAlive = True
+  while outputAlive and table.data_read():                                  # read next data line of ASCII table
     for datatype,labels in active.items():                                  # loop over vector,tensor
       for label in labels:                                                  # loop over all requested norms
         table.data_append(Mises(datatype,
                                 np.array(map(float,table.data[column[datatype][label]:
                                                    column[datatype][label]+datainfo[datatype]['len']]),'d').reshape(3,3)))
-    table.data_write()                                                      # output processed line
+    outputAlive = table.data_write()                                        # output processed line
 # ------------------------------------------ output result ---------------------------------------
-  table.output_flush()                                                      # just in case of buffered ASCII table
-  file['input'].close()                                                     # close input ASCII table
+  outputAlive and table.output_flush()                                      # just in case of buffered ASCII table
+  file['input'].close()                                                     # close input ASCII table (works for stdin)
+  file['output'].close()                                                    # close output ASCII table (works for stdout)
   if file['name'] != 'STDIN':
-    file['output'].close                                                    # close output ASCII table
     os.rename(file['name']+'_tmp',file['name'])                             # overwrite old one with tmp new
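The switch from plain dicts plus "if datatype not in active" guards to collections.defaultdict is the main cleanup in this hunk. A minimal sketch of the behaviour being relied on:

from collections import defaultdict

active = defaultdict(list)               # missing keys start out as empty lists
column = defaultdict(dict)               # missing keys start out as empty dicts

active['tensor'].append('Cauchy')        # no prior active['tensor'] = [] needed
column['tensor']['Cauchy'] = 7

print(dict(active))                      # {'tensor': ['Cauchy']}
print(dict(column))                      # {'tensor': {'Cauchy': 7}}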

View File

@@ -1,71 +1,45 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
-import os,re,sys,math,string,damask
+import os,re,sys,math,string
 from collections import defaultdict
-from optparse import OptionParser, Option
+from optparse import OptionParser
+import damask
 scriptID = '$Id$'
 scriptName = scriptID.split()[1]
-# -----------------------------
-class extendableOption(Option):
-# -----------------------------
-# used for definition of new option parser action 'extend', which enables to take multiple option arguments
-# taken from online tutorial http://docs.python.org/library/optparse.html
-  ACTIONS = Option.ACTIONS + ("extend",)
-  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
-  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
-  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
-  def take_action(self, action, dest, opt, value, values, parser):
-    if action == "extend":
-      lvalue = value.split(",")
-      values.ensure_value(dest, []).extend(lvalue)
-    else:
-      Option.take_action(self, action, dest, opt, value, values, parser)
 # definition of element-wise p-norms for matrices
-# p = 1
-def normAbs(object):
+def normAbs(object):                                                        # p = 1
   return sum(map(abs, object))
-# p = 2
-def normFrobenius(object):
+def normFrobenius(object):                                                  # p = 2
   return math.sqrt(sum([x*x for x in object]))
-# p = infinity
-def normMax(object):
+def normMax(object):                                                        # p = infinity
   return max(map(abs, object))
 # --------------------------------------------------------------------
 # MAIN
 # --------------------------------------------------------------------
-parser = OptionParser(option_class=extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
 Add column(s) containing norm of requested column(s) being either vectors or tensors.
-""" + string.replace(scriptID,'\n','\\n')
+""", version = string.replace(scriptID,'\n','\\n')
 )
 normChoices = ['abs','frobenius','max']
-parser.add_option('-n','--norm', dest='norm', action='store', type='choice', choices=normChoices, \
-                  help='type of element-wise p-norm (%s) [2]'%(','.join(map(str,normChoices))))
-parser.add_option('-v','--vector', dest='vector', action='extend', type='string', \
-                  help='heading of columns containing vector field values')
-parser.add_option('-t','--tensor', dest='tensor', action='extend', type='string', \
-                  help='heading of columns containing tensor field values')
-parser.add_option('-s','--special', dest='special', action='extend', type='string', \
-                  help='heading of columns containing field values of special dimension')
-parser.add_option('-d','--dimension', dest='N', action='store', type='int', \
-                  help='dimension of special field values [%default]')
+parser.add_option('-n','--norm', dest='norm', action='store', type='choice', choices=normChoices, metavar='string',
+                  help='type of element-wise p-norm (%s) [frobenius]'%(','.join(map(str,normChoices))))
+parser.add_option('-v','--vector', dest='vector', action='extend', type='string', metavar='<string LIST>',
+                  help='heading of columns containing vector field values')
+parser.add_option('-t','--tensor', dest='tensor', action='extend', type='string', metavar='<string LIST>',
+                  help='heading of columns containing tensor field values')
+parser.add_option('-s','--special', dest='special', action='extend', type='string', metavar='<string LIST>',
+                  help='heading of columns containing field values of special dimension')
+parser.add_option('-d','--dimension', dest='N', action='store', type='int', metavar='int',
+                  help='dimension of special field values [%default]')
 parser.set_defaults(norm = 'frobenius')
 parser.set_defaults(vector = [])
 parser.set_defaults(tensor = [])
@@ -86,14 +60,11 @@ datainfo = {                                                               # list of requested labels per datatype
              'label':[]},
            }
 if options.vector  != None: datainfo['vector']['label']  += options.vector
 if options.tensor  != None: datainfo['tensor']['label']  += options.tensor
 if options.special != None: datainfo['special']['label'] += options.special
 # ------------------------------------------ setup file handles ---------------------------------------
 files = []
 if filenames == []:
   files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr})
@@ -111,44 +82,37 @@ for file in files:
   table.head_read()                                                         # read ASCII header info
   table.info_append(string.replace(scriptID,'\n','\\n') + '\t' + ' '.join(sys.argv[1:]))
-# --------------- figure out columns to process
   active = defaultdict(list)
   column = defaultdict(dict)
   for datatype,info in datainfo.items():
     for label in info['label']:
-      foundIt = False
-      for key in ['1_'+label,label]:
-        if key in table.labels:
-          foundIt = True
+      key = {True :'1_%s',
+             False:'%s' }[info['len']>1]%label
+      if key not in table.labels:
+        file['croak'].write('column %s not found...\n'%key)
+      else:
         active[datatype].append(label)
         column[datatype][label] = table.labels.index(key)                   # remember columns of requested data
-      if not foundIt:
-        file['croak'].write('column %s not found...\n'%label)
-        break
+        table.labels_append('norm%s(%s)'%(options.norm.capitalize(),label)) # extend ASCII header with new labels
 # ------------------------------------------ assemble header ---------------------------------------
-  for datatype,labels in active.items():                                    # loop over vector,tensor
-    for label in labels:                                                    # loop over all requested determinants
-      table.labels_append('norm%s(%s)'%(options.norm.capitalize(),label))   # extend ASCII header with new labels
   table.head_write()
 # ------------------------------------------ process data ---------------------------------------
   outputAlive = True
   while outputAlive and table.data_read():                                  # read next data line of ASCII table
     for datatype,labels in active.items():                                  # loop over vector,tensor
      for label in labels:                                                   # loop over all requested norms
        eval("table.data_append(norm%s(map(float,table.data[column[datatype][label]:column[datatype][label]+datainfo[datatype]['len']])))"%options.norm.capitalize())
     outputAlive = table.data_write()                                        # output processed line
 # ------------------------------------------ output result ---------------------------------------
   outputAlive and table.output_flush()                                      # just in case of buffered ASCII table
-  file['input'].close()                                                     # close input ASCII table
+  file['input'].close()                                                     # close input ASCII table (works for stdin)
+  file['output'].close()                                                    # close output ASCII table (works for stdout)
   if file['name'] != 'STDIN':
-    file['output'].close()                                                  # close output ASCII table
     os.rename(file['name']+'_tmp',file['name'])                             # overwrite old one with tmp new
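The three element-wise p-norms defined at the top of this script can be exercised on their own; the dict dispatch below is a hedged alternative to the script's eval() call, shown only to make the selection by --norm explicit:

import math

def normAbs(v):       return sum(map(abs, v))                   # p = 1
def normFrobenius(v): return math.sqrt(sum([x*x for x in v]))   # p = 2
def normMax(v):       return max(map(abs, v))                   # p = infinity

norms = {'abs': normAbs, 'frobenius': normFrobenius, 'max': normMax}
v = [3.0, -4.0, 0.0]
print(norms['abs'](v))        # 7.0
print(norms['frobenius'](v))  # 5.0
print(norms['max'](v))        # 4.0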

View File

@@ -2,60 +2,47 @@
 # -*- coding: UTF-8 no BOM -*-
 import os,sys,string,itertools,re,math,numpy
-import damask
 from collections import defaultdict
-from optparse import OptionParser, OptionGroup, Option, SUPPRESS_HELP
+from optparse import OptionParser
+import damask
 scriptID = '$Id$'
 scriptName = scriptID.split()[1]
-#--------------------------------------------------------------------------------------------------
-class extendedOption(Option):
-#--------------------------------------------------------------------------------------------------
-# used for definition of new option parser action 'extend', which enables to take multiple option arguments
-# taken from online tutorial http://docs.python.org/library/optparse.html
-  ACTIONS = Option.ACTIONS + ("extend",)
-  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
-  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
-  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
-  def take_action(self, action, dest, opt, value, values, parser):
-    if action == "extend":
-      lvalue = value.split(",")
-      values.ensure_value(dest, []).extend(lvalue)
-    else:
-      Option.take_action(self, action, dest, opt, value, values, parser)
-parser = OptionParser(option_class=extendedOption, usage='%prog options [file[s]]', description = """\
+# --------------------------------------------------------------------
+# MAIN
+# --------------------------------------------------------------------
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
 Add quaternion and/or Bunge Euler angle representation of crystal lattice orientation.
 Orientation is given by quaternion, Euler angles,
 rotation matrix, or crystal frame coordinates (i.e. component vectors of rotation matrix).
-""" + string.replace(scriptID,'\n','\\n')
+""", version = string.replace(scriptID,'\n','\\n')
 )
-parser.add_option('-o', '--output', dest='output', action='append', metavar='<LIST>',
-                  help = 'output orientation formats')
-parser.add_option('-s', '--symmetry', dest='symmetry', type='string',
-                  help = 'crystal symmetry [%default]')
-parser.add_option('-r', '--rotation', dest='rotation', type='float', nargs=4,
+outputChoices = ['quaternion','eulers']
+parser.add_option('-o', '--output', dest='output', action='extend', type='string', metavar='<string LIST>',
+                  help = 'output orientation formats (%s)'%(','.join(outputChoices)))
+parser.add_option('-s', '--symmetry', dest='symmetry', action='store', type='choice',
+                  choices=damask.Symmetry.lattices[1:], metavar='string',
+                  help = 'crystal symmetry (%s) [cubic]'%(', '.join(damask.Symmetry.lattices[1:])))
+parser.add_option('-r', '--rotation', dest='rotation', action='store', type='float', nargs=4, metavar='float float float float',
                   help = 'angle and axis to (pre)rotate orientation')
-parser.add_option('-e', '--eulers', dest='eulers', type='string', metavar='LABEL',
-                  help = 'Euler angles')
+parser.add_option('-e', '--eulers', dest='eulers', action='store', type='string', metavar='string',
+                  help = 'Euler angles label')
 parser.add_option('-d', '--degrees', dest='degrees', action='store_true',
-                  help = 'Angles are given in degrees [%default]')
-parser.add_option('-m', '--matrix', dest='matrix', type='string', metavar='LABEL',
-                  help = 'orientation matrix')
-parser.add_option('-a', dest='a', type='string', metavar='LABEL',
-                  help = 'crystal frame a vector')
-parser.add_option('-b', dest='b', type='string', metavar='LABEL',
-                  help = 'crystal frame b vector')
-parser.add_option('-c', dest='c', type='string', metavar='LABEL',
-                  help = 'crystal frame c vector')
-parser.add_option('-q', '--quaternion', dest='quaternion', type='string', metavar='LABEL',
-                  help = 'quaternion')
+                  help = 'Euler angles are given in degrees [%default]')
+parser.add_option('-m', '--matrix', dest='matrix', action='store', type='string', metavar='string',
+                  help = 'orientation matrix label')
+parser.add_option('-a', dest='a', action='store', type='string', metavar='string',
+                  help = 'crystal frame a vector label')
+parser.add_option('-b', dest='b', action='store', type='string', metavar='string',
+                  help = 'crystal frame b vector label')
+parser.add_option('-c', dest='c', action='store', type='string', metavar='string',
+                  help = 'crystal frame c vector label')
+parser.add_option('-q', '--quaternion', dest='quaternion', action='store', type='string', metavar='string',
+                  help = 'quaternion label')
 parser.set_defaults(output = [])
 parser.set_defaults(symmetry = 'cubic')
 parser.set_defaults(rotation = [0.,1.,1.,1.])                               # no rotation about 1,1,1
@@ -72,6 +59,9 @@ datainfo = {                                                               # list of requested labels per datatype
              'label':[]},
            }
+if not set(options.output).issubset(set(outputChoices)):
+  parser.error('output must be chosen from %s...'%(', '.join(outputChoices)))
 if options.eulers != None: datainfo['vector']['label'] += [options.eulers]; input = 'eulers'
 if options.a != None and \
    options.b != None and \
@@ -94,16 +84,15 @@ else:
   if os.path.exists(name):
     files.append({'name':name, 'input':open(name), 'output':open(name+'_tmp','w'), 'croak':sys.stderr})
-#--- loop over input files ------------------------------------------------------------------------
+# ------------------------------------------ loop over input files ----------------------------------
 for file in files:
   if file['name'] != 'STDIN': file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
   else: file['croak'].write('\033[1m'+scriptName+'\033[0m\n')
-  table = damask.ASCIItable(file['input'],file['output'],buffered = False)  # make unbuffered ASCII_table
+  table = damask.ASCIItable(file['input'],file['output'],False)             # make unbuffered ASCII_table
   table.head_read()                                                         # read ASCII header info
   table.info_append(string.replace(scriptID,'\n','\\n') + '\t' + ' '.join(sys.argv[1:]))
-# --------------- figure out columns to process
   active = defaultdict(list)
   column = defaultdict(dict)
@@ -119,20 +108,17 @@ for file in files:
         file['croak'].write('column %s not found...\n'%label)
         break
+# ------------------------------------------ assemble header ---------------------------------------
   for output in options.output:
     if output == 'quaternion':
       table.labels_append(['%i_quaternion_%s'%(i+1,options.symmetry) for i in xrange(4)])
     if output == 'eulers':
       table.labels_append(['%i_eulers_%s'%(i+1,options.symmetry) for i in xrange(3)])
-# ------------------------------------------ assemble header ---------------------------------------
   table.head_write()
 # ------------------------------------------ process data ----------------------------------------
-  while table.data_read():                                                  # read next data line of ASCII table
+  outputAlive = True
+  while outputAlive and table.data_read():                                  # read next data line of ASCII table
     if input == 'eulers':
       o = damask.Orientation(Eulers=toRadians*numpy.array(map(float,table.data[column['vector'][options.eulers]:\
                                                               column['vector'][options.eulers]+datainfo['vector']['len']])),
@@ -155,7 +141,6 @@ for file in files:
                                 column['quaternion'][options.quaternion]+datainfo['quaternion']['len']])),
                              symmetry=options.symmetry).reduced()
     o.quaternion = r*o.quaternion
     for output in options.output:
@@ -163,14 +148,12 @@ for file in files:
         table.data_append(o.asQuaternion())
       if output == 'eulers':
         table.data_append(o.asEulers('Bunge'))
-    table.data_write()                                                      # output processed line
+    outputAlive = table.data_write()                                        # output processed line
 # ------------------------------------------ output result ---------------------------------------
-  table.output_flush()                                                      # just in case of buffered ASCII table
+  outputAlive and table.output_flush()                                      # just in case of buffered ASCII table
+  file['input'].close()                                                     # close input ASCII table (works for stdin)
+  file['output'].close()                                                    # close output ASCII table (works for stdout)
   if file['name'] != 'STDIN':
-    file['input'].close()                                                   # close input ASCII table
-    file['output'].close()                                                  # close output ASCII table
     os.rename(file['name']+'_tmp',file['name'])                             # overwrite old one with tmp new
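The new issubset() test added in this file is the whole validation of --output; a standalone sketch of that check (validate() is a hypothetical wrapper, not a function of the script):

outputChoices = ['quaternion','eulers']

def validate(requested):
    if not set(requested).issubset(set(outputChoices)):
        raise ValueError('output must be chosen from %s...'%(', '.join(outputChoices)))
    return requested

print(validate(['eulers']))              # ['eulers']
# validate(['eulers','rodrigues']) would raise ValueError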