New scripts: add deformed configuration to an ASCII table, and generate layered 2D geometries (EBSD)

addCompatibilityMismatch.py now uses functions from the ASCII table library
This commit is contained in:
Martin Diehl 2012-04-18 09:58:59 +00:00
parent caff3124fd
commit 17f644b261
3 changed files with 289 additions and 102 deletions

View File

@ -44,29 +44,22 @@ Operates on periodic ordered three-dimensional data sets.
)
parser.add_option('--no-shape','-s', dest='shape', action='store_false', \
parser.add_option('--no-shape','-s', dest='noShape', action='store_false', \
help='do not calcuate shape mismatch [%default]')
parser.add_option('--no-volume','-v', dest='volume', action='store_false', \
parser.add_option('--no-volume','-v', dest='noVolume', action='store_false', \
help='do not calculate volume mismatch [%default]')
parser.add_option('-d','--dimension', dest='dim', type='float', nargs=3, \
help='physical dimension of data set in x (fast) y z (slow) [%default]')
parser.add_option('-r','--resolution', dest='res', type='int', nargs=3, \
help='resolution of data set in x (fast) y z (slow)')
parser.add_option('-c','--coordinates', dest='coords', type='string',\
help='column heading for coordinates [%default]')
parser.add_option('-f','--deformation', dest='defgrad', action='extend', type='string', \
help='heading(s) of columns containing deformation tensor values %default')
parser.set_defaults(volume = True)
parser.set_defaults(shape = True)
parser.set_defaults(defgrad = ['f'])
parser.set_defaults(noVolume = False)
parser.set_defaults(noShape = False)
parser.set_defaults(coords = 'ip')
parser.set_defaults(defgrad = 'f')
(options,filenames) = parser.parse_args()
if not options.res or len(options.res) < 3:
parser.error('improper resolution specification...')
if not options.dim or len(options.dim) < 3:
parser.error('improper dimension specification...')
defgrad = {}
defgrad_av = {}
centroids = {}
nodes = {}
@ -84,113 +77,96 @@ if options.defgrad != None: datainfo['defgrad']['label'] += options.defgrad
files = []
if filenames == []:
parser.error('no data file specified')
files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout})
else:
for name in filenames:
if os.path.exists(name):
files.append({'name':name, 'input':open(name), 'output':open(name+'_tmp','w')})
# ------------------------------------------ loop over input files ---------------------------------------
# ------------------------------------------ loop over input files ---------------------------------------
for file in files:
print file['name']
if file['name'] != 'STDIN': print file['name'],
# get labels by either read the first row, or - if keyword header is present - the last line of the header
table = damask.ASCIItable(file['input'],file['output'],False) # make unbuffered ASCII_table
table.head_read() # read ASCII header info
table.info_append(string.replace('$Id$','\n','\\n') + \
'\t' + ' '.join(sys.argv[1:]))
firstline = file['input'].readline()
m = re.search('(\d+)\s*head', firstline.lower())
if m:
headerlines = int(m.group(1))
passOn = [file['input'].readline() for i in range(1,headerlines)]
headers = file['input'].readline().split()
# --------------- figure out dimension and resolution
try:
locationCol = table.labels.index('%s.x'%options.coords) # columns containing location data
except ValueError:
print 'no coordinate data found...'
continue
grid = [{},{},{}]
while table.data_read(): # read next data line of ASCII table
for j in xrange(3):
grid[j][str(table.data[locationCol+j])] = True # remember coordinate along x,y,z
res = numpy.array([len(grid[0]),\
len(grid[1]),\
len(grid[2]),],'i') # resolution is number of distinct coordinates found
geomdim = res/numpy.maximum(numpy.ones(3,'d'),res-1.0)* \
numpy.array([max(map(float,grid[0].keys()))-min(map(float,grid[0].keys())),\
max(map(float,grid[1].keys()))-min(map(float,grid[1].keys())),\
max(map(float,grid[2].keys()))-min(map(float,grid[2].keys())),\
],'d') # dimension from bounding box, corrected for cell-centeredness
if res[2] == 1:
geomdim[2] = min(geomdim[:2]/res[:2])
N = res.prod()
print '\t%s @ %s'%(geomdim,res)
# --------------- figure out columns to process
key = '1_%s' %options.defgrad
if key not in table.labels:
sys.stderr.write('column %s not found...\n'%key)
else:
headerlines = 1
passOn = []
headers = firstline.split()
data = file['input'].readlines()
for i,l in enumerate(headers):
if l.startswith('1_'):
if re.match('\d+_',l[2:]) or i == len(headers)-1 or not headers[i+1].endswith(l[2:]):
headers[i] = l[2:]
active = {}
column = {}
head = []
for datatype,info in datainfo.items():
for label in info['label']:
key = {True :'1_%s',
False:'%s' }[info['len']>1]%label
if key not in headers:
sys.stderr.write('column %s not found...\n'%key)
else:
if datatype not in active: active[datatype] = []
if datatype not in column: column[datatype] = {}
active[datatype].append(label)
column[datatype][label] = headers.index(key)
if options.shape: head += ['mismatch_shape(%s)'%label]
if options.volume: head += ['mismatch_volume(%s)'%label]
defgrad = numpy.array([0.0 for i in xrange(N*9)]).reshape(list(res)+[3,3])
if not options.noShape: table.labels_append(['mismatch_shape(%s)' %options.defgrad])
if not options.noVolume: table.labels_append(['mismatch_volume(%s)'%options.defgrad])
column = table.labels.index(key)
# ------------------------------------------ assemble header ---------------------------------------
output = '%i\theader'%(headerlines+1) + '\n' + \
''.join(passOn) + \
string.replace('$Id$','\n','\\n')+ '\t' + \
' '.join(sys.argv[1:]) + '\n' + \
'\t'.join(headers + head) + '\n' # build extended header
table.head_write()
# ------------------------------------------ read deformation gradient field -----------------------
# ------------------------------------------ read deformation tensors ---------------------------------------
for datatype,labels in active.items():
for label in labels:
defgrad[label] = numpy.array([0.0 for i in xrange(9*options.res[0]*options.res[1]*options.res[2])],'d').\
reshape((options.res[0],options.res[1],options.res[2],3,3))
idx = 0
for line in data:
items = line.split()[:len(headers)] # take only valid first items
if len(items) < len(headers): # too short lines get dropped
continue
defgrad[label][location(idx,options.res)[0]]\
[location(idx,options.res)[1]]\
[location(idx,options.res)[2]]\
= numpy.array(map(float,items[column[datatype][label]:
column[datatype][label]+datainfo[datatype]['len']]),'d').reshape(3,3)
idx += 1
print options.res
defgrad_av[label] = damask.core.math.tensor_avg(options.res,defgrad[label])
centroids[label] = damask.core.math.deformed_fft(options.res,options.dim,defgrad_av[label],1.0,defgrad[label])
nodes[label] = damask.core.math.mesh_regular_grid(options.res,options.dim,defgrad_av[label],centroids[label])
if options.shape: shape_mismatch[label] = damask.core.math.shape_compare( options.res,options.dim,defgrad[label],nodes[label],centroids[label])
if options.volume: volume_mismatch[label] = damask.core.math.volume_compare(options.res,options.dim,defgrad[label],nodes[label])
# ------------------------------------------ read file ---------------------------------------
table.data_rewind()
idx = 0
for line in data:
items = line.split()[:len(headers)]
if len(items) < len(headers):
continue
while table.data_read(): # read next data line of ASCII table
(x,y,z) = location(idx,res) # figure out (x,y,z) position from line count
idx += 1
defgrad[x,y,z] = numpy.array(map(float,table.data[column:column+9]),'d').reshape(3,3)
output += '\t'.join(items)
defgrad_av = damask.core.math.tensor_avg(res,defgrad)
centroids = damask.core.math.deformed_fft(res,geomdim,defgrad_av,1.0,defgrad)
nodes = damask.core.math.mesh_regular_grid(res,geomdim,defgrad_av,centroids)
if not options.noShape: shape_mismatch = damask.core.math.shape_compare( res,geomdim,defgrad,nodes,centroids)
if not options.noVolume: volume_mismatch = damask.core.math.volume_compare(res,geomdim,defgrad,nodes)
for datatype,labels in active.items():
for label in labels:
# ------------------------------------------ process data ---------------------------------------
if options.shape: output += '\t%f'%shape_mismatch[label][location(idx,options.res)[0]][location(idx,options.res)[1]][location(idx,options.res)[2]]
if options.volume: output += '\t%f'%volume_mismatch[label][location(idx,options.res)[0]][location(idx,options.res)[1]][location(idx,options.res)[2]]
output += '\n'
idx += 1
file['input'].close()
table.data_rewind()
idx = 0
while table.data_read(): # read next data line of ASCII table
(x,y,z) = location(idx,res) # figure out (x,y,z) position from line count
idx += 1
if not options.noShape: table.data_append(shape_mismatch[x,y,z])
if not options.noVolume: table.data_append(volume_mismatch[x,y,z])
table.data_write() # output processed line
# ------------------------------------------ output result ---------------------------------------
file['output'].write(output)
file['output'].close
os.rename(file['name']+'_tmp',file['name'])
table.output_flush() # just in case of buffered ASCII table
file['input'].close() # close input ASCII table
if file['name'] != 'STDIN':
file['output'].close # close output ASCII table
os.rename(file['name']+'_tmp',file['name']) # overwrite old one with tmp new

View File

@ -0,0 +1,151 @@
#!/usr/bin/env python
import os,re,sys,math,string,numpy,damask
from optparse import OptionParser, Option
# -----------------------------
class extendableOption(Option):
# -----------------------------
  """optparse Option subclass adding an 'extend' action.

  'extend' splits a comma-separated option argument and appends the pieces
  to the destination list, so the option may be given multiple times and
  the values accumulate.
  Taken from the online tutorial http://docs.python.org/library/optparse.html
  """

  ACTIONS = Option.ACTIONS + ("extend",)
  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

  def take_action(self, action, dest, opt, value, values, parser):
    if action != "extend":                                                  # every standard action: defer to the base class
      Option.take_action(self, action, dest, opt, value, values, parser)
      return
    values.ensure_value(dest, []).extend(value.split(","))                  # split on commas and accumulate in the destination list
def location(idx,res):
  """Map a linear cell index to (x,y,z) grid coordinates.

  The grid is periodic with shape *res* and x is the fastest-varying
  direction; indices beyond one period wrap around.
  """
  x =  idx                     % res[0]
  y = (idx //  res[0])         % res[1]
  z = (idx // (res[0]*res[1])) % res[2]
  return (x,y,z)
def index(location,res):
  """Map (x,y,z) grid coordinates back to the linear cell index.

  Inverse of location(); coordinates are wrapped periodically into the
  grid of shape *res* before being combined (x fastest).
  """
  (x,y,z) = location
  return (x % res[0]) + res[0]*((y % res[1]) + res[1]*(z % res[2]))
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=extendableOption, usage='%prog options [file[s]]', description = """
Add column(s) containing deformed configuration of requested column(s).
Operates on periodic ordered three-dimensional data sets.
""" + string.replace('$Id: addDivergence.py 1282 2012-02-08 12:01:38Z MPIE\p.eisenlohr $','\n','\\n')
)
parser.add_option('-c','--coordinates', dest='coords', type='string',\
help='column heading for coordinates [%default]')
parser.add_option('-d','--defgrad', dest='defgrad', type='string', \
help='heading of columns containing tensor field values')
parser.set_defaults(coords = 'ip')
parser.set_defaults(defgrad = 'f' )
(options,filenames) = parser.parse_args()
# ------------------------------------------ setup file handles ---------------------------------------
files = []
if filenames == []:
files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout})
else:
for name in filenames:
if os.path.exists(name):
files.append({'name':name, 'input':open(name), 'output':open(name+'_tmp','w')})
# ------------------------------------------ loop over input files ---------------------------------------
for file in files:
if file['name'] != 'STDIN': print file['name'],
table = damask.ASCIItable(file['input'],file['output'],False) # make unbuffered ASCII_table
table.head_read() # read ASCII header info
table.info_append(string.replace('$Id: addDivergence.py 1282 2012-02-08 12:01:38Z MPIE\p.eisenlohr $','\n','\\n') + \
'\t' + ' '.join(sys.argv[1:]))
# --------------- figure out dimension and resolution
try:
locationCol = table.labels.index('%s.x'%options.coords) # columns containing location data
except ValueError:
print 'no coordinate data found...'
continue
grid = [{},{},{}]
while table.data_read(): # read next data line of ASCII table
for j in xrange(3):
grid[j][str(table.data[locationCol+j])] = True # remember coordinate along x,y,z
res = numpy.array([len(grid[0]),\
len(grid[1]),\
len(grid[2]),],'i') # resolution is number of distinct coordinates found
geomdim = res/numpy.maximum(numpy.ones(3,'d'),res-1.0)* \
numpy.array([max(map(float,grid[0].keys()))-min(map(float,grid[0].keys())),\
max(map(float,grid[1].keys()))-min(map(float,grid[1].keys())),\
max(map(float,grid[2].keys()))-min(map(float,grid[2].keys())),\
],'d') # dimension from bounding box, corrected for cell-centeredness
if res[2] == 1:
geomdim[2] = min(geomdim[:2]/res[:2])
N = res.prod()
print '\t%s @ %s'%(geomdim,res)
# --------------- figure out columns to process
key = '1_%s' %options.defgrad
if key not in table.labels:
sys.stderr.write('column %s not found...\n'%key)
else:
defgrad = numpy.array([0.0 for i in xrange(N*9)]).reshape(list(res)+[3,3])
table.labels_append(['%s_deformed'%(coord) for coord in 'x','y','z']) # extend ASCII header with new labels
column = table.labels.index(key)
# ------------------------------------------ assemble header ---------------------------------------
table.head_write()
# ------------------------------------------ read value field ---------------------------------------
table.data_rewind()
idx = 0
while table.data_read(): # read next data line of ASCII table
(x,y,z) = location(idx,res) # figure out (x,y,z) position from line count
idx += 1
defgrad[x,y,z] = numpy.array(map(float,table.data[column:column+9]),'d').reshape(3,3)
# ------------------------------------------ process value field ----------------------------
defgrad_av = damask.core.math.tensor_avg(res,defgrad)
centroids = damask.core.math.deformed_fft(res,geomdim,defgrad_av,1.0,defgrad)
# ------------------------------------------ process data ---------------------------------------
table.data_rewind()
idx = 0
while table.data_read(): # read next data line of ASCII table
(x,y,z) = location(idx,res) # figure out (x,y,z) position from line count
idx += 1
table.data_append(list(centroids[x,y,z]))
table.data_write() # output processed line
# ------------------------------------------ output result ---------------------------------------
table.output_flush() # just in case of buffered ASCII table
file['input'].close() # close input ASCII table
if file['name'] != 'STDIN':
file['output'].close # close output ASCII table
os.rename(file['name']+'_tmp',file['name']) # overwrite old one with tmp new

View File

@ -0,0 +1,60 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys,string,re,math,numpy,random
from optparse import OptionParser, OptionGroup, Option
# -----------------------------
class extendedOption(Option):
# -----------------------------
  """optparse Option subclass providing the extra action 'extend'.

  With action='extend' a comma-separated argument is split into parts
  which are appended to the destination list; repeated occurrences of
  the option therefore accumulate values.
  Taken from the online tutorial http://docs.python.org/library/optparse.html
  """

  ACTIONS = Option.ACTIONS + ("extend",)
  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

  def take_action(self, action, dest, opt, value, values, parser):
    if action == "extend":
      parts = value.split(",")                                              # comma-separated pieces of this occurrence
      values.ensure_value(dest, []).extend(parts)
    else:
      Option.take_action(self, action, dest, opt, value, values, parser)    # fall back to standard handling
# ----------------------- MAIN -------------------------------
# Write a geom description for a continuous microstructure (e.g. taken from
# EBSD) padded by a constant layer of extra Fourier points in x and y.
# NOTE(review): indentation was reconstructed from the rendered diff.

parser = OptionParser(option_class=extendedOption, usage='%prog options [file[s]]', description = """
Construct continuous Microstructure (e.g. from EBSD) with Layer
""" + string.replace('$Id: spectral_randomSeeding.py 1423 2012-03-31 12:42:49Z MPIE\m.diehl $','\n','\\n')
)

parser.add_option('-r','--res', dest='res', type='int', nargs=2, \
                  help='Continuous Fourier Points in x, y [%default]')
parser.add_option('-l','--layer', dest='layer', type='int', nargs=2, \
                  help='Constant Layer of Fourier Points x, y [%default]')
parser.set_defaults(res=[16,16])
parser.set_defaults(layer=[0,0])

(options, filenames) = parser.parse_args()

# ------------------------------------------ setup file handles ---------------------------------------
files = []
if filenames == []:                                                         # no file given: write to STDOUT
  files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout})
else:
  for name in filenames:
    files.append({'name':name,'output':open(name+'_tmp','w')})              # NOTE(review): the '_tmp' file is never renamed afterwards -- confirm this is intended

# ------------------------------------------ loop over output files ---------------------------------------
for file in files:
  if file['name'] != 'STDIN': print file['name']

  # geom header: one header line, then the total grid size (EBSD area plus layer, one point in z)
  file['output'].write("{0:1d} {1:6s}\n".format(1,'header'))
  file['output'].write("{0:s} {1:8d} {2:s} {3:8d} {4:s}\n".format('resolution a',options.res[0]+options.layer[0],'b',options.res[1]+options.layer[1],'c 1'))

  # one output line group per row of Fourier points (y slowest)
  for y in xrange(options.res[1]+options.layer[1]):
    # leading layer strip in x; presumably res[0]*res[1]+2 is the layer microstructure index -- TODO confirm
    file['output'].write('%i copies of %i\n'%(options.layer[0],options.res[0]*options.res[1]+2))
    if (y<options.layer[1]):
      # rows inside the constant layer in y; presumably res[0]*res[1]+1 is a second layer index -- TODO confirm
      file['output'].write('%i copies of %i\n'%(options.res[0],options.res[0]*options.res[1]+1))
    else:
      # continuous microstructure row: consecutive indices of the EBSD map
      file['output'].write('%i to %i\n'%((y-options.layer[1])*options.res[0]+1,(y-options.layer[1])*options.res[0]+options.res[0]))

  file['output'].close()