made some more scripts working again

This commit is contained in:
parent 8ff79e1eb0
commit 08f8746182
@@ -81,7 +81,7 @@ for name in filenames:
   table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
   if options.shape: table.labels_append('shapeMismatch({})'.format(options.defgrad))
   if options.volume: table.labels_append('volMismatch({})'.format(options.defgrad))
-  table.head_write()
+#  table.head_write()
 
 # --------------- figure out size and grid ---------------------------------------------------------
 
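This hunk comments out table.head_write(), leaving the header open at this point in the script. For orientation, a minimal sketch of the kind of header that info_append/labels_append/head_write accumulate, assuming the usual layout in which the first line states the number of header lines and the last header line carries the tab-separated labels (that layout, the file name, and the option values are assumptions for illustration, not taken from this commit):

# minimal sketch of the header that info_append/labels_append/head_write assemble (layout assumed)
info   = ['someScript.py\t--defgrad f --shape --volume']        # hypothetical call recorded via info_append
labels = ['1_pos', '2_pos', '3_pos', 'shapeMismatch(f)', 'volMismatch(f)']

with open('example.txt', 'w') as out:
    out.write('{} header\n'.format(len(info) + 1))              # header line count: info lines plus the labels line
    for line in info:
        out.write(line + '\n')
    out.write('\t'.join(labels) + '\n')                         # the line head_write() would close the header with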
@@ -96,24 +96,45 @@ for name in filenames:
 
   N = grid.prod()
 
-# ------------------------------------------ process deformation gradient --------------------------
-  F = table.data[:,colF:colF+9].transpose().reshape([3,3]+grid.tolist(),order='F')
+# --------------- figure out columns to process ---------------------------------------------------
+  key = '1_%s'%options.defgrad
+  if key not in table.labels:
+    file['croak'].write('column %s not found...\n'%key)
+    continue
+  else:
+    column = table.labels.index(key) # remember columns of requested data
+
+# ------------------------------------------ assemble header ---------------------------------------
+  if options.shape: table.labels_append(['shapeMismatch(%s)' %options.defgrad])
+  if options.volume: table.labels_append(['volMismatch(%s)'%options.defgrad])
+  table.head_write()
+
+# ------------------------------------------ read deformation gradient field -----------------------
+  table.data_rewind()
+  F = np.zeros(N*9,'d').reshape([3,3]+list(grid))
+  idx = 0
+  while table.data_read():
+    (x,y,z) = damask.util.gridLocation(idx,grid) # figure out (x,y,z) position from line count
+    idx += 1
+    F[0:3,0:3,x,y,z] = np.array(map(float,table.data[column:column+9]),'d').reshape(3,3)
+  print 'hm'
   Favg = damask.core.math.tensorAvg(F)
   centres = damask.core.mesh.deformedCoordsFFT(size,F,Favg,[1.0,1.0,1.0])
 
   nodes = damask.core.mesh.nodesAroundCentres(size,Favg,centres)
+  if options.shape: shapeMismatch = damask.core.mesh.shapeMismatch( size,F,nodes,centres)
+  if options.volume: volumeMismatch = damask.core.mesh.volumeMismatch(size,F,nodes)
 
-  stack =[table.data]
-  if options.shape: stack.append(damask.core.mesh.shapeMismatch( size,F,nodes,centres).reshape([grid.prod(),1]))
-  if options.volume: stack.append(damask.core.mesh.volumeMismatch(size,F,nodes).reshape([grid.prod(),1]))
-
-  for i in stack:
-    print i.shape
-
-# ------------------------------------------ output result -----------------------------------------
-  if len(stack) > 1: table.data = np.hstack(tuple(stack))
-  table.data_writeArray()
+# ------------------------------------------ process data ------------------------------------------
+  table.data_rewind()
+  idx = 0
+  outputAlive = True
+  while outputAlive and table.data_read(): # read next data line of ASCII table
+    (x,y,z) = damask.util.gridLocation(idx,grid) # figure out (x,y,z) position from line count
+    idx += 1
+    if options.shape: table.data_append( shapeMismatch[x,y,z])
+    if options.volume: table.data_append(volumeMismatch[x,y,z])
+    outputAlive = table.data_write()
 
 # ------------------------------------------ output finalization -----------------------------------
 
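The restored loop reads the table one row at a time and scatters the nine deformation-gradient components into F[3,3,grid], with damask.util.gridLocation(idx,grid) turning the running line count into an (x,y,z) cell index. A standalone sketch of that mapping and scatter; gridLocation is re-implemented here purely for illustration, assuming x varies fastest (the convention suggested by the order='F' reshape in the removed line):

import numpy as np

def gridLocation(idx, grid):
    # map a running line index to an (x,y,z) cell, x varying fastest (assumption)
    return idx % grid[0], (idx // grid[0]) % grid[1], idx // (grid[0] * grid[1])

grid = np.array([4, 3, 2])
N    = grid.prod()
rows = np.random.rand(N, 9)                         # stand-in for the nine defgrad columns per table line

F = np.zeros((3, 3) + tuple(grid))
for idx in range(N):                                # mirrors: while table.data_read(): ...
    x, y, z = gridLocation(idx, grid)
    F[:, :, x, y, z] = rows[idx].reshape(3, 3)      # F[0:3,0:3,x,y,z] = ...reshape(3,3)
    assert idx == x + grid[0] * (y + grid[1] * z)   # the mapping round-trips to the line count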
@@ -94,21 +94,33 @@ for name in filenames:
 # ------------------------------------------ assemble header ---------------------------------------
 
   table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  table.labels_append(['{}_{}.{}'%(coord+1,options.defgrad,options.coords) for coord in xrange(3)]) # extend ASCII header with new labels
+  for coord in xrange(3):
+    table.labels_append(['{}_{}.{}'.format(coord+1,options.defgrad,options.coords) ]) # extend ASCII header with new labels
   table.head_write()
 
-# ------------------------------------------ process deformation gradient --------------------------
-  F = table.data[:,colF:colF+9].transpose().reshape([3,3]+list(options.dimension),order='F')
+# ------------------------------------------ read deformation gradient field -----------------------
+  table.data_rewind()
+  F = np.array([0.0 for i in xrange(N*9)]).reshape([3,3]+grid.tolist())
+  idx = 0
+  while table.data_read():
+    (x,y,z) = damask.util.gridLocation(idx,grid) # figure out (x,y,z) position from line count
+    idx += 1
+    F[0:3,0:3,x,y,z] = np.array(map(float,table.data[table.label_index(options.defgrad):\
+                                                     table.label_index(options.defgrad)+9]),'d').reshape(3,3)
+
+# ------------------------------------------ calculate coordinates ---------------------------------
   Favg = damask.core.math.tensorAvg(F)
-  centres = damask.core.mesh.deformedCoordsFFT(size,F,Favg,[1.0,1.0,1.0])
+  centroids = damask.core.mesh.deformedCoordsFFT(size,F,Favg)
 
-  stack = [table.data,centres]
-
-# ------------------------------------------ output result -----------------------------------------
-  if len(stack) > 1: table.data = np.hstack(tuple(stack))
-  table.data_writeArray()
+# ------------------------------------------ process data ------------------------------------------
+  table.data_rewind()
+  idx = 0
+  outputAlive = True
+  while outputAlive and table.data_read(): # read next data line of ASCII table
+    (x,y,z) = damask.util.gridLocation(idx,grid) # figure out (x,y,z) position from line count
+    idx += 1
+    table.data_append(list(centroids[:,x,y,z]))
+    outputAlive = table.data_write()
 
 # ------------------------------------------ output finalization -----------------------------------
 
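The coordinate script follows the same pattern: one label per centroid component and one data_append per table row. A small sketch of the label construction and the per-row append, with made-up option values ('f', 'pos') and a random array standing in for the deformedCoordsFFT result:

import numpy as np

defgrad, coords = 'f', 'pos'                                    # hypothetical option values
labels = ['{}_{}.{}'.format(c + 1, defgrad, coords) for c in range(3)]
print(labels)                                                   # ['1_f.pos', '2_f.pos', '3_f.pos']

grid      = (2, 2, 2)
centroids = np.random.rand(3, *grid)                            # stand-in for deformedCoordsFFT(size,F,Favg)

def gridLocation(idx, grid):                                    # assumed index convention, x fastest
    return idx % grid[0], (idx // grid[0]) % grid[1], idx // (grid[0] * grid[1])

rows = []
for idx in range(int(np.prod(grid))):                           # mirrors: while outputAlive and table.data_read()
    x, y, z = gridLocation(idx, grid)
    rows.append(list(centroids[:, x, y, z]))                    # table.data_append(list(centroids[:,x,y,z]))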
@@ -56,7 +56,7 @@ for name in filenames:
     table = damask.ASCIItable(name = name,
                               buffered = False)
   except: continue
-  table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
+  table.croak(damask.util.emph(scriptName)+(': '+name if name else ''))
 
 # ------------------------------------------ read header ------------------------------------------
 
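table.croak now goes through damask.util.emph instead of hard-coded escape sequences; judging from the '\033[1m'...'\033[0m' pair it replaces, emph presumably just brackets its argument in ANSI bold. A sketch of an equivalent helper (an assumption about what emph does, not its actual implementation):

def emph(what):
    # wrap a string in ANSI bold escapes, matching the codes the replaced line used (assumed equivalent)
    return '\033[1m' + str(what) + '\033[0m'

scriptName = 'example.py'                                       # hypothetical
print(emph(scriptName) + ': ' + 'someTable.txt')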
@@ -38,60 +38,51 @@ parser.set_defaults(size = (0.0,0.0,0.0))
 options.packing = np.array(options.packing)
 prefix = 'blowUp%ix%ix%i_'%(options.packing[0],options.packing[1],options.packing[2])
 
-# ------------------------------------------ setup file handles ------------------------------------
-files = []
+# --- loop over input files -------------------------------------------------------------------------
+
+if filenames == []: filenames = [None]
 
 for name in filenames:
-  if os.path.exists(name):
-    files.append({'name':name, 'input':open(name), 'output':open(name+'_tmp','w'), 'croak':sys.stderr})
-
-#--- loop over input files -------------------------------------------------------------------------
-for file in files:
-  file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
-
-  table = damask.ASCIItable(file['input'],file['output'],False) # make unbuffered ASCII_table
-  table.head_read() # read ASCII header info
+  try:
+    table = damask.ASCIItable(name = name,
+                              outname = os.path.join(os.path.dirname(name),
+                                                     prefix+ \
+                                                     os.path.basename(name)) if name else name,
+                              buffered = False)
+  except: continue
+  table.croak(damask.util.emph(scriptName)+(': '+name if name else ''))
+
+# ------------------------------------------ read header ------------------------------------------
+
+  table.head_read()
+
+# ------------------------------------------ sanity checks ----------------------------------------
+
+  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
+  else: coordCol = table.label_index(options.coords)
+
+# ------------------------------------------ assemble header --------------------------------------
+
   table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
 
 # --------------- figure out size and grid ---------------------------------------------------------
-  try:
-    elemCol = table.labels.index('elem')
-    locationCol = table.labels.index('1_%s'%options.coords) # columns containing location data
-  except ValueError:
-    try:
-      locationCol = table.labels.index('%s.x'%options.coords) # columns containing location data (legacy naming scheme)
-    except ValueError:
-      file['croak'].write('no coordinate (1_%s/%s.x) and/or elem data found...\n'%(options.coords,options.coords))
-      continue
-
-  if (any(options.grid)==0 or any(options.size)==0.0):
+  table.data_readArray()
+
   coords = [{},{},{}]
-  while table.data_read(): # read next data line of ASCII table
+  for i in xrange(len(table.data)):
     for j in xrange(3):
-      coords[j][str(table.data[locationCol+j])] = True # remember coordinate along x,y,z
-  grid = np.array([len(coords[0]),\
-                   len(coords[1]),\
-                   len(coords[2]),],'i') # resolution is number of distinct coordinates found
+      coords[j][str(table.data[i,coordCol+j])] = True
+  grid = np.array(map(len,coords),'i')
   size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
          np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
                    max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
                    max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
                   ],'d') # size from bounding box, corrected for cell-centeredness
-  origin = np.array([min(map(float,coords[0].keys())),\
-                     min(map(float,coords[1].keys())),\
-                     min(map(float,coords[2].keys())),\
-                    ],'d') - 0.5 * size / grid
-  else:
-    grid = np.array(options.grid,'i')
-    size = np.array(options.size,'d')
-    origin = np.zeros(3,'d')
-
-  for i, res in enumerate(grid):
-    if res == 1:
-      options.packing[i] = 1
-      options.shift[i] = 0
-      mask = np.ones(3,dtype=bool)
-      mask[i]=0
-      size[i] = min(size[mask]/grid[mask]) # third spacing equal to smaller of other spacing
+  size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
 
   packing = np.array(options.packing,'i')
   outSize = grid*packing
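The rewritten block reads the whole table at once, takes the number of distinct coordinate values per axis as the grid, and derives the size from the bounding box of the cell centres scaled by grid/(grid-1) to correct for cell-centredness; axes with a single cell get the smallest spacing of the remaining axes. A standalone numpy sketch of that deduction (the function name and the regular test grid are made up):

import numpy as np

def grid_and_size(coords):
    # deduce grid and physical size from cell-centre coordinates of shape (N,3)
    grid = np.array([len(np.unique(coords[:, j])) for j in range(3)], 'i')   # distinct values per axis
    span = coords.max(axis=0) - coords.min(axis=0)                           # bounding box of the centres
    size = grid / np.maximum(np.ones(3, 'd'), grid - 1.0) * span             # correct for cell-centredness
    size = np.where(grid > 1, size, min(size[grid > 1] / grid[grid > 1]))    # spacing for grid==1 axes
    return grid, size

# check on a regular 4x2x1 grid of cell centres with spacings 0.25, 0.5 and a single layer in z
x, y, z = np.meshgrid(np.arange(4) * 0.25 + 0.125,
                      np.arange(2) * 0.5  + 0.25,
                      [0.0], indexing='ij')
pts = np.column_stack([x.ravel(), y.ravel(), z.ravel()])
print(grid_and_size(pts))                                                    # expect grid [4 2 1], size [1.0 1.0 0.25]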
@@ -113,22 +104,17 @@ for file in files:
                d[1]:d[1]+packing[1],
                d[2]:d[2]+packing[2],
                : ] = np.tile(np.array(table.data_asFloat(),'d'),packing.tolist()+[1]) # tile to match blowUp voxel size
 
   elementSize = size/grid/packing
   elem = 1
   for c in xrange(outSize[2]):
     for b in xrange(outSize[1]):
       for a in xrange(outSize[0]):
-        data[a,b,c,locationCol:locationCol+3] = [a+0.5,b+0.5,c+0.5]*elementSize
-        data[a,b,c,elemCol] = elem
+        data[a,b,c,coordCol:coordCol+3] = [a+0.5,b+0.5,c+0.5]*elementSize
+        data[a,b,c,table.label_index('elem')] = elem
         table.data = data[a,b,c,:].tolist()
         outputAlive = table.data_write() # output processed line
         elem += 1
 
-# ------------------------------------------ output result -----------------------------------------
-  outputAlive and table.output_flush() # just in case of buffered ASCII table
+# ------------------------------------------ output finalization -----------------------------------
 
-  table.input_close() # close input ASCII table
-  table.output_close() # close output ASCII table
-  os.rename(file['name']+'_tmp',\
-            os.path.join(os.path.dirname(file['name']),prefix+os.path.basename(file['name'])))
+  table.close() # close input ASCII table (works for stdin)
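The finalization collapses the per-file '_tmp' rename and the separate input/output closes into a single table.close(); inside the loop, each fine cell receives coordinates at (index+0.5)*elementSize and a running element id, with the a index varying fastest. A small sketch of that numbering, independent of the ASCIItable machinery (all names illustrative):

import numpy as np

grid, packing = np.array([2, 2, 1]), np.array([2, 2, 1])
size          = np.array([1.0, 1.0, 0.5])
outSize       = grid * packing
elementSize   = size / grid / packing

elem = 1
for c in range(outSize[2]):                         # a varies fastest, as in the loops above
    for b in range(outSize[1]):
        for a in range(outSize[0]):
            centre = (np.array([a, b, c]) + 0.5) * elementSize
            print(elem, centre)                     # the script writes coordinates plus the element id per line
            elem += 1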