#!/usr/bin/env python2
# -*- coding: UTF-8 no BOM -*-

import os,sys
import numpy as np
import scipy.ndimage
from optparse import OptionParser

import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Average each data block of size 'packing' into a single value, thereby reducing the former grid to grid/packing.

""", version = scriptID)

parser.add_option('-c','--coordinates',
                  dest = 'pos',
                  type = 'string', metavar = 'string',
                  help = 'column label of coordinates [%default]')
parser.add_option('-p','--packing',
                  dest = 'packing',
                  type = 'int', nargs = 3, metavar = 'int int int',
                  help = 'size of packed group [%default]')
parser.add_option('--shift',
                  dest = 'shift',
                  type = 'int', nargs = 3, metavar = 'int int int',
                  help = 'shift vector of packing stencil [%default]')
parser.add_option('-g', '--grid',
                  dest = 'grid',
                  type = 'int', nargs = 3, metavar = 'int int int',
                  help = 'grid in x,y,z [autodetect]')
parser.add_option('-s', '--size',
                  dest = 'size',
                  type = 'float', nargs = 3, metavar = 'float float float',
                  help = 'size in x,y,z [autodetect]')

parser.set_defaults(pos     = 'pos',
                    packing = (2,2,2),
                    shift   = (0,0,0),
                    grid    = (0,0,0),
                    size    = (0.0,0.0,0.0),
                   )

(options,filenames) = parser.parse_args()

packing = np.array(options.packing, dtype = int)
shift   = np.array(options.shift,   dtype = int)

prefix = 'averagedDown{}x{}x{}_'.format(*packing)
if any(shift != 0): prefix += 'shift{:+}{:+}{:+}_'.format(*shift)
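# For the default --packing 2 2 2 and no shift the prefix is 'averagedDown2x2x2_'; with
# e.g. --shift 1 0 0 (hypothetical values) it becomes 'averagedDown2x2x2_shift+1+0+0_'.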

# --- loop over input files ------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:    table = damask.ASCIItable(name = name,
                                    outname = os.path.join(os.path.dirname(name),
                                                           prefix+os.path.basename(name)) if name else name,
                                    buffered = False)
  except: continue
  damask.util.report(scriptName,name)

# ------------------------------------------ read header ------------------------------------------

  table.head_read()

# ------------------------------------------ sanity checks ----------------------------------------

  errors  = []
  remarks = []

  if table.label_dimension(options.pos) != 3:  errors.append('coordinates {} are not a vector.'.format(options.pos))
  else: colCoord = table.label_index(options.pos)

  if remarks != []: damask.util.croak(remarks)
  if errors  != []:
    damask.util.croak(errors)
    table.close(dismiss = True)
    continue

# ------------------------------------------ assemble header ---------------------------------------

  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
  table.head_write()

# --------------- figure out size and grid ---------------------------------------------------------

  table.data_readArray()

  if not any(options.grid) or not any(options.size):                                      # autodetect grid and size unless both are specified
    coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
    mincorner = np.array(map(min,coords))
    maxcorner = np.array(map(max,coords))
    grid   = np.array(map(len,coords),'i')
    size   = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)            # size from edge to edge = dim * n/(n-1)
    size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                  # spacing for grid==1 set to smallest among other spacings
    delta  = size/np.maximum(np.ones(3,'d'), grid)
    origin = mincorner - 0.5*delta                                                         # shift from cell center to corner
  else:
    grid   = np.array(options.grid,'i')
    size   = np.array(options.size,'d')
    origin = np.zeros(3,'d')

  packing    = np.where(grid == 1,1,packing)                                               # reset packing to 1 where grid==1
  shift      = np.where(grid == 1,0,shift)                                                 # reset shift to 0 where grid==1
  packedGrid = np.maximum(np.ones(3,'i'),grid//packing)

  averagedDown = scipy.ndimage.filters.uniform_filter( \
                  np.roll(
                  np.roll(
                  np.roll(table.data.reshape(list(grid)+[table.data.shape[1]],order = 'F'),
                          -shift[0],axis = 0),
                          -shift[1],axis = 1),
                          -shift[2],axis = 2),
                  size = list(packing) + [1],
                  mode = 'wrap',
                  origin = list(-(packing/2)) + [0])\
                  [::packing[0],::packing[1],::packing[2],:].reshape((packedGrid.prod(),table.data.shape[1]),order = 'F')
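
  # How the block averaging above works (descriptive note added for clarity, not part of the
  # original computation): the flat table is reshaped to a (grid_x, grid_y, grid_z, n_columns)
  # array in Fortran order, rolled so that --shift moves the packing stencil, and run through a
  # periodic ('wrap') uniform_filter whose window equals the packing size, so every cell then
  # holds the mean of its packing-sized neighbourhood. Slicing with strides
  # [::packing[0],::packing[1],::packing[2]] keeps one such mean per block, which equals
  # averaging each non-overlapping block directly. A 1-D analogue (hypothetical values):
  # scipy.ndimage.uniform_filter(np.array([1.,2.,3.,4.]), size=2, mode='wrap', origin=-1)[::2]
  # gives [1.5, 3.5], the means of the blocks (1,2) and (3,4).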

  table.data = averagedDown

#--- generate grid --------------------------------------------------------------------------------

  if colCoord is not None:                                                                 # regenerate coordinates of the packed grid
    x = (0.5 + shift[0] + np.arange(packedGrid[0],dtype=float))/packedGrid[0]*size[0] + origin[0]
    y = (0.5 + shift[1] + np.arange(packedGrid[1],dtype=float))/packedGrid[1]*size[1] + origin[1]
    z = (0.5 + shift[2] + np.arange(packedGrid[2],dtype=float))/packedGrid[2]*size[2] + origin[2]

    xx = np.tile(          x,                packedGrid[1]*packedGrid[2])
    yy = np.tile(np.repeat(y,packedGrid[0]  ),              packedGrid[2])
    zz =         np.repeat(z,packedGrid[0]*packedGrid[1])

    table.data[:,colCoord:colCoord+3] = np.squeeze(np.dstack((xx,yy,zz)))

# ------------------------------------------ output result -----------------------------------------

  table.data_writeArray()

# ------------------------------------------ output finalization -----------------------------------

  table.close()                                                                            # close ASCII tables
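
# A typical invocation might look like the following (script and data file names are
# illustrative assumptions, not taken from this file):
#   ./averageDown.py --coordinates pos --packing 2 2 2 postResults.txt
# which writes the reduced table next to the input as 'averagedDown2x2x2_postResults.txt'.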