migrating ASCIItable to Table class
parent be319c5a83
commit 05b2c80430
@@ -49,6 +49,7 @@ parser.set_defaults(pos = 'pos',
 )

 (options,filenames) = parser.parse_args()
+if filenames == []: filenames = [None]

 packing = np.array(options.packing,dtype = int)
 shift = np.array(options.shift, dtype = int)
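The stdin fallback if filenames == []: filenames = [None] moves up next to the option parsing: with the Table class there is no per-file ASCIItable object to open inside the loop any more, and a name of None simply means read from stdin and write to stdout. A minimal sketch of that convention (the print is illustrative only):

filenames = []                                  # what parse_args() returns with no positional arguments
if filenames == []: filenames = [None]

for name in filenames:                          # a single pass with name=None, i.e. stdin -> stdout
    source = 'stdin' if name is None else name
    print('processing', source)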
@@ -56,47 +57,14 @@ shift = np.array(options.shift, dtype = int)
 prefix = 'averagedDown{}x{}x{}_'.format(*packing)
 if any(shift != 0): prefix += 'shift{:+}{:+}{:+}_'.format(*shift)

-# --- loop over input files ------------------------------------------------------------------------

-if filenames == []: filenames = [None]
-
 for name in filenames:
-    try: table = damask.ASCIItable(name = name,
-                                   outname = os.path.join(os.path.dirname(name),
-                                                          prefix+os.path.basename(name)) if name else name,
-                                   buffered = False)
-    except IOError:
-        continue
     damask.util.report(scriptName,name)

-# ------------------------------------------ read header ------------------------------------------
-
-    table.head_read()
-
-# ------------------------------------------ sanity checks ----------------------------------------
-
-    errors = []
-    remarks = []
-
-    if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)

-    if remarks != []: damask.util.croak(remarks)
-    if errors != []:
-        damask.util.croak(errors)
-        table.close(dismiss = True)
-        continue
-
-# ------------------------------------------ assemble header ---------------------------------------
-
-    table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-    table.head_write()
-
-# --------------- figure out size and grid ---------------------------------------------------------
-
-    table.data_readArray()
-
     if (options.grid is None or options.size is None):
-        grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.data[:,table.label_indexrange(options.pos)])
+        grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.get(options.pos))
     else:
         grid = np.array(options.grid,'i')
         size = np.array(options.size,'d')
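The whole ASCIItable ritual (head_read, per-column sanity checks, croak/close on error, head_write, data_readArray) collapses into one Table.from_ASCII call, and the grid geometry is recovered from the coordinate column via grid_filters. The sketch below round-trips cell-centre coordinates through a Table to show the two helpers the new code relies on; the 4x3x2 grid, unit size, and the {'pos': (3,)} shapes dictionary are assumptions for illustration, not values taken from the commit.

import numpy as np
import damask

grid   = np.array([4,3,2])
size   = np.ones(3)
origin = np.zeros(3)

# regular cell-centre coordinates of that grid, flattened to one point per row
coords = damask.grid_filters.cell_coord0(grid,size,origin).reshape((-1,3))

# a Table built the same way as in the diff: (data, shapes[, comments]);
# shapes is assumed to map each label to its per-point shape
table = damask.Table(coords,{'pos':(3,)})

# grid, physical size, and origin recovered from the coordinate column --
# this is what replaces the old data_readArray/label_indexrange bookkeeping
grid2,size2,origin2 = damask.grid_filters.cell_coord0_2_DNA(table.get('pos'))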
@@ -105,27 +73,25 @@ for name in filenames:
     shift = np.where(grid == 1,0,shift)                                                            # reset shift to 0 where grid==1
     packedGrid = np.maximum(np.ones(3,'i'),grid//packing)

+    data = table.data.values.reshape(tuple(grid)+(-1,),order = 'F')
     averagedDown = scipy.ndimage.filters.uniform_filter( \
                     np.roll(
                     np.roll(
-                    np.roll(table.data.reshape(list(grid)+[table.data.shape[1]],order = 'F'),
+                    np.roll(data,
                             -shift[0],axis = 0),
                             -shift[1],axis = 1),
                             -shift[2],axis = 2),
                     size = list(packing) + [1],
                     mode = 'wrap',
                     origin = list(-(packing//2)) + [0])\
-                    [::packing[0],::packing[1],::packing[2],:].reshape((packedGrid.prod(),table.data.shape[1]),order = 'F')
+                    [::packing[0],::packing[1],::packing[2],:].reshape((packedGrid.prod(),-1),order = 'F')


-    table.data = averagedDown
+    table = damask.Table(averagedDown,table.shapes,table.comments)

-#--- generate grid --------------------------------------------------------------------------------
-
     coords = damask.grid_filters.cell_coord0(packedGrid,size,shift/packedGrid*size+origin)
-    table.data[:,table.label_indexrange(options.pos)] = coords.reshape((-1,3))
+    table.set(options.pos, coords.reshape((-1,3)))


-# ------------------------------------------ output finalization -----------------------------------
-    table.data_writeArray()
-    table.close()
+    outname = os.path.join(os.path.dirname(name),prefix+os.path.basename(name))
+    table.to_ASCII(sys.stdout if name is None else outname)
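The averaging itself is untouched: the flat per-point data is viewed as a grid (now through table.data.values, since Table keeps its data in a pandas-style frame), block-averaged with a periodically wrapped uniform filter, and subsampled; only the read/write plumbing changes. For the common case of zero shift and a grid that the packing divides evenly, the same down-averaging can be written as a plain reshape-and-mean; the NumPy sketch below uses a synthetic 4x4x4 grid with 2 components and packing 2x2x2 purely for illustration.

import numpy as np

grid    = np.array([4,4,4])
packing = np.array([2,2,2])

data = np.random.rand(*grid,2)                    # synthetic per-cell data with 2 components

# split every grid axis into (blocks, cells per block) and average over the block axes
packedGrid   = grid//packing
averagedDown = data.reshape(packedGrid[0],packing[0],
                            packedGrid[1],packing[1],
                            packedGrid[2],packing[2],-1).mean(axis=(1,3,5))

assert averagedDown.shape == tuple(packedGrid)+(2,)

The uniform_filter/np.roll form in the script generalizes this to non-zero shifts with periodic wrap-around. The final hunk applies the same migration to the companion script that writes blowUp-prefixed output.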
@@ -43,65 +43,29 @@ parser.set_defaults(pos = 'pos',
 )

 (options,filenames) = parser.parse_args()
+if filenames == []: filenames = [None]

 options.packing = np.array(options.packing)
 prefix = 'blowUp{}x{}x{}_'.format(*options.packing)

-# --- loop over input files -------------------------------------------------------------------------

-if filenames == []: filenames = [None]
-
 for name in filenames:
-    try: table = damask.ASCIItable(name = name,
-                                   outname = os.path.join(os.path.dirname(name),
-                                                          prefix+os.path.basename(name)) if name else name,
-                                   buffered = False)
-    except IOError:
-        continue
     damask.util.report(scriptName,name)

-# ------------------------------------------ read header ------------------------------------------
-
-    table.head_read()
-
-# ------------------------------------------ sanity checks ----------------------------------------
-
-    errors = []
-    remarks = []
+    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.get(options.pos))

-    if table.label_dimension(options.pos) != 3: errors.append('coordinates "{}" are not a vector.'.format(options.pos))
-
-    colElem = table.label_index('elem')
-
-    if remarks != []: damask.util.croak(remarks)
-    if errors != []:
-        damask.util.croak(errors)
-        table.close(dismiss = True)
-        continue
-
-# --------------- figure out size and grid ---------------------------------------------------------
-
-    table.data_readArray(options.pos)
-    table.data_rewind()
-
-    grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.data)
-
     packing = np.array(options.packing,'i')
     outSize = grid*packing

-# ------------------------------------------ assemble header --------------------------------------
+    data = table.data.values.reshape(tuple(grid)+(-1,))
+    blownUp = ndimage.interpolation.zoom(data,tuple(packing)+(1,),order=0,mode='nearest').reshape((outSize.prod(),-1))

-    table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-    table.head_write()
+    table = damask.Table(blownUp,table.shapes,table.comments)

-# ------------------------------------------ process data -------------------------------------------
-    table.data_readArray()
-    data = table.data.reshape(tuple(grid)+(-1,))
-    table.data = ndimage.interpolation.zoom(data,tuple(packing)+(1,),order=0,mode='nearest').reshape((outSize.prod(),-1))
     coords = damask.grid_filters.cell_coord0(outSize,size,origin)
-    table.data[:,table.label_indexrange(options.pos)] = coords.reshape((-1,3))
-    table.data[:,table.label_index('elem')] = np.arange(1,outSize.prod()+1)
+    table.set(options.pos,coords.reshape((-1,3)))
+    table.set('elem',np.arange(1,outSize.prod()+1))

-# ------------------------------------------ output finalization -----------------------------------
-    table.data_writeArray()
-    table.close()
+    outname = os.path.join(os.path.dirname(name),prefix+os.path.basename(name))
+    table.to_ASCII(sys.stdout if name is None else outname)
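Here, too, the header and sanity-check scaffolding is replaced by Table.from_ASCII, table.set, and to_ASCII, while the refinement itself stays a nearest-neighbour zoom by an integer packing factor, followed by regenerating the coordinate and elem columns for the finer grid. A rough NumPy stand-in for that blow-up step, assuming integer packing and a synthetic 2x2x2 grid of 3-vectors (np.repeat is used here instead of ndimage.zoom to keep the sketch dependency-free):

import numpy as np

grid    = np.array([2,2,2])
packing = np.array([2,2,2])
outSize = grid*packing

data = np.arange(grid.prod()*3,dtype=float).reshape(tuple(grid)+(3,))   # one 3-vector per cell

# repeat every cell packing[d] times along grid axis d; the component axis is left alone
blownUp = data
for axis,reps in enumerate(packing):
    blownUp = np.repeat(blownUp,reps,axis=axis)

# element IDs renumbered for the refined grid, as the script does with table.set('elem',...)
elem = np.arange(1,outSize.prod()+1)

assert blownUp.shape == tuple(outSize)+(3,)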