using central functionality

- Table class for table data
- grid_filters for grid related functions
parent b293498864
commit 6989679d3b
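The diffs below drop the per-script damask.ASCIItable plumbing in favour of the central damask.Table class and the grid_filters module named above. As a rough sketch of the round trip the scripts now rely on: the method names from_ASCII, get, add, and to_ASCII are taken from the diffs themselves, while the one-line '1 header' file format and the metadata string are assumptions made only for this illustration.

import sys
from io import StringIO

import damask                                  # assumes the DAMASK Python package of this commit is importable

# minimal ASCII table: header line count, label line, data rows (format assumed for illustration)
src = StringIO('1 header\n'
               'a b\n'
               '1.0 2.0\n'
               '3.0 4.0\n')

table = damask.Table.from_ASCII(src)           # file name or file-like object, as in the diffs below
a = table.get('a')                             # column 'a' as a NumPy array
table.add('twice_a', 2.0*a,                    # append a new labelled column ...
          'illustrative provenance string')    # ... plus an info string, as the scripts do with scriptID
table.to_ASCII(sys.stdout)                     # write the result, here to stdout for piped use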
@@ -121,6 +121,7 @@ parser.set_defaults(pos = 'pos',
                   )

 (options,filenames) = parser.parse_args()
+if filenames == []: filenames = [None]

 if options.type is None:
   parser.error('no feature type selected.')
@@ -137,67 +138,15 @@ for i,feature in enumerate(features):
         feature_list.append(i)                               # remember valid features
         break

-# --- loop over input files -------------------------------------------------------------------------
-
-if filenames == []: filenames = [None]
-
 for name in filenames:
-  try:    table = damask.ASCIItable(name = name, buffered = False)
-  except: continue
   damask.util.report(scriptName,name)

-# ------------------------------------------ read header ------------------------------------------
+  table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+  grid,size,origin = damask.grid_filters.cell_coord0_2_DNA(table.get(options.pos))
-
-  table.head_read()
-
-# ------------------------------------------ sanity checks ----------------------------------------
-
-  errors  = []
-  remarks = []
-
-  if not 3 >= table.label_dimension(options.pos) >= 1:
-    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
-
-  if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
-  else: idCol = table.label_index(options.id)
-
-  if remarks != []:
-    damask.util.croak(remarks)
-    remarks = []
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-
-# ------------------------------------------ assemble header ---------------------------------------
-
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  for feature in feature_list:
-    table.labels_append('ED_{}({})'.format(features[feature]['names'][0],options.id))              # extend ASCII header with new labels
-  table.head_write()
-
-# --------------- figure out size and grid ---------------------------------------------------------
-
-  table.data_readArray()
-
-  grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
-  N = grid.prod()
-
-  if N != len(table.data): errors.append('data count {} does not match grid {}.'.format(N,'x'.join(map(str,grid))))
-  else:                    remarks.append('grid: {}x{}x{}'.format(*grid))
-
-  if remarks != []: damask.util.croak(remarks)
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-
-# ------------------------------------------ process value field -----------------------------------
-
-  stack = [table.data]

   neighborhood = neighborhoods[options.neighborhood]
   diffToNeighbor = np.empty(list(grid+2)+[len(neighborhood)],'i')
-  microstructure = periodic_3Dpad(table.data[:,idCol].astype('i').reshape(grid,order='F'))
+  microstructure = periodic_3Dpad(table.get(options.id).astype('i').reshape(grid,order='F'))

   for i,p in enumerate(neighborhood):
     stencil = np.zeros((3,3,3),'i')
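With the Table in place, grid dimensions, physical size, and origin are no longer rebuilt inside the script (damask.util.coordGridAndSize plus hand-written sanity checks) but come from damask.grid_filters.cell_coord0_2_DNA. The following is a rough, self-contained illustration of the information that call supplies from regularly spaced cell-center coordinates; it is not the DAMASK implementation, and the helper name grid_size_origin is made up.

import numpy as np

def grid_size_origin(coord0):
    """Deduce grid, physical size, and origin from regular cell-center coordinates (illustration only)."""
    axes   = [np.unique(coord0[:,i]) for i in range(3)]                    # distinct cell centers per axis
    grid   = np.array([len(a) for a in axes])                              # number of cells per axis
    delta  = np.array([a[1]-a[0] if len(a) > 1 else 1.0 for a in axes])    # cell spacing (1.0 for a single layer)
    size   = grid*delta                                                    # physical box size
    origin = np.array([a[0] for a in axes]) - 0.5*delta                    # box corner: centers sit mid-cell
    return grid,size,origin

# cell centers of a 4 x 2 x 1 grid of unit cells, x varying fastest (matching the order='F' reshapes above)
x,y,z  = np.meshgrid(np.arange(4)+0.5, np.arange(2)+0.5, [0.5], indexing='ij')
coord0 = np.stack([c.reshape(-1,order='F') for c in (x,y,z)], axis=1)

print(grid_size_origin(coord0))    # (array([4, 2, 1]), array([4., 2., 1.]), array([0., 0., 0.]))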
@@ -227,14 +176,11 @@ for name in filenames:
     distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*[options.scale]*3

   distance = distance.reshape([len(feature_list),grid.prod(),1],order='F')
-  for i in range(len(feature_list)):
-    stack.append(distance[i,:])
-
-# ------------------------------------------ output result -----------------------------------------
-
-  if len(stack) > 1: table.data = np.hstack(tuple(stack))
-  table.data_writeArray('%.12g')
+  for i,feature in enumerate(feature_list):
+    table.add('ED_{}({})'.format(features[feature]['names'][0],options.id),
+              distance[i,:],
+              scriptID+' '+' '.join(sys.argv[1:]))

-# ------------------------------------------ output finalization -----------------------------------
-
-  table.close()                                               # close input ASCII table (works for stdin)
+  table.to_ASCII(sys.stdout if name is None else name)
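The Euclidean distance itself still comes from scipy's distance transform; only the output path changes (table.add plus to_ASCII instead of stacking columns for data_writeArray). A minimal sketch of that kind of distance computation on a toy two-grain microstructure: the grid, labels, and scale are made up, and the periodic padding the script applies via periodic_3Dpad is only noted in a comment.

import numpy as np
from scipy import ndimage

# toy periodic microstructure: two grains on an 8 x 8 x 8 grid
grid = (8,8,8)
microstructure = np.ones(grid,dtype=int)
microstructure[:4] = 2                                         # grain 2 fills half the box

# voxels with a differently labelled neighbor along any axis form the grain boundary
boundary = np.zeros(grid,dtype=bool)
for axis in range(3):
    for shift in (-1,1):
        boundary |= microstructure != np.roll(microstructure,shift,axis=axis)

# distance_transform_edt returns, for every nonzero voxel, the distance to the nearest zero voxel,
# so the boundary mask is inverted; the script additionally pads the volume periodically
# (periodic_3Dpad) so that distances also wrap across the box faces
distance = ndimage.distance_transform_edt(~boundary)*0.5       # 0.5 = voxel size, cf. options.scale
print(distance.min(), distance.max())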
@@ -30,7 +30,7 @@ parser.add_option('-p','--pos','--periodiccellcenter',
                   type = 'string', metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('-s','--scalar',
-                  dest = 'scalar',
+                  dest = 'labels',
                   action = 'extend', metavar = '<string LIST>',
                   help = 'label(s) of scalar field values')
 parser.add_option('-o','--order',
@@ -56,78 +56,21 @@ parser.set_defaults(pos = 'pos',
                   )

 (options,filenames) = parser.parse_args()

-if options.scalar is None:
-  parser.error('no data column specified.')
-
-# --- loop over input files ------------------------------------------------------------------------
-
 if filenames == []: filenames = [None]

+if options.labels is None: parser.error('no data column specified.')

 for name in filenames:
-  try:    table = damask.ASCIItable(name = name,buffered = False)
-  except: continue
   damask.util.report(scriptName,name)

-# ------------------------------------------ read header ------------------------------------------
+  table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+  damask.grid_filters.coord0_check(table.get(options.pos))

-  table.head_read()
+  for label in options.labels:
+    table.add('Gauss{}({})'.format(options.sigma,label),
-
-# ------------------------------------------ sanity checks ----------------------------------------
+              ndimage.filters.gaussian_filter(table.get(label).reshape((-1)),
-
-  items = {
-            'scalar': {'dim': 1, 'shape': [1], 'labels':options.scalar, 'active':[], 'column': []},
-          }
-  errors  = []
-  remarks = []
-  column = {}
-
-  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
-  else: colCoord = table.label_index(options.pos)
-
-  for type, data in items.items():
-    for what in (data['labels'] if data['labels'] is not None else []):
-      dim = table.label_dimension(what)
-      if dim != data['dim']: remarks.append('column {} is not a {}.'.format(what,type))
-      else:
-        items[type]['active'].append(what)
-        items[type]['column'].append(table.label_index(what))
-
-  if remarks != []: damask.util.croak(remarks)
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-
-# ------------------------------------------ assemble header --------------------------------------
-
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  for type, data in items.items():
-    for label in data['active']:
-      table.labels_append(['Gauss{}({})'.format(options.sigma,label)])                             # extend ASCII header with new labels
-  table.head_write()
-
-# --------------- figure out size and grid ---------------------------------------------------------
-
-  table.data_readArray()
-
-  grid,size = damask.util.coordGridAndSize(table.data[:,table.label_indexrange(options.pos)])
-
-# ------------------------------------------ process value field -----------------------------------
-
-  stack = [table.data]
-  for type, data in items.items():
-    for i,label in enumerate(data['active']):
-      stack.append(ndimage.filters.gaussian_filter(table.data[:,data['column'][i]],
                                                    options.sigma,options.order,
-                                                   mode = 'wrap' if options.periodic else 'nearest'
+                                                   mode = 'wrap' if options.periodic else 'nearest'),
-                                                   ).reshape([table.data.shape[0],1])
+              scriptID+' '+' '.join(sys.argv[1:]))
-                  )
-
-# ------------------------------------------ output result -----------------------------------------
+  table.to_ASCII(sys.stdout if name is None else name)
-
-  if len(stack) > 1: table.data = np.hstack(tuple(stack))
-  table.data_writeArray('%.12g')
-
-# ------------------------------------------ output finalization -----------------------------------
-
-  table.close()                                                                                    # close input ASCII table (works for stdin)
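The smoothing itself is untouched: scipy's Gaussian filter with mode='wrap' for periodic data and 'nearest' otherwise, which is what the -p/--periodic switch selects. A small sketch of the difference between the two modes on toy data (values and sigma chosen arbitrarily; the modern scipy.ndimage namespace is used here instead of the older ndimage.filters path the script keeps):

import numpy as np
from scipy import ndimage

values = np.array([10., 0., 0., 0., 0., 0., 0., 0.])          # spike at the domain edge

# 'wrap' treats the data as periodic, so the spike also bleeds into the far end of the array;
# 'nearest' repeats the edge value instead
periodic     = ndimage.gaussian_filter(values, sigma=1, order=0, mode='wrap')
non_periodic = ndimage.gaussian_filter(values, sigma=1, order=0, mode='nearest')

print(periodic)
print(non_periodic)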
@@ -217,6 +217,19 @@ def cell_coord0_2_DNA(coord0,ordered=True):

     return (grid,size,origin)

+
+def coord0_check(coord0):
+    """
+    Check whether coordinates lie on a regular grid
+
+    Parameters
+    ----------
+    coord0 : numpy.ndarray
+        array of undeformed cell coordinates.
+
+    """
+    cell_coord0_2_DNA(coord0,ordered=True)
+
+
 def node_coord0(grid,size,origin=np.zeros(3)):
     """
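A hedged usage sketch for the new coord0_check helper, which validates its input simply by delegating to cell_coord0_2_DNA(...,ordered=True): the x-fastest point ordering and the expectation that irregular coordinates raise an exception are assumptions based on that delegation, not something the diff shows.

import numpy as np
import damask                                     # assumes the DAMASK Python package of this commit is importable

# cell centers of a 4 x 2 x 1 grid of unit cells, x varying fastest (assumed ordering convention)
coord0 = np.array([[x+0.5, y+0.5, 0.5] for y in range(2) for x in range(4)])

damask.grid_filters.coord0_check(coord0)          # regular grid: expected to pass silently

coord0[3,0] += 0.25                               # move one point off the regular grid
try:
    damask.grid_filters.coord0_check(coord0)
except Exception as err:                          # exception type is not shown in the diff, so none is assumed
    print('not a regular grid:', err)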