2019-01-04 04:55:28 +05:30
|
|
|
#!/usr/bin/env python3
|
2015-01-23 06:27:10 +05:30
|
|
|
|
2019-06-14 16:33:30 +05:30
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
from optparse import OptionParser, OptionGroup
|
2016-10-25 00:00:51 +05:30
|
|
|
import math # noqa
|
2019-06-14 16:33:30 +05:30
|
|
|
|
2015-01-23 06:27:10 +05:30
|
|
|
import numpy as np
|
2019-06-14 16:33:30 +05:30
|
|
|
|
2015-01-23 06:27:10 +05:30
|
|
|
import damask
|
|
|
|
|
2019-06-14 16:33:30 +05:30
|
|
|
|
2016-11-30 01:07:43 +05:30
|
|
|
def periodicAverage(coords, limits):
    """
    Centroid of coordinates within a periodic 1D domain.

    Each coordinate is mapped to an angle on the unit circle, the angular
    components are averaged, and the mean angle is mapped back into
    [limits[0], limits[1]].
    See https://en.wikipedia.org/wiki/Center_of_mass#Systems_with_periodic_boundary_conditions
    """
    span = limits[1] - limits[0]
    angles = 2.0*np.pi * (coords - limits[0])/span                      # coords --> angles in [0, 2pi)
    meanAngle = np.pi + np.arctan2(-np.sin(angles).mean(axis=0),        # mean angle in [0, 2pi); the sign flips plus
                                   -np.cos(angles).mean(axis=0))        # the pi offset avoid the atan2 branch cut at 0
    return limits[0] + meanAngle * span/(2.0*np.pi)                     # angle --> coordinate
|
2016-09-11 02:03:28 +05:30
|
|
|
|
2016-01-27 22:36:00 +05:30
|
|
|
# identification strings written into the header of processed tables
scriptName = os.path.splitext(os.path.basename(__file__))[0]    # this script's file name without extension
scriptID = ' '.join([scriptName,damask.version])                # name plus DAMASK version string
|
2015-01-23 06:27:10 +05:30
|
|
|
|
|
|
|
# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Apply a user-specified function to condense into a single row all those rows for which columns 'label' have identical values.
Output table will contain as many rows as there are different (unique) values in the grouping column(s).
Periodic domain averaging of coordinate values is supported.

Examples:
For grain averaged values, replace all rows of particular 'texture' with a single row containing their average.
{name} --label texture --function np.average data.txt
""".format(name = scriptName), version = scriptID)

# column label(s) whose (joint) values define the row groups
parser.add_option('-l','--label',
                  dest = 'label',
                  action = 'extend', metavar = '<string LIST>',
                  help = 'column label(s) for grouping rows')
# dotted name of the per-group reduction, e.g. "np.average" or "np.sum"
parser.add_option('-f','--function',
                  dest = 'function',
                  type = 'string', metavar = 'string',
                  help = 'mapping function [%default]')
# by default the grouping column(s) keep their (per-group identical) value;
# --all applies the mapping function to them as well
parser.add_option('-a','--all',
                  dest = 'all',
                  action = 'store_true',
                  help = 'apply mapping function also to grouping column(s)')

group = OptionGroup(parser, "periodic averaging", "")

# coordinate column(s) that wrap around the periodic domain and therefore
# need the circular (periodicAverage) mean instead of the plain mapping function
group.add_option ('-p','--periodic',
                  dest = 'periodic',
                  action = 'extend', metavar = '<string LIST>',
                  help = 'coordinate label(s) to average across periodic domain')
group.add_option ('--limits',
                  dest = 'boundary',
                  type = 'float', metavar = 'float float', nargs = 2,
                  help = 'min and max of periodic domain %default')

parser.add_option_group(group)

parser.set_defaults(function = 'np.average',
                    all = False,
                    label = [],
                    boundary = [0.0, 1.0])
|
2015-08-08 00:33:26 +05:30
|
|
|
|
2015-01-23 06:27:10 +05:30
|
|
|
(options,filenames) = parser.parse_args()

# Resolve options.function, given as "module.name" (e.g. "np.average"), to a
# callable: the module part is first looked up among already-bound names
# (covers pre-imported aliases such as "np" and "math") and only imported
# as a last resort.
funcModule,funcName = options.function.split('.')

try:
    mapFunction = getattr(locals().get(funcModule) or
                          globals().get(funcModule) or
                          __import__(funcModule),
                          funcName)
except (AttributeError, ImportError):           # was a bare "except:", which also masked unrelated errors
    mapFunction = None

if options.label == []:                         # BUGFIX: was "is []" -- identity against a fresh list is always False,
    parser.error('no grouping column specified.')  # so the missing-label error could never trigger
if not callable(mapFunction):                   # idiomatic replacement for hasattr(mapFunction,'__call__')
    parser.error('function "{}" is not callable.'.format(options.function))
|
2015-01-23 06:27:10 +05:30
|
|
|
|
|
|
|
|
2015-07-15 22:27:03 +05:30
|
|
|
# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]                                          # no file names given: presumably read from STDIN -- TODO confirm against damask.ASCIItable

for name in filenames:
    try:
        table = damask.ASCIItable(name = name)
    except IOError:                                                             # unreadable or missing file: skip it
        continue
    damask.util.report(scriptName,name)

# ------------------------------------------ sanity checks ---------------------------------------

    remarks = []
    errors = []

    table.head_read()
    # indices of the grouping columns; reversed so that np.lexsort (which sorts
    # by the LAST key first) treats the first-given label as the primary key
    grpColumns = table.label_index(options.label)[::-1]
    grpColumns = grpColumns[np.where(grpColumns>=0)]                            # drop labels not present in the table (negative index)

    if len(grpColumns) == 0: errors.append('no valid grouping column present.')

    if remarks != []: damask.util.croak(remarks)
    if errors != []:
        damask.util.croak(errors)
        table.close(dismiss=True)                                               # dismiss: close without writing output
        continue

# ------------------------------------------ assemble info ---------------------------------------

    table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))                 # record this script invocation in the table header
    table.head_write()

# ------------------------------------------ process data --------------------------------

    table.data_readArray()
    # column index range of the periodic coordinate(s); empty when no periodic averaging requested
    indexrange = table.label_indexrange(options.periodic) if options.periodic is not None else []
    rows,cols = table.data.shape

    table.data = table.data[np.lexsort(table.data[:,grpColumns].T)]             # sort data by grpColumn(s)
    values,index = np.unique(table.data[:,grpColumns], axis=0, return_index=True) # unique grpColumn values and their positions
    index = sorted(np.append(index,rows))                                       # add termination position
    grpTable = np.empty((len(values), cols))                                    # initialize output

    for i in range(len(values)):                                                # iterate over groups (unique values in grpColumn)
        grpTable[i] = np.apply_along_axis(mapFunction,0,table.data[index[i]:index[i+1]]) # apply (general) mapping function column-wise
        grpTable[i,indexrange] = \
            periodicAverage(table.data[index[i]:index[i+1],indexrange],options.boundary) # overwrite periodic columns with circular mean

        if not options.all: grpTable[i,grpColumns] = table.data[index[i],grpColumns] # restore grouping column value

    table.data = grpTable

# ------------------------------------------ output result -------------------------------

    table.data_writeArray()
    table.close()                                                               # close ASCII table
|