#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os
import sys

import numpy as np

from optparse import OptionParser

import damask

# Identification strings embedded in the headers of generated ASCIItables
# so output files record which script (and version) produced them.
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Produces a binned grid of two columns from an ASCIItable, i.e. a two-dimensional probability density map.

""", version = scriptID)

parser.add_option('-d','--data',
                  dest = 'data',
                  type = 'string', nargs = 2, metavar = 'string string',
                  help = 'column labels containing x and y ')
parser.add_option('-w','--weight',
                  dest = 'weight',
                  type = 'string', metavar = 'string',
                  help = 'column label containing weight of (x,y) point')
parser.add_option('-b','--bins',
                  dest = 'bins',
                  type = 'int', nargs = 2, metavar = 'int int',
                  help = 'number of bins in x and y direction [%default]')
parser.add_option('-t','--type',
                  dest = 'type',
                  type = 'string', nargs = 3, metavar = 'string string string',
                  help = 'type (linear/log) of x, y, and z axis [%default]')
parser.add_option('-x','--xrange',
                  dest = 'xrange',
                  type = 'float', nargs = 2, metavar = 'float float',
                  help = 'min max limits in x direction (optional)')
parser.add_option('-y','--yrange',
                  dest = 'yrange',
                  type = 'float', nargs = 2, metavar = 'float float',
                  help = 'min max limits in y direction (optional)')
parser.add_option('-z','--zrange',
                  dest = 'zrange',
                  type = 'float', nargs = 2, metavar = 'float float',
                  help = 'min max limits in z direction (optional)')
parser.add_option('-i','--invert',
                  dest = 'invert',
                  action = 'store_true',
                  help = 'invert probability density')
parser.add_option('-r','--rownormalize',
                  dest = 'normRow',
                  action = 'store_true',
                  help = 'normalize probability density in each row')
parser.add_option('-c','--colnormalize',
                  dest = 'normCol',
                  action = 'store_true',
                  help = 'normalize probability density in each column')

# (0.0,0.0) range defaults act as "auto-scale from data" sentinels downstream.
parser.set_defaults(bins = (10,10),
                    type = ('linear','linear','linear'),
                    xrange = (0.0,0.0),
                    yrange = (0.0,0.0),
                    zrange = (0.0,0.0),
                   )

(options,filenames) = parser.parse_args()
# minmax[i] = (min,max) for x, y, z; a (0.0,0.0) pair means "derive from data".
minmax = np.array([np.array(options.xrange),
                   np.array(options.yrange),
                   np.array(options.zrange)])
grid   = np.zeros(options.bins,'f')                                                                 # bin counts (x bins × y bins)
result = np.zeros((options.bins[0],options.bins[1],3),'f')                                          # (x center, y center, normalized density)

if options.data is None: parser.error('no data columns specified.')

labels = list(options.data)

if options.weight is not None: labels += [options.weight]                                           # prevent character splitting of single string value
# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]                                                              # fall back to stdin/stdout pipe mode

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              outname = os.path.join(os.path.dirname(name),
                                                     'binned-{}-{}_'.format(*options.data) +
                                                     ('weighted-{}_'.format(options.weight) if options.weight else '') +
                                                     os.path.basename(name)) if name else name,
                              buffered = False)
  except Exception:                                                                                 # best effort: skip unopenable inputs (narrowed from bare except so Ctrl-C still works)
    continue
  damask.util.report(scriptName,name)

# ------------------------------------------ read header ------------------------------------------

  table.head_read()

# ------------------------------------------ sanity checks ----------------------------------------

  missing_labels = table.data_readArray(labels)

  if len(missing_labels) > 0:
    damask.util.croak('column{} {} not found.'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels)))
    table.close(dismiss = True)                                                                     # close without writing anything
    continue

  for c in (0,1):                                                                                   # check data minmax for x and y (c = 0 and 1)
    if (minmax[c] == 0.0).all(): minmax[c] = [table.data[:,c].min(),table.data[:,c].max()]          # auto range from data
    if options.type[c].lower() == 'log':                                                            # if log scale
      table.data[:,c] = np.log(table.data[:,c])                                                     # change x,y coordinates to log
      minmax[c] = np.log(minmax[c])                                                                 # change minmax to log, too

  delta = minmax[:,1]-minmax[:,0]

  (grid,xedges,yedges) = np.histogram2d(table.data[:,0],table.data[:,1],
                                        bins=options.bins,
                                        range=minmax[:2],
                                        weights=None if options.weight is None else table.data[:,2])

  if options.normCol:
    for x in range(options.bins[0]):
      colSum = np.sum(grid[x,:])                                                                    # renamed from 'sum' to avoid shadowing the builtin
      if colSum > 0.0:
        grid[x,:] /= colSum
  if options.normRow:
    for y in range(options.bins[1]):
      rowSum = np.sum(grid[:,y])
      if rowSum > 0.0:
        grid[:,y] /= rowSum

  if (minmax[2] == 0.0).all(): minmax[2] = [grid.min(),grid.max()]                                  # auto scale from data
  if minmax[2,0] == minmax[2,1]:                                                                    # degenerate z range: widen symmetrically
    minmax[2,0] -= 1.
    minmax[2,1] += 1.
  if (minmax[2] == 0.0).all():                                                                      # no data in grid?
    damask.util.croak('no data found on grid...')
    minmax[2,:] = np.array([0.0,1.0])                                                               # making up arbitrary z minmax
  if options.type[2].lower() == 'log':
    grid = np.log(grid)
    minmax[2] = np.log(minmax[2])

  delta[2] = minmax[2,1]-minmax[2,0]

  for x in range(options.bins[0]):
    for y in range(options.bins[1]):
      result[x,y,:] = [minmax[0,0]+delta[0]/options.bins[0]*(x+0.5),                                # bin-center x coordinate
                       minmax[1,0]+delta[1]/options.bins[1]*(y+0.5),                                # bin-center y coordinate
                       min(1.0,max(0.0,(grid[x,y]-minmax[2,0])/delta[2]))]                          # density clipped to [0,1]

  for c in (0,1):
    if options.type[c].lower() == 'log': result[:,:,c] = np.exp(result[:,:,c])                      # undo log of bin-center coordinates

  if options.invert: result[:,:,2] = 1.0 - result[:,:,2]

# --- assemble header -------------------------------------------------------------------------------

  table.info_clear()
  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))                                       # record provenance of output
  table.labels_clear()
  table.labels_append(['bin_%s'%options.data[0],'bin_%s'%options.data[1],'z'])
  table.head_write()

# --- output result ---------------------------------------------------------------------------------

  table.data = result.reshape(options.bins[0]*options.bins[1],3)
  table.data_writeArray()

  table.close()