#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Produces a binned grid of two columns from an ASCIItable, i.e. a two-dimensional probability density map.
""", version = scriptID)
parser.add_option('-d','--data',
                  dest = 'data',
                  type = 'string', nargs = 2, metavar = 'string string',
                  help = 'column labels containing x and y ')
parser.add_option('-w','--weight',
                  dest = 'weight',
                  type = 'string', metavar = 'string',
                  help = 'column label containing weight of (x,y) point')
parser.add_option('-b','--bins',
                  dest = 'bins',
                  type = 'int', nargs = 2, metavar = 'int int',
                  help = 'number of bins in x and y direction [%default]')
parser.add_option('-t','--type',
                  dest = 'type',
                  type = 'string', nargs = 3, metavar = 'string string string',
                  help = 'type (linear/log) of x, y, and z axis [%default]')
parser.add_option('-x','--xrange',
                  dest = 'xrange',
                  type = 'float', nargs = 2, metavar = 'float float',
                  help = 'min max limits in x direction (optional)')
parser.add_option('-y','--yrange',
                  dest = 'yrange',
                  type = 'float', nargs = 2, metavar = 'float float',
                  help = 'min max limits in y direction (optional)')
parser.add_option('-z','--zrange',
                  dest = 'zrange',
                  type = 'float', nargs = 2, metavar = 'float float',
                  help = 'min max limits in z direction (optional)')
parser.add_option('-i','--invert',
                  dest = 'invert',
                  action = 'store_true',
                  help = 'invert probability density')
parser.add_option('-r','--rownormalize',
                  dest = 'normRow',
                  action = 'store_true',
                  help = 'normalize probability density in each row')
parser.add_option('-c','--colnormalize',
                  dest = 'normCol',
                  action = 'store_true',
                  help = 'normalize probability density in each column')

parser.set_defaults(bins = (10,10),
                    type = ('linear','linear','linear'),
                    xrange = (0.0,0.0),
                    yrange = (0.0,0.0),
                    zrange = (0.0,0.0),
                   )
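
# Example invocation (the column labels 'sigma', 'epsilon', the weight column 'volume',
# and the file name 'data.txt' below are placeholders, not part of this script):
#   <this script> --data sigma epsilon --bins 20 20 --type linear linear log --weight volume data.txt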
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
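# (x,y,z) range limits; the default (0.0,0.0) means auto-scale from the data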
minmax = np.array([options.xrange,options.yrange,options.zrange])
result = np.empty((options.bins[0],options.bins[1],3),'f')
if options.data is None: parser.error('no data columns specified.')
for name in filenames:
    damask.util.report(scriptName,name)

    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
    data = np.hstack((table.get(options.data[0]),table.get(options.data[1])))

    for c in (0,1):                                                 # check data minmax for x and y (c = 0 and 1)
        if (minmax[c] == 0.0).all(): minmax[c] = [data[:,c].min(),data[:,c].max()]
        if options.type[c].lower() == 'log':                        # if log scale
            data[:,c] = np.log(data[:,c])                           # change x,y coordinates to log
            minmax[c] = np.log(minmax[c])                           # change minmax to log, too

    delta = minmax[:,1]-minmax[:,0]

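    # bin the (x,y) point cloud; each point may be weighted by the --weight column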
    (grid,xedges,yedges) = np.histogram2d(data[:,0],data[:,1],
                                          bins=options.bins,
                                          range=minmax[:2],
                                          weights=table.get(options.weight) if options.weight else None)

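    # optionally normalize the binned density within each column (--colnormalize) or row (--rownormalize)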
    if options.normCol:
        for x in range(options.bins[0]):
            sum = np.sum(grid[x,:])
            if sum > 0.0:
                grid[x,:] /= sum
    if options.normRow:
        for y in range(options.bins[1]):
            sum = np.sum(grid[:,y])
            if sum > 0.0:
                grid[:,y] /= sum

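    # determine z (density) limits: auto-scale from the binned data unless --zrange was given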
    if (minmax[2] == 0.0).all(): minmax[2] = [grid.min(),grid.max()]    # auto scale from data
    if minmax[2,0] == minmax[2,1]:
        minmax[2,0] -= 1.
        minmax[2,1] += 1.
    if (minmax[2] == 0.0).all():                                        # no data in grid?
        damask.util.croak('no data found on grid...')
        minmax[2,:] = np.array([0.0,1.0])                               # making up arbitrary z minmax
    if options.type[2].lower() == 'log':
        grid = np.log(grid)
        minmax[2] = np.log(minmax[2])

    delta[2] = minmax[2,1]-minmax[2,0]

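    # assemble the output grid: bin-center coordinates in x and y plus the density scaled/clipped to [0,1]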
    for x in range(options.bins[0]):
        for y in range(options.bins[1]):
            result[x,y,:] = [minmax[0,0]+delta[0]/options.bins[0]*(x+0.5),
                             minmax[1,0]+delta[1]/options.bins[1]*(y+0.5),
                             np.clip((grid[x,y]-minmax[2,0])/delta[2],0.0,1.0)]

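    # map log-scaled x/y bin centers back to the original coordinates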
    for c in (0,1):
        if options.type[c].lower() == 'log': result[:,:,c] = np.exp(result[:,:,c])

    if options.invert: result[:,:,2] = 1.0 - result[:,:,2]

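    # write the binned table: file input gets a 'binned-<x>-<y>_' (and 'weighted-<w>_') prefix, stdin input goes to stdout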
    comments = scriptID + '\t' + ' '.join(sys.argv[1:])
    shapes = {'bin_%s'%options.data[0]:(1,),'bin_%s'%options.data[1]:(1,),'z':(1,)}
    table = damask.Table(result.reshape(options.bins[0]*options.bins[1],3),shapes,[comments])
    if name:
        outname = os.path.join(os.path.dirname(name),'binned-{}-{}_'.format(*options.data) +
                                                     ('weighted-{}_'.format(options.weight) if options.weight else '') +
                                                     os.path.basename(name))
        table.to_ASCII(outname)
    else:
        table.to_ASCII(sys.stdout)