Table class instead of ASCIItable

Martin Diehl 2019-12-21 20:18:33 +01:00
parent a7d60dc52a
commit 343da2e54d
4 changed files with 31 additions and 208 deletions
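
All four scripts below switch to the same pattern; as a minimal standalone sketch, assuming the damask.Table calls exactly as they appear in this commit (the column label 'f' and the provenance string are made-up examples):

import sys
from io import StringIO

import numpy as np
import damask

name = None                                                               # None: read stdin, write stdout
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
values = table.get('f')                                                   # labeled column(s) as ndarray; 'f' is hypothetical
table.add('cum_sum(f)', np.cumsum(values, 0), 'example provenance note')  # new column plus a header comment
table.to_ASCII(sys.stdout if name is None else name)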

View File

@@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@@ -22,79 +23,26 @@ Add cumulative (sum of first to current row) values for given label(s).
""", version = scriptID)
parser.add_option('-l','--label',
dest='label',
dest='labels',
action = 'extend', metavar = '<string LIST>',
help = 'columns to cumulate')
parser.add_option('-p','--product',
dest='product', action = 'store_true',
help = 'product of values instead of sum')
(options,filenames) = parser.parse_args()
if options.label is None:
parser.error('no data column(s) specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.labels is None:
parser.error('no data column(s) specified.')
for name in filenames:
try:
table = damask.ASCIItable(name = name, buffered = False)
except IOError:
continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for label in options.labels:
table.add('cum_{}({})'.format('prod' if options.product else 'sum',label),
np.cumprod(table.get(label),0) if options.product else np.cumsum(table.get(label),0),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
columns = []
dims = []
how = 'prod' if options.product else 'sum'
for what in options.label:
dim = table.label_dimension(what)
if dim < 0: remarks.append('column {} not found...'.format(what))
else:
dims.append(dim)
columns.append(table.label_index(what))
table.labels_append('cum_{}({})'.format(how,what) if dim == 1 else
['{}_cum_{}({})'.format(i+1,how,what) for i in range(dim)] ) # extend ASCII header with new labels
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
mask = []
for col,dim in zip(columns,dims): mask += range(col,col+dim) # isolate data columns to cumulate
cumulated = np.ones(len(mask)) if options.product else np.zeros(len(mask)) # prepare output field
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
if options.product:
for i,col in enumerate(mask):
cumulated[i] *= float(table.data[col]) # cumulate values (multiplication)
else:
for i,col in enumerate(mask):
cumulated[i] += float(table.data[col]) # cumulate values (addition)
table.data_append(cumulated)
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)
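
The per-row accumulation loop of the old script is replaced by a single vectorized call; np.cumsum/np.cumprod along axis 0 give the running sum/product over rows, e.g.:

import numpy as np

x = np.array([[1., 2.],
              [3., 4.],
              [5., 6.]])
print(np.cumsum(x, 0))    # running sum per column:     [[1, 2], [4, 6], [9, 12]]
print(np.cumprod(x, 0))   # running product per column: [[1, 2], [3, 8], [15, 48]]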

View File

@@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@@ -30,7 +31,7 @@ def derivative(coordinates,what):
(coordinates[0] - coordinates[1])
result[-1,:] = (what[-1,:] - what[-2,:]) / \
(coordinates[-1] - coordinates[-2])
return result
@@ -48,78 +49,26 @@ parser.add_option('-c','--coordinates',
type = 'string', metavar='string',
help = 'heading of coordinate column')
parser.add_option('-l','--label',
dest = 'label',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'heading of column(s) to differentiate')
(options,filenames) = parser.parse_args()
if options.coordinates is None:
parser.error('no coordinate column specified.')
if options.label is None:
parser.error('no data column specified.')
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
if options.coordinates is None:
parser.error('no coordinate column specified.')
if options.labels is None:
parser.error('no data column specified.')
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for label in options.labels:
table.add('d({})/d({})'.format(label,options.coordinates),
derivative(table.get(options.coordinates),table.get(label)),
scriptID+' '+' '.join(sys.argv[1:]))
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
columns = []
dims = []
if table.label_dimension(options.coordinates) != 1:
errors.append('coordinate column {} is not scalar.'.format(options.coordinates))
for what in options.label:
dim = table.label_dimension(what)
if dim < 0: remarks.append('column {} not found...'.format(what))
else:
dims.append(dim)
columns.append(table.label_index(what))
table.labels_append('d({})/d({})'.format(what,options.coordinates) if dim == 1 else
['{}_d({})/d({})'.format(i+1,what,options.coordinates) for i in range(dim)] ) # extend ASCII header with new labels
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header --------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
table.data_readArray()
mask = []
for col,dim in zip(columns,dims): mask += range(col,col+dim) # isolate data columns to differentiate
differentiated = derivative(table.data[:,table.label_index(options.coordinates)].reshape((len(table.data),1)),
table.data[:,mask]) # calculate numerical derivative
table.data = np.hstack((table.data,differentiated))
# ------------------------------------------ output result -----------------------------------------
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)
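
The derivative() helper is only partially visible in the hunk above (the two one-sided boundary rows); a hedged re-creation for reference, assuming a central difference for the interior rows:

import numpy as np

def derivative(coordinates, what):
    """Sketch only: boundary rows as in the hunk above, interior scheme assumed."""
    result = np.empty_like(what)
    result[0, :]    = (what[0, :]  - what[1, :])  / (coordinates[0]  - coordinates[1])
    result[-1, :]   = (what[-1, :] - what[-2, :]) / (coordinates[-1] - coordinates[-2])
    result[1:-1, :] = (what[2:, :] - what[:-2, :]) / (coordinates[2:] - coordinates[:-2])
    return result

x = np.linspace(0.0, 1.0, 5).reshape(-1, 1)
print(derivative(x, x**2))    # approximates d(x^2)/dx = 2x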

View File

@@ -33,49 +33,20 @@ parser.set_defaults(pos = 'pos',
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
errors = []
remarks = []
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3: remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
# ------------------------------------------ process data ---------------------------------------
table.data_readArray(options.pos)
if table.data.shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data.shape[1]),dtype='f'))) # fill coords up to 3D with zeros
Polydata = vtk.vtkPolyData()
Points = vtk.vtkPoints()
Vertices = vtk.vtkCellArray()
for p in table.data:
for p in table.get(options.pos):
pointID = Points.InsertNextPoint(p)
Vertices.InsertNextCell(1)
Vertices.InsertCellPoint(pointID)
@@ -104,5 +75,3 @@ for name in filenames:
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString())
table.close()
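
For reference, the VTK vertex construction used above as a self-contained sketch; random points stand in for table.get(options.pos), and the SetPoints/SetVerts wiring is assumed since it lies outside the shown hunk:

import numpy as np
import vtk

points   = vtk.vtkPoints()
vertices = vtk.vtkCellArray()
for p in np.random.rand(10, 3):          # stand-in for table.get(options.pos)
    pointID = points.InsertNextPoint(p)  # index of the inserted point
    vertices.InsertNextCell(1)           # one vertex cell ...
    vertices.InsertCellPoint(pointID)    # ... referencing that point

polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
polydata.SetVerts(vertices)
print(polydata.GetNumberOfPoints())      # 10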

View File

@@ -40,48 +40,14 @@ parser.set_defaults(mode = 'cell',
)
(options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = True,
readonly = True,
)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
remarks = []
errors = []
coordDim = table.label_dimension(options.pos)
if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
elif coordDim < 3: remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
's' if coordDim < 2 else '',
options.pos))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss=True)
continue
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray(options.pos)
if table.data.shape[1] < 3:
table.data = np.hstack((table.data,
np.zeros((table.data.shape[0],
3-table.data.shape[1]),dtype='f'))) # fill coords up to 3D with zeros
coords = [np.unique(table.data[:,i]) for i in range(3)]
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
coords = [np.unique(table.get(options.pos)[:,i]) for i in range(3)]
if options.mode == 'cell':
coords = [0.5 * np.array([3.0 * coords[i][0] - coords[i][0 + int(len(coords[i]) > 1)]] + \
[coords[i][j-1] + coords[i][j] for j in range(1,len(coords[i]))] + \
@@ -90,13 +56,6 @@ for name in filenames:
grid = np.array(list(map(len,coords)),'i')
N = grid.prod() if options.mode == 'point' else (grid-1).prod()
if N != len(table.data):
errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*(grid - (options.mode == 'cell')) ))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ---------------------------------------
rGrid = vtk.vtkRectilinearGrid()
@@ -135,5 +94,3 @@ for name in filenames:
writer.Write()
if name is None: sys.stdout.write(writer.GetOutputString())
table.close()
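
The grid dimensions are still inferred from the unique coordinate values per axis; a small self-contained check of that step, using made-up regular-grid positions:

import numpy as np

# hypothetical points of a regular 2 x 3 x 2 grid
pos = np.array([[x, y, z] for z in (0.0, 2.0)
                          for y in (0.0, 0.5, 1.0)
                          for x in (0.0, 1.0)])

coords = [np.unique(pos[:, i]) for i in range(3)]   # distinct coordinates per axis
grid   = np.array(list(map(len, coords)), 'i')      # points per axis
print(grid)                                         # [2 3 2]
print(grid.prod(), (grid - 1).prod())               # 12 point-mode entries, 2 cell-mode entries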