Merge remote branch 'origin/development' into cmake
commit c8f85453d2

@@ -21,7 +21,7 @@ Operates on periodic three-dimensional x,y,z-ordered data sets.
 
 
 parser.add_option('-c','--coordinates',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'column heading of coordinates [%default]')
 parser.add_option('-f','--defgrad',

@@ -36,10 +36,10 @@ parser.add_option('--no-volume','-v',
                   dest = 'volume',
                   action = 'store_false',
                   help = 'omit volume mismatch')
-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                     defgrad = 'f',
                     shape = True,
                     volume = True,
                    )
 
 (options,filenames) = parser.parse_args()

@@ -64,8 +64,8 @@ for name in filenames:
   errors = []
   remarks = []
 
-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)
 
   if table.label_dimension(options.defgrad) != 9: errors.append('deformation gradient {} is not a tensor.'.format(options.defgrad))
   else: colF = table.label_index(options.defgrad)

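The recurring change in this commit is the rename of the optparse destination from 'coords' (or 'coordinates') to 'pos', which is why every later access switches from options.coords to options.pos and why the set_defaults() keyword changes with it. A minimal, self-contained optparse sketch of that mechanism follows; it is illustrative only (the sample argument is made up), not one of the DAMASK scripts:

# The 'dest' name is the attribute under which optparse stores the parsed value,
# so dest = 'pos' exposes the column label as options.pos; set_defaults() must
# use the same keyword or the default lands on a different attribute.
from optparse import OptionParser

parser = OptionParser()
parser.add_option('-c', '--coordinates',
                  dest = 'pos',                        # was: dest = 'coords'
                  type = 'string', metavar = 'string',
                  help = 'column heading of coordinates [%default]')
parser.set_defaults(pos = 'pos')                       # keyword follows the new dest

(options, args) = parser.parse_args(['-c', 'ipinitialcoord'])
print(options.pos)                                     # -> 'ipinitialcoord'
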
@@ -71,7 +71,7 @@ Deals with both vector- and tensor fields.
 """, version = scriptID)
 
 parser.add_option('-p','--pos','--periodiccellcenter',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('-v','--vector',

@@ -83,7 +83,7 @@ parser.add_option('-t','--tensor',
                   action = 'extend', metavar = '<string LIST>',
                   help = 'label(s) of tensor field values')
 
-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                    )
 
 (options,filenames) = parser.parse_args()

@@ -114,8 +114,8 @@ for name in filenames:
   remarks = []
   column = {}
 
-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)
 
   for type, data in items.iteritems():
     for what in (data['labels'] if data['labels'] is not None else []):

@@ -101,7 +101,7 @@ parser.add_option('-f',
                   help = 'column label of deformation gradient [%default]')
 parser.add_option('-p',
                   '--pos', '--position',
-                  dest = 'coords',
+                  dest = 'pos',
                   metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('--nodal',

@@ -110,7 +110,7 @@ parser.add_option('--nodal',
                   help = 'output nodal (instad of cell-centered) displacements')
 
 parser.set_defaults(defgrad = 'f',
-                    coords = 'pos',
+                    pos = 'pos',
                     nodal = False,
                    )
 

@@ -121,13 +121,14 @@ parser.set_defaults(defgrad = 'f',
 if filenames == []: filenames = [None]
 
 for name in filenames:
+  outname = (os.path.splitext(name)[0] +
+             '_nodal' +
+             os.path.splitext(name)[1]) if (options.nodal and name) else None
   try: table = damask.ASCIItable(name = name,
-                                 outname = (os.path.splitext(name)[0] +
-                                            '_nodal' +
-                                            os.path.splitext(name)[1]) if (options.nodal and name) else None,
+                                 outname = outname,
                                  buffered = False)
   except: continue
-  damask.util.report(scriptName,name)
+  damask.util.report(scriptName,'{}{}'.format(name,' --> {}'.format(outname) if outname else ''))
 
 # ------------------------------------------ read header ------------------------------------------
 

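The hunk above moves the output-file name out of the damask.ASCIItable(...) call into an outname computed once per input, so the same value can be reused in the report line ('name --> outname'). A standard-library sketch of that name construction; the helper name and sample file names are illustrative, not part of the commit:

# '_nodal' is spliced in before the extension only when nodal output of a real
# file (not stdin) is requested; otherwise no separate output name is passed.
import os

def nodal_outname(name, nodal):
    if not (nodal and name):
        return None
    root, ext = os.path.splitext(name)
    return root + '_nodal' + ext

print(nodal_outname('job.txt', nodal=True))    # -> 'job_nodal.txt'
print(nodal_outname('job.txt', nodal=False))   # -> None
print(nodal_outname(None,      nodal=True))    # -> None  (stdin case)
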
@@ -141,13 +142,13 @@ for name in filenames:
   if table.label_dimension(options.defgrad) != 9:
     errors.append('deformation gradient "{}" is not a 3x3 tensor.'.format(options.defgrad))
 
-  coordDim = table.label_dimension(options.coords)
+  coordDim = table.label_dimension(options.pos)
   if not 3 >= coordDim >= 1:
-    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
+    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
   elif coordDim < 3:
     remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
                                                                             's' if coordDim < 2 else '',
-                                                                            options.coords))
+                                                                            options.pos))
 
   if remarks != []: damask.util.croak(remarks)
   if errors != []:

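The check above accepts coordinate columns with one, two, or three dimensions and only remarks that the missing dimensions will be appended. An illustrative numpy sketch of that padding idea, assuming an (N, coordDim) array; this is not the script's own code path:

# Pad an (N, coordDim) coordinate array to three columns by appending zeros.
import numpy as np

def pad_to_3d(coords):
    coords = np.atleast_2d(np.asarray(coords, dtype=float))
    n, dim = coords.shape
    if not 1 <= dim <= 3:
        raise ValueError('coordinates need to have one, two, or three dimensions')
    return np.hstack([coords, np.zeros((n, 3 - dim))])

print(pad_to_3d([[0.5], [1.5]]))        # 1-D column -> y and z filled with 0.0
print(pad_to_3d([[0.5, 0.5, 0.5]]))     # already 3-D -> returned unchanged
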
@@ -157,7 +158,7 @@ for name in filenames:
 
 # --------------- figure out size and grid ---------------------------------------------------------
 
-  table.data_readArray([options.defgrad,options.coords])
+  table.data_readArray([options.defgrad,options.pos])
   table.data_rewind()
 
   if len(table.data.shape) < 2: table.data.shape += (1,) # expand to 2D shape

@@ -196,8 +197,8 @@ for name in filenames:
 
   table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
   table.labels_append((['{}_pos' .format(i+1) for i in xrange(3)] if options.nodal else []) +
-                      ['{}_avg({}).{}' .format(i+1,options.defgrad,options.coords) for i in xrange(3)] +
-                      ['{}_fluct({}).{}'.format(i+1,options.defgrad,options.coords) for i in xrange(3)] )
+                      ['{}_avg({}).{}' .format(i+1,options.defgrad,options.pos) for i in xrange(3)] +
+                      ['{}_fluct({}).{}'.format(i+1,options.defgrad,options.pos) for i in xrange(3)] )
   table.head_write()
 
 # ------------------------------------------ output data -------------------------------------------

@@ -57,7 +57,7 @@ Deals with both vector- and tensor-valued fields.
 """, version = scriptID)
 
 parser.add_option('-p','--pos','--periodiccellcenter',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('-v','--vector',

@@ -69,7 +69,7 @@ parser.add_option('-t','--tensor',
                   action = 'extend', metavar = '<string LIST>',
                   help = 'label(s) of tensor field values')
 
-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                    )
 
 (options,filenames) = parser.parse_args()

@@ -100,8 +100,8 @@ for name in filenames:
   remarks = []
   column = {}
 
-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)
 
   for type, data in items.iteritems():
     for what in (data['labels'] if data['labels'] is not None else []):

@@ -90,7 +90,7 @@ Add column(s) containing Euclidean distance to grain structural features: bounda
 
 parser.add_option('-p',
                   '--pos', '--position',
-                  dest = 'coords', metavar = 'string',
+                  dest = 'pos', metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('-i',
                   '--id', '--identifier',

@@ -109,7 +109,7 @@ parser.add_option('-s',
                   dest = 'scale', type = 'float', metavar = 'float',
                   help = 'voxel size [%default]')
 
-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                     id = 'texture',
                     neighborhood = 'neumann',
                     scale = 1.0,

@@ -151,10 +151,10 @@ for name in filenames:
   remarks = []
   column = {}
 
-  coordDim = table.label_dimension(options.coords)
+  coordDim = table.label_dimension(options.pos)
   if not 3 >= coordDim >= 1:
-    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
-  else: coordCol = table.label_index(options.coords)
+    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
+  else: coordCol = table.label_index(options.pos)
 
   if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
   else: idCol = table.label_index(options.id)

@@ -61,7 +61,7 @@ Deals with both vector- and scalar fields.
 """, version = scriptID)
 
 parser.add_option('-p','--pos','--periodiccellcenter',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('-v','--vector',

@@ -73,7 +73,7 @@ parser.add_option('-s','--scalar',
                   action = 'extend', metavar = '<string LIST>',
                   help = 'label(s) of scalar field values')
 
-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                    )
 
 (options,filenames) = parser.parse_args()

@@ -104,8 +104,8 @@ for name in filenames:
   remarks = []
   column = {}
 
-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)
 
   for type, data in items.iteritems():
     for what in (data['labels'] if data['labels'] is not None else []):

@@ -20,7 +20,7 @@ Average each data block of size 'packing' into single values thus reducing the f
 """, version = scriptID)
 
 parser.add_option('-c','--coordinates',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'column label of coordinates [%default]')
 parser.add_option('-p','--packing',

@@ -39,7 +39,7 @@ parser.add_option('-s', '--size',
                   dest = 'size',
                   type = 'float', nargs = 3, metavar = 'float float float',
                   help = 'size in x,y,z [autodetect]')
-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                     packing = (2,2,2),
                     shift = (0,0,0),
                     grid = (0,0,0),

@@ -75,8 +75,8 @@ for name in filenames:
   errors = []
   remarks = []
 
-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)
 
   if remarks != []: damask.util.croak(remarks)
   if errors != []:

@@ -20,7 +20,7 @@ to resolution*packing.
 """, version = scriptID)
 
 parser.add_option('-c','--coordinates',
-                  dest = 'coords', metavar = 'string',
+                  dest = 'pos', metavar = 'string',
                   help = 'column label of coordinates [%default]')
 parser.add_option('-p','--packing',
                   dest = 'packing', type = 'int', nargs = 3, metavar = 'int int int',

@@ -31,7 +31,7 @@ parser.add_option('-g','--grid',
 parser.add_option('-s','--size',
                   dest = 'dimension', type = 'float', nargs = 3, metavar = 'int int int',
                   help = 'dimension in x,y,z [autodetect]')
-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                     packing = (2,2,2),
                     grid = (0,0,0),
                     size = (0.0,0.0,0.0),

@@ -63,8 +63,8 @@ for name in filenames:
   errors = []
   remarks = []
 
-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)
 
   colElem = table.label_index('elem')
 

@@ -76,7 +76,7 @@ for name in filenames:
 
 # --------------- figure out size and grid ---------------------------------------------------------
 
-  table.data_readArray(options.coords)
+  table.data_readArray(options.pos)
   table.data_rewind()
 
   coords = [np.unique(table.data[:,i]) for i in xrange(3)]

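The unchanged context line coords = [np.unique(table.data[:,i]) for i in xrange(3)] is how these scripts recover the regular grid from x,y,z-ordered cell-center positions. A self-contained sketch of that idea on hypothetical 2x2x1 data (Python 3 range instead of xrange); the size formula is a plausible reconstruction, not quoted from the script:

# Unique coordinate values per axis give the grid; spacing times cell count gives
# an estimate of the box size along each axis (degenerate axes fall back to 1.0).
import numpy as np

data = np.array([[0.25, 0.25, 0.5],
                 [0.75, 0.25, 0.5],
                 [0.25, 0.75, 0.5],
                 [0.75, 0.75, 0.5]])

coords = [np.unique(data[:, i]) for i in range(3)]
grid   = np.array([len(c) for c in coords])
size   = np.array([(c[-1] - c[0]) * len(c) / (len(c) - 1) if len(c) > 1 else 1.0
                   for c in coords])

print(grid)   # -> [2 2 1]
print(size)   # -> [1. 1. 1.]
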
@@ -19,7 +19,7 @@ to resolution/packing.
 
 """, version = scriptID)
 
-parser.add_option('-c','--coordinates', dest='coords', type='string',\
+parser.add_option('-c','--coordinates', dest='pos', type='string',\
                   help='column heading for coordinates [%default]')
 parser.add_option('-p','--packing', dest='packing', type='int', nargs=3, \
                   help='dimension of packed group %default')

@@ -29,7 +29,7 @@ parser.add_option('-r','--resolution', dest='resolution', type='int', nargs=3,
                   help='resolution in x,y,z [autodetect]')
 parser.add_option('-d','--dimension', dest='dimension', type='float', nargs=3, \
                   help='dimension in x,y,z [autodetect]')
-parser.set_defaults(coords = 'ipinitialcoord')
+parser.set_defaults(coords = 'pos')
 parser.set_defaults(packing = [2,2,2])
 parser.set_defaults(shift = [0,0,0])
 parser.set_defaults(resolution = [0,0,0])

@@ -75,12 +75,12 @@ for file in files:
 
 # --------------- figure out size and grid ---------------------------------------------------------
   try:
-    locationCol = table.labels.index('1_%s'%options.coords) # columns containing location data
+    locationCol = table.labels.index('1_%s'%options.pos) # columns containing location data
   except ValueError:
     try:
-      locationCol = table.labels.index('%s.x'%options.coords) # columns containing location data (legacy naming scheme)
+      locationCol = table.labels.index('%s.x'%options.pos) # columns containing location data (legacy naming scheme)
     except ValueError:
-      file['croak'].write('no coordinate data (1_%s/%s.x) found...\n'%(options.coords,options.coords))
+      file['croak'].write('no coordinate data (1_%s/%s.x) found...\n'%(options.pos,options.pos))
       continue
 
   if (any(options.resolution)==0 or any(options.dimension)==0.0):

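The legacy-style script above still finds the coordinate columns by probing two label conventions, '1_<label>' (current scheme) and '<label>.x' (legacy scheme), before giving up. A compact sketch of that fallback with a plain list standing in for table.labels; the function name is illustrative:

# Try the '1_<label>' convention first, then the legacy '<label>.x' convention.
def location_column(labels, label):
    for candidate in ('1_%s' % label, '%s.x' % label):
        try:
            return labels.index(candidate)
        except ValueError:
            continue
    return None                                    # no coordinate data found

labels = ['elem', 'node', '1_pos', '2_pos', '3_pos']
print(location_column(labels, 'pos'))              # -> 2
print(location_column(labels, 'ipinitialcoord'))   # -> None
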
@@ -21,7 +21,7 @@ Generate geometry description and material configuration from position, phase, a
 """, version = scriptID)
 
 parser.add_option('--coordinates',
-                  dest = 'coordinates',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'coordinates label')
 parser.add_option('--phase',

@@ -135,11 +135,11 @@ for name in filenames:
 
 # ------------------------------------------ sanity checks ---------------------------------------
 
-  coordDim = table.label_dimension(options.coordinates)
+  coordDim = table.label_dimension(options.pos)
 
   errors = []
   if not 3 >= coordDim >= 2:
-    errors.append('coordinates "{}" need to have two or three dimensions.'.format(options.coordinates))
+    errors.append('coordinates "{}" need to have two or three dimensions.'.format(options.pos))
   if not np.all(table.label_dimension(label) == dim):
     errors.append('input "{}" needs to have dimension {}.'.format(label,dim))
   if options.phase and table.label_dimension(options.phase) != 1:

@@ -150,7 +150,7 @@ for name in filenames:
     table.close(dismiss = True)
     continue
 
-  table.data_readArray([options.coordinates] \
+  table.data_readArray([options.pos] \
                        + ([label] if isinstance(label, types.StringTypes) else label) \
                        + ([options.phase] if options.phase else []))
 