Merge remote branch 'origin/development' into pheno+
commit cf5a52be22

@@ -21,7 +21,7 @@ Operates on periodic three-dimensional x,y,z-ordered data sets.

 parser.add_option('-c','--coordinates',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'column heading of coordinates [%default]')
 parser.add_option('-f','--defgrad',
@@ -36,10 +36,10 @@ parser.add_option('--no-volume','-v',
                   dest = 'volume',
                   action = 'store_false',
                   help = 'omit volume mismatch')
-parser.set_defaults(coords = 'pos',
-                    defgrad = 'f',
-                    shape = True,
-                    volume = True,
+parser.set_defaults(pos = 'pos',
+                    defgrad = 'f',
+                    shape = True,
+                    volume = True,
                    )

 (options,filenames) = parser.parse_args()
@@ -64,8 +64,8 @@ for name in filenames:
   errors  = []
   remarks = []

-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)

   if table.label_dimension(options.defgrad) != 9: errors.append('deformation gradient {} is not a tensor.'.format(options.defgrad))
   else: colF = table.label_index(options.defgrad)

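Most hunks in this commit make the same change: the optparse destination (and the matching set_defaults key) is renamed from coords to pos while the command-line flags themselves stay the same, so later code reads options.pos instead of options.coords. The following is a minimal, self-contained sketch of that pattern using plain optparse only; the option names and strings are placeholders for illustration, not taken verbatim from the commit:

#!/usr/bin/env python
# Minimal illustration of the renamed option destination (hypothetical example,
# not one of the scripts touched by this commit).
from optparse import OptionParser

parser = OptionParser(description = 'demonstrate dest = "pos" replacing dest = "coords"')
parser.add_option('-p', '--pos', '--periodiccellcenter',
                  dest = 'pos',                                  # was: dest = 'coords'
                  type = 'string', metavar = 'string',
                  help = 'label of coordinates [%default]')
parser.set_defaults(pos = 'pos')                                 # was: parser.set_defaults(coords = 'pos', ...)

(options, filenames) = parser.parse_args()
print('coordinate column label: {}'.format(options.pos))         # downstream code now uses options.pos
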
@@ -71,7 +71,7 @@ Deals with both vector- and tensor fields.
 """, version = scriptID)

 parser.add_option('-p','--pos','--periodiccellcenter',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('-v','--vector',
@@ -83,7 +83,7 @@ parser.add_option('-t','--tensor',
                   action = 'extend', metavar = '<string LIST>',
                   help = 'label(s) of tensor field values')

-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                    )

 (options,filenames) = parser.parse_args()
@@ -114,8 +114,8 @@ for name in filenames:
   remarks = []
   column = {}

-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)

   for type, data in items.iteritems():
     for what in (data['labels'] if data['labels'] is not None else []):

@@ -101,7 +101,7 @@ parser.add_option('-f',
                   help = 'column label of deformation gradient [%default]')
 parser.add_option('-p',
                   '--pos', '--position',
-                  dest = 'coords',
+                  dest = 'pos',
                   metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('--nodal',
@@ -110,7 +110,7 @@ parser.add_option('--nodal',
                   help = 'output nodal (instad of cell-centered) displacements')

 parser.set_defaults(defgrad = 'f',
-                    coords = 'pos',
+                    pos = 'pos',
                     nodal = False,
                    )

@@ -141,13 +141,13 @@ for name in filenames:
   if table.label_dimension(options.defgrad) != 9:
     errors.append('deformation gradient "{}" is not a 3x3 tensor.'.format(options.defgrad))

-  coordDim = table.label_dimension(options.coords)
+  coordDim = table.label_dimension(options.pos)
   if not 3 >= coordDim >= 1:
-    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
+    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
   elif coordDim < 3:
     remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
                                                                             's' if coordDim < 2 else '',
-                                                                            options.coords))
+                                                                            options.pos))

   if remarks != []: damask.util.croak(remarks)
   if errors != []:
@@ -157,7 +157,7 @@ for name in filenames:

 # --------------- figure out size and grid ---------------------------------------------------------

-  table.data_readArray([options.defgrad,options.coords])
+  table.data_readArray([options.defgrad,options.pos])
   table.data_rewind()

   if len(table.data.shape) < 2: table.data.shape += (1,)                                            # expand to 2D shape
@@ -196,8 +196,8 @@ for name in filenames:

   table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
   table.labels_append((['{}_pos' .format(i+1) for i in xrange(3)] if options.nodal else []) +
-                       ['{}_avg({}).{}' .format(i+1,options.defgrad,options.coords) for i in xrange(3)] +
-                       ['{}_fluct({}).{}'.format(i+1,options.defgrad,options.coords) for i in xrange(3)] )
+                       ['{}_avg({}).{}' .format(i+1,options.defgrad,options.pos) for i in xrange(3)] +
+                       ['{}_fluct({}).{}'.format(i+1,options.defgrad,options.pos) for i in xrange(3)] )
   table.head_write()

 # ------------------------------------------ output data -------------------------------------------

@@ -57,7 +57,7 @@ Deals with both vector- and tensor-valued fields.
 """, version = scriptID)

 parser.add_option('-p','--pos','--periodiccellcenter',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('-v','--vector',
@@ -69,7 +69,7 @@ parser.add_option('-t','--tensor',
                   action = 'extend', metavar = '<string LIST>',
                   help = 'label(s) of tensor field values')

-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                    )

 (options,filenames) = parser.parse_args()
@@ -100,8 +100,8 @@ for name in filenames:
   remarks = []
   column = {}

-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)

   for type, data in items.iteritems():
     for what in (data['labels'] if data['labels'] is not None else []):

@@ -90,7 +90,7 @@ Add column(s) containing Euclidean distance to grain structural features: bounda

 parser.add_option('-p',
                   '--pos', '--position',
-                  dest = 'coords', metavar = 'string',
+                  dest = 'pos', metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('-i',
                   '--id', '--identifier',
@@ -109,7 +109,7 @@ parser.add_option('-s',
                   dest = 'scale', type = 'float', metavar = 'float',
                   help = 'voxel size [%default]')

-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                     id = 'texture',
                     neighborhood = 'neumann',
                     scale = 1.0,
@@ -151,10 +151,10 @@ for name in filenames:
   remarks = []
   column = {}

-  coordDim = table.label_dimension(options.coords)
+  coordDim = table.label_dimension(options.pos)
   if not 3 >= coordDim >= 1:
-    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
-  else: coordCol = table.label_index(options.coords)
+    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
+  else: coordCol = table.label_index(options.pos)

   if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
   else: idCol = table.label_index(options.id)

@@ -61,7 +61,7 @@ Deals with both vector- and scalar fields.
 """, version = scriptID)

 parser.add_option('-p','--pos','--periodiccellcenter',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'label of coordinates [%default]')
 parser.add_option('-v','--vector',
@@ -73,7 +73,7 @@ parser.add_option('-s','--scalar',
                   action = 'extend', metavar = '<string LIST>',
                   help = 'label(s) of scalar field values')

-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                    )

 (options,filenames) = parser.parse_args()
@@ -104,8 +104,8 @@ for name in filenames:
   remarks = []
   column = {}

-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)

   for type, data in items.iteritems():
     for what in (data['labels'] if data['labels'] is not None else []):

@@ -20,7 +20,7 @@ Average each data block of size 'packing' into single values thus reducing the f
 """, version = scriptID)

 parser.add_option('-c','--coordinates',
-                  dest = 'coords',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'column label of coordinates [%default]')
 parser.add_option('-p','--packing',
@@ -39,7 +39,7 @@ parser.add_option('-s', '--size',
                   dest = 'size',
                   type = 'float', nargs = 3, metavar = 'float float float',
                   help = 'size in x,y,z [autodetect]')
-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                     packing = (2,2,2),
                     shift = (0,0,0),
                     grid = (0,0,0),
@@ -75,8 +75,8 @@ for name in filenames:
   errors  = []
   remarks = []

-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)

   if remarks != []: damask.util.croak(remarks)
   if errors != []:

@@ -20,7 +20,7 @@ to resolution*packing.
 """, version = scriptID)

 parser.add_option('-c','--coordinates',
-                  dest = 'coords', metavar = 'string',
+                  dest = 'pos', metavar = 'string',
                   help = 'column label of coordinates [%default]')
 parser.add_option('-p','--packing',
                   dest = 'packing', type = 'int', nargs = 3, metavar = 'int int int',
@@ -31,7 +31,7 @@ parser.add_option('-g','--grid',
 parser.add_option('-s','--size',
                   dest = 'dimension', type = 'float', nargs = 3, metavar = 'int int int',
                   help = 'dimension in x,y,z [autodetect]')
-parser.set_defaults(coords = 'pos',
+parser.set_defaults(pos = 'pos',
                     packing = (2,2,2),
                     grid = (0,0,0),
                     size = (0.0,0.0,0.0),
@@ -63,8 +63,8 @@ for name in filenames:
   errors  = []
   remarks = []

-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
+  if table.label_dimension(options.pos) != 3: errors.append('coordinates {} are not a vector.'.format(options.pos))
+  else: colCoord = table.label_index(options.pos)

   colElem = table.label_index('elem')

@@ -76,7 +76,7 @@ for name in filenames:

 # --------------- figure out size and grid ---------------------------------------------------------

-  table.data_readArray(options.coords)
+  table.data_readArray(options.pos)
   table.data_rewind()

   coords = [np.unique(table.data[:,i]) for i in xrange(3)]

@@ -19,7 +19,7 @@ to resolution/packing.

 """, version = scriptID)

-parser.add_option('-c','--coordinates', dest='coords', type='string',\
+parser.add_option('-c','--coordinates', dest='pos', type='string',\
                   help='column heading for coordinates [%default]')
 parser.add_option('-p','--packing', dest='packing', type='int', nargs=3, \
                   help='dimension of packed group %default')
@@ -29,7 +29,7 @@ parser.add_option('-r','--resolution', dest='resolution', type='int', nargs=3,
                   help='resolution in x,y,z [autodetect]')
 parser.add_option('-d','--dimension', dest='dimension', type='float', nargs=3, \
                   help='dimension in x,y,z [autodetect]')
-parser.set_defaults(coords = 'ipinitialcoord')
+parser.set_defaults(coords = 'pos')
 parser.set_defaults(packing = [2,2,2])
 parser.set_defaults(shift = [0,0,0])
 parser.set_defaults(resolution = [0,0,0])
@@ -75,12 +75,12 @@ for file in files:

 # --------------- figure out size and grid ---------------------------------------------------------
   try:
-    locationCol = table.labels.index('1_%s'%options.coords)                  # columns containing location data
+    locationCol = table.labels.index('1_%s'%options.pos)                     # columns containing location data
   except ValueError:
     try:
-      locationCol = table.labels.index('%s.x'%options.coords)                # columns containing location data (legacy naming scheme)
+      locationCol = table.labels.index('%s.x'%options.pos)                   # columns containing location data (legacy naming scheme)
     except ValueError:
-      file['croak'].write('no coordinate data (1_%s/%s.x) found...\n'%(options.coords,options.coords))
+      file['croak'].write('no coordinate data (1_%s/%s.x) found...\n'%(options.pos,options.pos))
       continue

   if (any(options.resolution)==0 or any(options.dimension)==0.0):

@@ -21,7 +21,7 @@ Generate geometry description and material configuration from position, phase, a
 """, version = scriptID)

 parser.add_option('--coordinates',
-                  dest = 'coordinates',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
                   help = 'coordinates label')
 parser.add_option('--phase',
@@ -135,11 +135,11 @@ for name in filenames:

 # ------------------------------------------ sanity checks ---------------------------------------

-  coordDim = table.label_dimension(options.coordinates)
+  coordDim = table.label_dimension(options.pos)

   errors = []
   if not 3 >= coordDim >= 2:
-    errors.append('coordinates "{}" need to have two or three dimensions.'.format(options.coordinates))
+    errors.append('coordinates "{}" need to have two or three dimensions.'.format(options.pos))
   if not np.all(table.label_dimension(label) == dim):
     errors.append('input "{}" needs to have dimension {}.'.format(label,dim))
   if options.phase and table.label_dimension(options.phase) != 1:
@@ -150,7 +150,7 @@ for name in filenames:
     table.close(dismiss = True)
     continue

-  table.data_readArray([options.coordinates] \
+  table.data_readArray([options.pos] \
                        + ([label] if isinstance(label, types.StringTypes) else label) \
                        + ([options.phase] if options.phase else []))

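The per-file sanity check that follows the rename is also nearly identical across these scripts: verify that the coordinate label refers to a 3-component column before looking up its starting index. Below is a rough, runnable sketch of that check; the Table class is a simplified, hypothetical stand-in and not the damask.ASCIItable API the real scripts use:

# Hypothetical stand-in mimicking the label_dimension/label_index calls seen above;
# the real scripts operate on damask.ASCIItable objects instead.
class Table(object):
    def __init__(self, labels, dimensions):
        self.labels     = labels                    # e.g. ['1_pos','2_pos','3_pos','1_f',...]
        self.dimensions = dimensions                # e.g. {'pos': 3, 'f': 9}

    def label_dimension(self, label):
        return self.dimensions.get(label, -1)       # -1 signals "label not found"

    def label_index(self, label):
        return self.labels.index('1_' + label)      # column where the labelled data starts

table  = Table(['1_pos', '2_pos', '3_pos'], {'pos': 3})
pos    = 'pos'                                      # what options.pos holds after this commit
errors = []

if table.label_dimension(pos) != 3: errors.append('coordinates {} are not a vector.'.format(pos))
else:                               colCoord = table.label_index(pos)

print(errors if errors else 'coordinates start at column {}'.format(colCoord))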