adopting further simplifications provided through the ASCIItable class.

Philip Eisenlohr 2015-08-20 19:42:05 +00:00
parent 9439cf7278
commit 63d5506388
35 changed files with 512 additions and 537 deletions

View File

@ -46,13 +46,14 @@ parser.set_defaults(coords = 'ipinitialcoord',
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
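The recurring change across these scripts is the simplified per-file loop: None replaces the literal 'STDIN' sentinel, the manual existence check and the name+'_tmp'/os.rename dance disappear, and construction failures are simply skipped. A minimal sketch of the new idiom, assuming the damask package is importable; the script name and the processing body are placeholders, and the in-place finalization is presumed to happen inside ASCIItable.close():

import sys
import damask                                        # DAMASK python package used by all of these scripts

scriptName = 'exampleScript'                         # hypothetical; the real scripts derive it from __file__

filenames = sys.argv[1:]
if filenames == []: filenames = [None]               # None now stands for STDIN/STDOUT

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,           # no explicit outname or os.path.exists check anymore;
                              buffered = False)      # presumably construction raises on unreadable input ...
  except: continue                                   # ... and the loop just skips that file
  table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))

  table.head_read()                                  # read ASCII header
  # ... per-script processing ...
  table.close()                                      # close tables (presumably finalizing in-place output)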
@ -86,18 +87,12 @@ for name in filenames:
table.data_readArray()
coords = [{},{},{}]
for i in xrange(len(table.data)):
for j in xrange(3):
coords[j][str(table.data[i,colCoord+j])] = True
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
],'d') # size from bounding box, corrected for cell-centeredness
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
mincorner = np.array(map(min,coords))
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
N = grid.prod()
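The grid/size detection above replaces the old dictionary bookkeeping with np.unique per coordinate axis: the grid is the count of distinct values, the size follows from the bounding box of cell centers scaled by n/(n-1), and axes with a single layer inherit the smallest spacing of the other axes. A numpy-only sketch with a hypothetical 4 x 2 x 1 grid of cell centers (spacings 0.25 and 0.5 along x and y):

import numpy as np

x, y, z = np.meshgrid(np.arange(4), np.arange(2), np.arange(1), indexing = 'ij')
data = np.column_stack(((x.ravel()+0.5)*0.25,                          # cell centers, spacing 0.25 along x
                        (y.ravel()+0.5)*0.5,                           # spacing 0.5 along y
                        (z.ravel()+0.5)*1.0))                          # single layer along z

coords    = [np.unique(data[:,i]) for i in range(3)]
mincorner = np.array(list(map(min,coords)))
maxcorner = np.array(list(map(max,coords)))
grid      = np.array(list(map(len,coords)),'i')                        # number of distinct values per axis
size      = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)  # edge to edge = dim * n/(n-1)
size      = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))       # grid==1: smallest other spacing

print(grid)                                                            # -> [4 2 1]
print(size)                                                            # -> [1.  1.  0.25]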
@ -115,9 +110,8 @@ for name in filenames:
# ------------------------------------------ output result -----------------------------------------
if len(stack) > 1: table.data = np.hstack(tuple(stack))
table.data_writeArray('%.12g')
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -42,13 +42,14 @@ parser.set_defaults(coords = 'ipinitialcoord',
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -75,21 +76,21 @@ for name in filenames:
table.data_readArray()
coords = [{},{},{}]
for i in xrange(len(table.data)):
for j in xrange(3):
coords[j][str(table.data[i,colCoord+j])] = True
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
],'d') # size from bounding box, corrected for cell-centeredness
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
mincorner = np.array(map(min,coords))
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
N = grid.prod()
if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid))
if errors != []:
table.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
@ -107,9 +108,8 @@ for name in filenames:
# ------------------------------------------ output result -----------------------------------------
if len(stack) > 1: table.data = np.hstack(tuple(stack))
table.data_writeArray('%.12g')
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -38,13 +38,14 @@ if options.tensor == None:
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -90,4 +91,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -127,13 +127,14 @@ toRadians = math.pi/180.0 if options.degrees else 1.0
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header -------------------------------------------
@ -296,4 +297,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -83,13 +83,14 @@ pole /= np.linalg.norm(pole)
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -135,4 +136,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -48,7 +48,8 @@ if options.map == None:
if options.asciitable != None and os.path.isfile(options.asciitable):
mappedTable = damask.ASCIItable(name = options.asciitable,buffered = False, readonly = True)
mappedTable = damask.ASCIItable(name = options.asciitable,
buffered = False, readonly = True)
mappedTable.head_read() # read ASCII header info of mapped table
missing_labels = mappedTable.data_readArray(options.label)
@ -60,13 +61,14 @@ else:
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -100,6 +102,5 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new
mappedTable.close() # close mapped input ASCII table

View File

@ -49,13 +49,14 @@ if len(options.stress+options.strain) == 0:
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -103,4 +104,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -49,13 +49,14 @@ if options.label == None:
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -99,4 +100,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -94,13 +94,14 @@ r = damask.Quaternion().fromAngleAxis(toRadians*options.rotation[0],options.rota
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -157,4 +158,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -36,13 +36,14 @@ parser.set_defaults(defgrad = 'f',
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -82,4 +83,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -81,15 +81,16 @@ toRadians = math.pi/180.0 if options.degrees else 1.0
pole = np.array(options.pole)
pole /= np.linalg.norm(pole)
# --- loop over input files ------------------------------------------------------------------------
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -139,4 +140,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -55,21 +55,17 @@ datainfo = {'len':4,
if options.frame != None: datainfo['label'] += options.frame
# --- loop over input files -------------------------------------------------------------------------
if filenames == []:
filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if name == 'STDIN':
file = {'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr}
file['croak'].write('\033[1m'+scriptName+'\033[0m\n')
else:
if not os.path.exists(name): continue
file = {'name':name, 'input':open(name), 'output':open(name+'_tmp','w'), 'croak':sys.stderr}
file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
table = damask.ASCIItable(file['input'],file['output'],buffered=False) # make unbuffered ASCII_table
table.head_read() # read ASCII header info
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
# --------------- figure out columns to process ---------------------------------------------------
active = []
@ -81,10 +77,11 @@ for name in filenames:
active.append(label)
column[label] = table.labels.index(key) # remember columns of requested data
else:
file['croak'].write('column %s not found...\n'%label)
table.croak('column %s not found...'%label)
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(['Q_%i'%(i+1) for i in xrange(4)]) # extend ASCII header with new labels [1 real, 3 imaginary components]
table.head_write()
@ -117,8 +114,4 @@ for name in filenames:
# ------------------------------------------ output result -----------------------------------------
outputAlive and table.output_flush() # just in case of buffered ASCII table
table.input_close() # close input ASCII table (works for stdin)
table.output_close() # close output ASCII table (works for stdout)
if file['name'] != 'STDIN':
os.rename(file['name']+'_tmp',file['name']) # overwrite old one with tmp new
table.close() # close ASCII tables

View File

@ -31,13 +31,14 @@ if options.tensor == None:
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -85,4 +86,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close input ASCII table (works for stdin)
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -81,13 +81,14 @@ if options.defgrad == None:
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -161,4 +162,3 @@ for name in filenames:
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -3,6 +3,7 @@
import os,sys,string
import numpy as np
import scipy.ndimage
from optparse import OptionParser
import damask
@ -19,131 +20,133 @@ Average each data block of size 'packing' into single values thus reducing the f
""", version = scriptID)
parser.add_option('-c','--coordinates',
dest='coords',
metavar='string',
help='column heading for coordinates [%default]')
dest = 'coords',
type = 'string', metavar = 'string',
help = 'column heading for coordinates [%default]')
parser.add_option('-p','--packing',
dest='packing',
type='int', nargs=3,
metavar='int int int',
help='size of packed group [%default]')
dest = 'packing',
type = 'int', nargs = 3, metavar = 'int int int',
help = 'size of packed group [%default]')
parser.add_option('--shift',
dest='shift',
type='int', nargs=3,
metavar='int int int',
help='shift vector of packing stencil [%default]')
dest = 'shift',
type = 'int', nargs = 3, metavar = 'int int int',
help = 'shift vector of packing stencil [%default]')
parser.add_option('-g', '--grid',
dest='grid',
type='int', nargs=3,
metavar='int int int',
help='grid in x,y,z [autodetect]')
parser.add_option('-s', '--size', dest='size', type='float', nargs=3, metavar='float float float',
help='size in x,y,z [autodetect]')
dest = 'grid',
type = 'int', nargs = 3, metavar = 'int int int',
help = 'grid in x,y,z [autodetect]')
parser.add_option('-s', '--size',
dest = 'size',
type = 'float', nargs = 3, metavar = 'float float float',
help = 'size in x,y,z [autodetect]')
parser.set_defaults(coords = 'ipinitialcoord',
packing = (2,2,2),
shift = (0,0,0),
grid = (0,0,0),
size = (0.0,0.0,0.0))
size = (0.0,0.0,0.0),
)
(options,filenames) = parser.parse_args()
options.packing = np.array(options.packing)
options.shift = np.array(options.shift)
packing = np.array(options.packing,dtype = int)
shift = np.array(options.shift, dtype = int)
prefix = 'averagedDown%ix%ix%i_'%(options.packing[0],options.packing[1],options.packing[2])
if np.any(options.shift != 0):
prefix += 'shift%+i%+i%+i_'%(options.shift[0],options.shift[1],options.shift[2])
prefix = 'averagedDown{}x{}x{}_'.format(*packing)
if any(shift != 0): prefix += 'shift{:+}{:+}{:+}_'.format(*shift)
# --- loop over input files -------------------------------------------------------------------------
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = prefix+name,
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
outname = os.path.join(os.path.dirname(name),
prefix+os.path.basename(name)) if name else name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header -------------------------------------------
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# --------------- figure out size and grid ---------------------------------------------------------
try:
elemCol = table.labels.index('elem')
locationCol = table.labels.index('1_%s'%options.coords) # columns containing location data
except ValueError:
try:
locationCol = table.labels.index('%s.x'%options.coords) # columns containing location data (legacy naming scheme)
except ValueError:
table.croak('no coordinate (1_%s/%s.x) and/or elem data found...\n'%(options.coords,options.coords))
continue
# ------------------------------------------ sanity checks ----------------------------------------
if (any(options.grid)==0 or any(options.size)==0.0):
coords = [{},{},{}]
while table.data_read(): # read next data line of ASCII table
for j in xrange(3):
coords[j][str(table.data[locationCol+j])] = True # remember coordinate along x,y,z
grid = np.array([len(coords[0]),\
len(coords[1]),\
len(coords[2]),],'i') # resolution is number of distinct coordinates found
size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
],'d') # size from bounding box, corrected for cell-centeredness
origin = np.array([min(map(float,coords[0].keys())),\
min(map(float,coords[1].keys())),\
min(map(float,coords[2].keys())),\
],'d') - 0.5 * size / grid
else:
grid = np.array(options.grid,'i')
size = np.array(options.size,'d')
origin = np.zeros(3,'d')
for i, res in enumerate(grid):
if res == 1:
options.packing[i] = 1
options.shift[i] = 0
mask = np.ones(3,dtype=bool)
mask[i]=0
size[i] = min(size[mask]/grid[mask]) # third spacing equal to smaller of other spacing
errors = []
remarks = []
colCoord = None
packing = np.array(options.packing,'i')
shift = np.array(options.shift,'i')
downSized = np.maximum(np.ones(3,'i'),grid//packing)
outSize = np.ceil(np.array(grid,'d')/np.array(packing,'d'))
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
else: colCoord = table.label_index(options.coords)
if remarks != []: table.croak(remarks)
if errors != []:
table.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# ------------------------------------------ process data ------------------------------------------
table.data_rewind()
data = np.zeros(outSize.tolist()+[len(table.labels)])
p = np.zeros(3,'i')
# --------------- figure out size and grid ---------------------------------------------------------
table.data_readArray()
if (any(options.grid) == 0 or any(options.size) == 0.0):
coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
mincorner = np.array(map(min,coords))
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
delta = size/np.maximum(np.ones(3,'d'), grid)
origin = mincorner - 0.5*delta # shift from cell center to corner
else:
grid = np.array(options.grid,'i')
size = np.array(options.size,'d')
origin = np.zeros(3,'d')
packing = np.where(grid == 1,1,packing) # reset packing to 1 where grid==1
shift = np.where(grid == 1,0,shift) # reset shift to 0 where grid==1
packedGrid = np.maximum(np.ones(3,'i'),grid//packing)
averagedDown = scipy.ndimage.filters.uniform_filter( \
np.roll(
np.roll(
np.roll(table.data.reshape(list(grid)+[table.data.shape[1]],order = 'F'),
-shift[0],axis = 0),
-shift[1],axis = 1),
-shift[2],axis = 2),
size = list(packing) + [1],
mode = 'wrap',
origin = list(-(packing/2)) + [0])\
[::packing[0],::packing[1],::packing[2],:].reshape((packedGrid.prod(),table.data.shape[1]),order = 'F')
for p[2] in xrange(grid[2]):
for p[1] in xrange(grid[1]):
for p[0] in xrange(grid[0]):
d = ((p-shift)%grid)//packing
table.data_read()
data[d[0],d[1],d[2],:] += np.array(table.data_asFloat(),'d') # convert to np array
data /= packing.prod()
table.data = averagedDown
elementSize = size/grid*packing
posOffset = (shift+[0.5,0.5,0.5])*elementSize
elem = 1
for c in xrange(downSized[2]):
for b in xrange(downSized[1]):
for a in xrange(downSized[0]):
for i,x in enumerate([a,b,c]):
data[a,b,c,locationCol+i] = posOffset[i] + x*elementSize[i] + origin[i]
data[a,b,c,elemCol] = elem
table.data = data[a,b,c,:].tolist()
outputAlive = table.data_write() # output processed line
elem += 1
#--- generate grid --------------------------------------------------------------------------------
if colCoord:
x = (0.5 + shift[0] + np.arange(packedGrid[0],dtype=float))/packedGrid[0]*size[0] + origin[0]
y = (0.5 + shift[1] + np.arange(packedGrid[1],dtype=float))/packedGrid[1]*size[1] + origin[1]
z = (0.5 + shift[2] + np.arange(packedGrid[2],dtype=float))/packedGrid[2]*size[2] + origin[2]
xx = np.tile( x, packedGrid[1]* packedGrid[2])
yy = np.tile(np.repeat(y,packedGrid[0] ),packedGrid[2])
zz = np.repeat(z,packedGrid[0]*packedGrid[1])
table.data[:,colCoord:colCoord+3] = np.squeeze(np.dstack((xx,yy,zz)))
# ------------------------------------------ output result -----------------------------------------
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
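The down-averaging above now runs through scipy.ndimage.uniform_filter instead of an explicit triple loop: a mean filter with a window of one packing block, wrapped at the boundaries and anchored at the lower corner of each window via a negative origin, followed by a strided slice, yields the per-block averages. A self-contained numpy/scipy sketch (grid, packing, and data are hypothetical) that checks this against a plain block-mean reshape:

import numpy as np
import scipy.ndimage

grid    = np.array([4,4,2])                                            # hypothetical grid (x,y,z)
packing = np.array([2,2,1])                                            # block size per direction
ncols   = 3                                                            # number of data columns
data    = np.arange(grid.prod()*ncols,dtype=float).reshape(grid.prod(),ncols)

field = data.reshape(list(grid)+[ncols],order='F')                     # point index: x fastest, z slowest

averaged = scipy.ndimage.uniform_filter(field,
                                        size   = list(packing)+[1],    # window = one packing block, columns untouched
                                        mode   = 'wrap',               # periodic, as in the script above
                                        origin = list(-(packing//2))+[0]  # anchor window at its lower corner
                                       )[::packing[0],::packing[1],::packing[2],:]  # keep one value per block

packedGrid = grid//packing
reference  = field.reshape(packedGrid[0],packing[0],
                           packedGrid[1],packing[1],
                           packedGrid[2],packing[2],ncols).mean(axis=(1,3,5))       # explicit block means

assert np.allclose(averaged,reference)                                 # both give identical block averages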

View File

@ -34,14 +34,15 @@ if options.label == None:
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name,
outname = options.label+'_averaged_'+name,
buffered = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
outname = options.label+'_averaged_'+name if name else name,
buffered = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ sanity checks ---------------------------------------

View File

@ -90,10 +90,11 @@ if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.join(os.path.dirname(name),
'binned-%s-%s_'%(options.data[0],options.data[1])+ \
('weighted-%s_'%(options.weight) if options.weight != None else '') + \
os.path.basename(name)), buffered = False)
outname = os.path.join(os.path.dirname(name),
'binned-{}-{}_'.format(*options.data)+ \
('weighted-{}_'.format(options.weight) if options.weight else '') + \
os.path.basename(name)) if name else name,
buffered = False)
except:
continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))

View File

@ -114,9 +114,9 @@ if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = options.label != None,
readonly = True)
buffered = False,
labeled = options.label != None,
readonly = True)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
@ -174,7 +174,7 @@ for name in filenames:
# ------------------------------------------ output result -----------------------------------------
im.save(sys.stdout if name else
im.save(sys.stdout if not name else
os.path.splitext(name)[0]+ \
('' if options.label == None else '_'+options.label)+ \
'.png',

View File

@ -106,16 +106,16 @@ theColors = np.uint8(np.array(theMap.export(format='list',steps=256))*255)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name,
outname = None,
buffered = False,
labeled = options.label != None,
readonly = True)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = options.label != None,
readonly = True)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -184,10 +184,9 @@ for name in filenames:
# ------------------------------------------ output result -----------------------------------------
im.save(sys.stdout if name == 'STDIN' else
os.path.splitext(name)[0]+ \
('' if options.label == None else '_'+options.label)+ \
'.png',
im.save(os.path.splitext(name)[0]+ \
('_'+options.label if options.label else '')+ \
'.png' if name else sys.stdout,
format = "PNG")
table.close() # close ASCII table

View File

@ -74,16 +74,16 @@ if options.pixelsize > 1: (options.pixelsizex,options.pixelsizey) = [options.pix
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name,
outname = None,
buffered = False,
labeled = options.label != None,
readonly = True)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = options.label != None,
readonly = True)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# ------------------------------------------ read header ------------------------------------------
@ -126,10 +126,9 @@ for name in filenames:
# ------------------------------------------ output result -----------------------------------------
im.save(sys.stdout if name == 'STDIN' else
os.path.splitext(name)[0]+ \
('' if options.label == None else '_'+options.label)+ \
'.png',
im.save(os.path.splitext(name)[0]+ \
('_'+options.label if options.label else '')+ \
'.png' if name else sys.stdout,
format = "PNG")
table.close() # close ASCII table

View File

@ -131,8 +131,7 @@ for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False, readonly = True)
except:
continue
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
@ -173,8 +172,6 @@ for name in filenames:
p[2]+1] = 1
convoluted[i,:,:,:] = ndimage.convolve(structure,stencil)
# distance = np.ones(info['grid'],'d')
convoluted = np.sort(convoluted,axis = 0)
uniques = np.where(convoluted[0,1:-1,1:-1,1:-1] != 0, 1,0) # initialize unique value counter (exclude myself [= 0])
@ -186,20 +183,15 @@ for name in filenames:
for feature in feature_list:
try:
table = damask.ASCIItable(outname = features[feature]['alias'][0]+'_'+name,
table = damask.ASCIItable(outname = features[feature]['alias'][0]+'_'+name if name else name,
buffered = False, labeled = False)
except:
continue
except: continue
table.croak(features[feature]['alias'][0])
distance = np.where(uniques >= features[feature]['aliens'],0.0,1.0) # seed with 0.0 when enough unique neighbor IDs are present
distance = ndimage.morphology.distance_transform_edt(distance)*[options.scale]*3
# for i in xrange(len(feature_list)):
# distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*[options.scale]*3
# for i,feature in enumerate(feature_list):
info['microstructures'] = int(math.ceil(distance.max()))
#--- write header ---------------------------------------------------------------------------------
@ -215,7 +207,6 @@ for name in filenames:
])
table.labels_clear()
table.head_write()
table.output_flush()
# --- write microstructure information ------------------------------------------------------------
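The feature-distance computation above seeds cells that see enough distinct neighbor IDs with 0 and lets scipy's Euclidean distance transform fill in the distance of every other cell, scaled by the voxel size. A minimal numpy/scipy sketch with a hypothetical 5 x 5 boundary mask:

import numpy as np
from scipy import ndimage

seed = np.ones((5,5))                                                  # hypothetical mask: 1 = bulk, 0 = boundary
seed[:,2] = 0.0                                                        # boundary runs down the middle column

scale    = 0.1                                                         # hypothetical voxel edge length
distance = ndimage.distance_transform_edt(seed)*scale                  # distance of every cell to nearest boundary cell

print(distance[0])                                                     # -> [0.2 0.1 0.  0.1 0.2] (same in every row)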

View File

@ -79,8 +79,7 @@ for name in filenames:
try:
table = damask.ASCIItable(outname = name,
buffered = False, labeled = False)
except:
continue
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
@ -126,6 +125,7 @@ for name in filenames:
table.head_write()
#--- write data -----------------------------------------------------------------------------------
X = options.periods*2.0*math.pi*(np.arange(options.grid[0])+0.5)/options.grid[0]
Y = options.periods*2.0*math.pi*(np.arange(options.grid[1])+0.5)/options.grid[1]
Z = options.periods*2.0*math.pi*(np.arange(options.grid[2])+0.5)/options.grid[2]

View File

@ -96,9 +96,10 @@ input = [options.eulers != None,
options.c != None,
options.matrix != None,
options.quaternion != None,
options.microstructure != None,
]
if np.sum(input) != 1 and options.microstructure == None:
if np.sum(input) != 1:
parser.error('need either microstructure label or exactly one orientation input format.')
if options.axes != None and not set(options.axes).issubset(set(['x','+x','-x','y','+y','-y','z','+z','-z'])):
parser.error('invalid axes {} {} {}.'.format(*options.axes))
@ -107,6 +108,7 @@ if options.axes != None and not set(options.axes).issubset(set(['x','+x','-x','y
([options.a,options.b,options.c],[3,3,3],'frame'),
(options.matrix,9,'matrix'),
(options.quaternion,4,'quaternion'),
(options.microstructure,1,'microstructure'),
][np.where(input)[0][0]] # select input label that was requested
toRadians = math.pi/180.0 if options.degrees else 1.0 # rescale degrees to radians
options.tolerance *= toRadians # ensure angular tolerance in radians
@ -129,105 +131,121 @@ for name in filenames:
# ------------------------------------------ sanity checks ---------------------------------------
coordDim = table.label_dimension(options.coordinates)
errors = []
if not 3 >= table.label_dimension(options.coordinates) >= 2: # TODO need to deal with 3D case!!
if not 3 >= coordDim >= 2:
errors.append('coordinates {} need to have two or three dimensions.'.format(options.coordinates))
if not np.all(table.label_dimension(label) == dim):
errors.append('orientation {} needs to have dimension {}.'.format(label,dim))
errors.append('input {} needs to have dimension {}.'.format(label,dim))
if options.phase != None and table.label_dimension(options.phase) != 1:
errors.append('phase column {} is not scalar.'.format(options.phase))
if errors == []: # so far no errors?
table.data_readArray([options.coordinates,label]+([] if options.phase == None else [options.phase]))
if options.phase == None:
table.data = np.column_stack((table.data,np.ones(len(table.data)))) # add single phase if no phase column given
coordsX = np.unique(table.data[:,0])
coordsY = np.unique(table.data[:,1])
nX = len(coordsX)
nY = len(coordsY)
dX = (coordsX[-1]-coordsX[0])/(nX-1)
dY = (coordsY[-1]-coordsY[0])/(nY-1)
if errors != []:
table.croak(errors)
table.close(dismiss = True)
continue
if nX*nY != len(table.data) \
or np.any(np.abs(np.log10((coordsX[1:]-coordsX[:-1])/dX)) > 0.01) \
or np.any(np.abs(np.log10((coordsY[1:]-coordsY[:-1])/dY)) > 0.01):
errors.append('data is not on square grid.')
table.data_readArray([options.coordinates,label]+([] if options.phase == None else [options.phase]))
if errors != []:
if coordDim == 2:
table.data = np.insert(table.data,2,np.zeros(len(table.data)),axis=1) # add zero z coordinate for two-dimensional input
if options.phase == None:
table.data = np.column_stack((table.data,np.ones(len(table.data)))) # add single phase if no phase column given
# --------------- figure out size and grid ---------------------------------------------------------
coords = [np.unique(table.data[:,i]) for i in xrange(3)]
mincorner = np.array(map(min,coords))
maxcorner = np.array(map(max,coords))
grid = np.array(map(len,coords),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
delta = size/np.maximum(np.ones(3,'d'), grid)
origin = mincorner - 0.5*delta # shift from cell center to corner
N = grid.prod()
if N != len(table.data):
errors.append('data count {} does not match grid {}.'.format(len(table.data),' x '.join(map(repr,grid))))
if np.any(np.abs(np.log10((coords[0][1:]-coords[0][:-1])/delta[0])) > 0.01) \
or np.any(np.abs(np.log10((coords[1][1:]-coords[1][:-1])/delta[1])) > 0.01) \
or np.any(np.abs(np.log10((coords[2][1:]-coords[2][:-1])/delta[2])) > 0.01):
errors.append('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
if errors != []:
table.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ------------------------------------------
colOri = table.label_index(label)+(3-coordDim) # column(s) of orientation data (following 3 or 2 coordinates that were expanded to 3!)
if inputtype == 'microstructure':
microstructure = table.data[:,colOri]
nGrains = len(np.unique(microstructure))
else:
colPhase = colOri + np.sum(dim) # column of phase data comes after orientation
index = np.lexsort((table.data[:,0],table.data[:,1],table.data[:,2])) # index of rank when sorting x fast, z slow
rank = np.argsort(index) # rank of index
KDTree = scipy.spatial.KDTree((table.data[:,:3]-mincorner) / delta) # build KDTree with dX = dY = dZ = 1 and origin 0,0,0
colOri = table.label_index(label) # column(s) of orientation data
colPhase = colOri + np.sum(dim) # column of phase data comes after orientation
index = np.lexsort((table.data[:,0],table.data[:,1])) # index of rank when sorting x fast, y slow
rank = np.argsort(index) # rank of index
KDTree = scipy.spatial.KDTree((table.data[:,:2]-np.array([coordsX[0],coordsY[0]])) \
/ np.array([dX,dY])) # build KDTree with dX = dY = 1
microstructure = np.zeros(nX*nY,dtype = 'uint32') # initialize empty microstructure
symQuats = [] # empty list of sym equiv orientations
phases = [] # empty list of phase info
nGrains = 0 # counter for detected grains
myRank = 0 # rank of current grid point
for y in xrange(nY):
for x in xrange(nX):
if (myRank+1)%(nX*nY/100.) < 1: table.croak('.',False)
myData = table.data[index[myRank]]
mySym = options.symmetry[min(int(myData[colPhase]),len(options.symmetry))-1] # select symmetry from option (take last specified option for all with higher index)
if inputtype == 'eulers':
o = damask.Orientation(Eulers = np.array(map(float,myData[colOri:colOri+3]))*toRadians,
symmetry = mySym).reduced()
elif inputtype == 'matrix':
o = damask.Orientation(matrix = np.array([map(float,myData[colOri:colOri+9])]).reshape(3,3).transpose(),
symmetry = mySym).reduced()
elif inputtype == 'frame':
o = damask.Orientation(matrix = np.array([map(float,myData[colOri[0]:colOri[0]+3] + \
myData[colOri[1]:colOri[1]+3] + \
myData[colOri[2]:colOri[2]+3]
)]).reshape(3,3),
symmetry = mySym).reduced()
elif inputtype == 'quaternion':
o = damask.Orientation(quaternion = np.array(map(float,myData[colOri:colOri+4])),
symmetry = mySym).reduced()
microstructure = np.zeros(N,dtype = 'uint32') # initialize empty microstructure
symQuats = [] # empty list of sym equiv orientations
phases = [] # empty list of phase info
nGrains = 0 # counter for detected grains
myRank = 0 # rank of current grid point
for z in xrange(grid[2]):
for y in xrange(grid[1]):
for x in xrange(grid[0]):
if (myRank+1)%(N/100.) < 1: table.croak('.',False)
myData = table.data[index[myRank]]
mySym = options.symmetry[min(int(myData[colPhase]),len(options.symmetry))-1] # select symmetry from option (take last specified option for all with higher index)
if inputtype == 'eulers':
o = damask.Orientation(Eulers = myData[colOri:colOri+3]*toRadians,
symmetry = mySym).reduced()
elif inputtype == 'matrix':
o = damask.Orientation(matrix = myData[colOri:colOri+9].reshape(3,3).transpose(),
symmetry = mySym).reduced()
elif inputtype == 'frame':
o = damask.Orientation(matrix = np.hstack((myData[colOri[0]:colOri[0]+3],
myData[colOri[1]:colOri[1]+3],
myData[colOri[2]:colOri[2]+3],
)).reshape(3,3),
symmetry = mySym).reduced()
elif inputtype == 'quaternion':
o = damask.Orientation(quaternion = myData[colOri:colOri+4],
symmetry = mySym).reduced()
oInv = o.quaternion.conjugated()
neighbors = KDTree.query_ball_point([x,y], 3) # search points within radius
breaker = False
oInv = o.quaternion.conjugated()
neighbors = KDTree.query_ball_point([x,y,z], 3) # search points within radius
breaker = False
for n in neighbors: # check each neighbor
if myRank <= rank[n] or table.data[n,colPhase] != myData[colPhase]: continue # skip myself, anyone further ahead (cannot yet have a grain ID), and other phases
for q in symQuats[microstructure[rank[n]]-1]:
if abs((q*oInv).asAngleAxis()[0]) <= options.tolerance: # found existing orientation resembling me
microstructure[myRank] = microstructure[rank[n]]
breaker = True; break
if breaker: break
for n in neighbors: # check each neighbor
if myRank <= rank[n] or table.data[n,colPhase] != myData[colPhase]: continue # skip myself, anyone further ahead (cannot yet have a grain ID), and other phases
for q in symQuats[microstructure[rank[n]]-1]:
if abs((q*oInv).asAngleAxis()[0]) <= options.tolerance: # found existing orientation resembling me
microstructure[myRank] = microstructure[rank[n]]
breaker = True; break
if breaker: break
if microstructure[myRank] == 0: # no other orientation resembled me
nGrains += 1 # make new grain ...
microstructure[myRank] = nGrains # ... and assign to me
symQuats.append(o.equivalentQuaternions()) # store all symmetrically equivalent orientations for future comparison
phases.append(myData[colPhase]) # store phase info for future reporting
if microstructure[myRank] == 0: # no other orientation resembled me
nGrains += 1 # make new grain ...
microstructure[myRank] = nGrains # ... and assign to me
symQuats.append(o.equivalentQuaternions()) # store all symmetrically equivalent orientations for future comparison
phases.append(myData[colPhase]) # store phase info for future reporting
myRank += 1
myRank += 1
table.croak('')
table.croak('')
# --- generate header ----------------------------------------------------------------------------
info = {
'grid': np.array([nX,nY,1]),
'size': np.array([coordsX[-1]-coordsX[0],
coordsY[-1]-coordsY[0],
min((coordsX[-1]-coordsX[0])/nX,
(coordsY[-1]-coordsY[0])/nY,
)
]),
'origin': np.array([coordsX[0],coordsY[0],0.0]),
'grid': grid,
'size': size,
'origin': origin,
'microstructures': nGrains,
'homogenization': options.homogenization,
}
@ -243,29 +261,32 @@ for name in filenames:
formatwidth = 1+int(math.log10(info['microstructures']))
config_header = ['<microstructure>']
for i,phase in enumerate(phases):
config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
'crystallite %i'%options.crystallite,
'(constituent)\tphase %i\ttexture %s\tfraction 1.0'%(phase,str(i+1).rjust(formatwidth)),
]
if inputtype == 'microstructure':
config_header = []
else:
config_header = ['<microstructure>']
for i,phase in enumerate(phases):
config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
'crystallite %i'%options.crystallite,
'(constituent)\tphase %i\ttexture %s\tfraction 1.0'%(phase,str(i+1).rjust(formatwidth)),
]
config_header += ['<texture>']
for i,quats in enumerate(symQuats):
config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
'axes\t%s %s %s'%tuple(options.axes) if options.axes != None else '',
'(gauss)\tphi1 %g\tPhi %g\tphi2 %g\tscatter 0.0\tfraction 1.0'%tuple(np.degrees(quats[0].asEulers())),
]
config_header += ['<texture>']
for i,quats in enumerate(symQuats):
config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
'axes\t%s %s %s'%tuple(options.axes) if options.axes != None else '',
'(gauss)\tphi1 %g\tPhi %g\tphi2 %g\tscatter 0.0\tfraction 1.0'%tuple(np.degrees(quats[0].asEulers())),
]
table.labels_clear()
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(info['microstructures']),
config_header,
])
table.head_write()
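The grain identification above keeps its KDTree approach but now walks the full 3-D index space. The sketch below isolates the clustering logic with numpy/scipy only: visit points in grid order, query neighbors within a radius of three index units, and adopt a neighbor's grain ID when its orientation matches. The quaternion/symmetry comparison of the real script is stood in for by a plain scalar tolerance, and grid, values, and tolerance are hypothetical:

import numpy as np
import scipy.spatial

grid   = np.array([4,3,1])                                             # hypothetical grid (x,y,z)
delta  = np.array([0.25,0.25,0.25])                                    # cell size
coords = np.array([[(x+0.5)*delta[0],(y+0.5)*delta[1],(z+0.5)*delta[2]]
                   for z in range(grid[2]) for y in range(grid[1]) for x in range(grid[0])])
values = np.array([1.0, 1.0, 5.0, 5.0,                                 # stand-in "orientation" per point,
                   1.0, 1.1, 5.0, 5.1,                                 # listed x fastest, y slower
                   1.0, 1.0, 5.0, 5.0])
tolerance = 0.5                                                        # points closer than this share a grain

mincorner = coords.min(axis=0)
index = np.lexsort((coords[:,0],coords[:,1],coords[:,2]))              # rank -> row: x fastest, z slowest
rank  = np.argsort(index)                                              # row -> rank
tree  = scipy.spatial.KDTree((coords-mincorner)/delta)                 # unit spacing, origin at first cell center

microstructure = np.zeros(grid.prod(),dtype='uint32')                  # grain ID per processing rank
grains = []                                                            # representative value per grain
myRank = 0
for z in range(grid[2]):
  for y in range(grid[1]):
    for x in range(grid[0]):
      me = values[index[myRank]]
      for n in tree.query_ball_point([x,y,z],3):                       # candidates within radius 3
        if myRank <= rank[n]: continue                                 # skip myself and not-yet-visited points
        if abs(grains[microstructure[rank[n]]-1] - me) <= tolerance:
          microstructure[myRank] = microstructure[rank[n]]             # join the neighbor's grain
          break
      if microstructure[myRank] == 0:                                  # nobody matched: open a new grain
        grains.append(me)
        microstructure[myRank] = len(grains)
      myRank += 1

print(microstructure.reshape(grid[::-1]))                              # -> three rows of [1 1 2 2]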

View File

@ -22,13 +22,14 @@ compress geometry files with ranges "a to b" and/or multiples "n of x".
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False, labeled = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
@ -63,7 +64,6 @@ for name in filenames:
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.head_write()
table.output_flush()
# --- write packed microstructure information -----------------------------------------------------
@ -116,4 +116,3 @@ for name in filenames:
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -41,13 +41,14 @@ parser.set_defaults(renumber = False,
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False, labeled = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
@ -144,7 +145,6 @@ for name in filenames:
])
table.labels_clear()
table.head_write()
table.output_flush()
# --- write microstructure information ------------------------------------------------------------
@ -155,4 +155,3 @@ for name in filenames:
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -31,7 +31,7 @@ parser.set_defaults(position = 'pos',
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
try:

View File

@ -44,13 +44,14 @@ for i in xrange(len(options.substitute)/2):
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False, labeled = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
@ -111,7 +112,6 @@ for name in filenames:
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()
table.output_flush()
# --- write microstructure information ------------------------------------------------------------
@ -122,4 +122,3 @@ for name in filenames:
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -29,13 +29,14 @@ parser.set_defaults(oneD = False,
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False, labeled = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
@ -70,7 +71,6 @@ for name in filenames:
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.head_write()
table.output_flush()
# --- write microstructure information ------------------------------------------------------------
@ -83,4 +83,3 @@ for name in filenames:
#--- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -38,13 +38,14 @@ parser.set_defaults(vicinity = 1,
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False, labeled = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
@ -103,7 +104,6 @@ for name in filenames:
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()
table.output_flush()
# --- write microstructure information ------------------------------------------------------------
@ -114,4 +114,3 @@ for name in filenames:
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
if name != 'STDIN': os.rename(name+'_tmp',name) # overwrite old one with tmp new

View File

@ -280,18 +280,19 @@ methods = [options.algorithm]
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = None,
buffered = False, readonly = True)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False, readonly = True)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed == None else options.randomSeed # random seed per file for second phase
random.seed(randomSeed)
# ------------------------------------------ read header ---------------------------------------
# ------------------------------------------ read header ------------------------------------------
table.head_read()
@ -305,26 +306,19 @@ for name in filenames:
table.close(dismiss = True)
continue
# ------------------------------------------ read data ---------------------------------------
# ------------------------------------------ read data --------------------------------------------
binnedODF = table.data_readArray(labels)
# --------------- figure out limits (left/right), delta, and interval -----------------------------
ODF = {}
limits = np.array([np.min(table.data,axis=0),
np.max(table.data,axis=0)])
ODF['limit'] = np.radians(limits[1,:])
ODF['center'] = 0.0 if all(limits[0,:]<1e-8) else 0.5 # vertex or cell centered
if all(limits[0,:]<1e-8): # vertex centered
ODF['center'] = 0.0
else: # cell centered
ODF['center'] = 0.5
eulers = [{},{},{}]
for i in xrange(table.data.shape[0]):
for j in xrange(3):
eulers[j][str(table.data[i,j])] = True # remember eulers along phi1, Phi, and phi2
ODF['interval'] = np.array([len(eulers[0]),len(eulers[1]),len(eulers[2]),],'i') # steps are number of distict values
ODF['interval'] = np.array(map(len,[np.unique(table.data[:,i]) for i in xrange(3)]),'i') # steps are number of distict values
ODF['nBins'] = ODF['interval'].prod()
ODF['delta'] = np.radians(np.array(limits[1,0:3]-limits[0,0:3])/(ODF['interval']-1))
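The ODF bookkeeping above likewise derives limits, centering, interval, and bin width directly from the binned table via np.unique. A small numpy-only sketch on a hypothetical cell-centered 2 x 3 x 2 Euler grid (degrees):

import numpy as np

phi1, Phi, phi2 = np.meshgrid([15.,45.],[10.,30.,50.],[20.,60.],indexing='ij')  # cell-centered bins in degrees
data = np.column_stack((phi1.ravel(),Phi.ravel(),phi2.ravel()))

ODF = {}
limits          = np.array([np.min(data,axis=0),np.max(data,axis=0)])
ODF['limit']    = np.radians(limits[1,:])
ODF['center']   = 0.0 if all(limits[0,:]<1e-8) else 0.5                         # vertex vs cell centered
ODF['interval'] = np.array(list(map(len,[np.unique(data[:,i]) for i in range(3)])),'i')
ODF['nBins']    = ODF['interval'].prod()
ODF['delta']    = np.radians((limits[1,0:3]-limits[0,0:3])/(ODF['interval']-1))

print(ODF['interval'])                                                          # -> [2 3 2], 12 bins, center 0.5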

View File

@ -177,109 +177,90 @@ def initial_conditions(homogenization,microstructures):
return cmds
#-------------------------------------------------------------------------------------------------
def parse_geomFile(content,homog):
#-------------------------------------------------------------------------------------------------
(skip,key) = content[0].split()[:2]
if key[:4].lower() == 'head':
skip = int(skip)+1
else:
skip = 0
grid = [0,0,0]
size = [0.0,0.0,0.0]
homog = 0
for line in content[:skip]:
data = line.split()
if data[0].lower() == 'grid':
grid = map(int,data[2:8:2])
if data[0].lower() == 'size':
size = map(float,data[2:8:2])
if data[0].lower() == 'homogenization':
homog = int(data[1])
microstructures = []
for line in content[skip:]:
for word in line.split():
microstructures.append(int(word))
return (grid,size,homog,microstructures)
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Generate MSC.Marc FE hexahedral mesh from spectral description file.
Generate MSC.Marc FE hexahedral mesh from geom file.
""", version = scriptID)
parser.add_option('-p', '--port', type='int',dest='port',metavar='int',
help='Mentat connection port [%default]')
parser.add_option('--homogenization', dest='homogenization', type='int', metavar = 'int',
help='homogenization index to be used [%default]')
parser.set_defaults(port = None)
parser.set_defaults(homogenization = 1)
parser.add_option('-p', '--port',
dest = 'port',
type = 'int', metavar = 'int',
help = 'Mentat connection port [%default]')
parser.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index to be used [auto]')
parser.set_defaults(port = None,
homogenization = None,
)
(options, filenames) = parser.parse_args()
if options.port:
try:
from py_mentat import *
except:
parser.error('no valid Mentat release found.')
#--- setup file handles --------------------------------------------------------------------------
files = []
if filenames == []:
files.append({'name':'STDIN',
'input':sys.stdin,
'output':sys.stdout,
'croak':sys.stderr,
})
else:
for name in filenames:
if os.path.exists(name):
files.append({'name':name,
'input':open(name),
'output':open(name+'_tmp','w'),
'croak':sys.stdout,
})
# --- loop over input files -------------------------------------------------------------------------
try:
from py_mentat import *
except:
sys.stderr.write('no valid Mentat release found.\n')
if options.port != None: sys.exit(-1)
if filenames == []: filenames = [None]
#--- loop over input files ------------------------------------------------------------------------
for file in files:
file['croak'].write('\033[1m' + scriptName + '\033[0m: ' + (file['name'] if file['name'] != 'STDIN' else '') + '\n')
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.proc' if name else name,
buffered = False, labeled = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
if options.homogenization: info['homogenization'] = options.homogenization
table.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
table.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure
content = file['input'].readlines()
(grid,size,homog,microstructures) = parse_geomFile(content, options.homogenization)
#--- report ---------------------------------------------------------------------------------------
file['croak'].write('grid a b c: %s\n'%(' x '.join(map(str,grid))) +
'size x y z: %s\n'%(' x '.join(map(str,size))) +
'homogenization: %i\n'%homog +
'microstructures: %i\n\n'%(len(list(set(microstructures)))))
cmds = [\
init(),
mesh(grid,size),
mesh(info['grid'],info['size']),
material(),
geometry(),
initial_conditions(homog,microstructures),
initial_conditions(info['homogenization'],microstructure),
'*identify_sets',
'*show_model',
'*redraw',
]
outputLocals = {}
if (options.port != None):
if options.port:
py_connect('',options.port)
output(cmds,outputLocals,'Mentat')
py_disconnect()
else:
output(cmds,outputLocals,file['output'])
if file['name'] != 'STDIN':
file['output'].close()
os.rename(file['name']+'_tmp',os.path.splitext(file['name'])[0] +'.proc')
output(cmds,outputLocals,table.__IO__['out']) # bad hack into internals of table class...
table.close()
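The removed parse_geomFile shows exactly what the geom header used to be scanned for: a leading 'N header' line, then the 'grid', 'size' and 'homogenization' keywords, followed by the flat microstructure indices, which is the same information table.head_getGeom() and table.microstructure_read() now deliver. Note that the homog argument of parse_geomFile was overwritten immediately, so the command-line homogenization never took effect; the new 'if options.homogenization:' guard restores it. A hypothetical geom snippet and the values the old parser extracts from it:
content = ['3 header\n',
           'grid a 2 b 2 c 1\n',
           'size x 1.0 y 1.0 z 0.5\n',
           'homogenization 1\n',
           '1 2\n',
           '3 4\n']
(grid,size,homog,microstructures) = parse_geomFile(content,1)
# grid            == [2, 2, 1]
# size            == [1.0, 1.0, 0.5]
# homog           == 1
# microstructures == [1, 2, 3, 4]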

View File

@@ -34,13 +34,14 @@ parser.set_defaults(size = [0.0,0.0,0.0],
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = None,
buffered = False, readonly = True)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
buffered = False, readonly = True)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
@@ -62,13 +63,13 @@ for name in filenames:
and np.all(info['grid'] < 1): errors.append('invalid size x y z.')
else:
for i in xrange(3):
if info['size'][i] <= 0.0: # any invalid size?
info['size'][i] = float(info['grid'][i])/max(info['grid']) # normalize to grid
if info['size'][i] <= 0.0: # any invalid size?
info['size'][i] = float(info['grid'][i])/max(info['grid']) # normalize to grid
remarks.append('rescaling size {} to {}...'.format({0:'x',1:'y',2:'z'}[i],info['size'][i]))
if table.label_dimension(options.position) != 3: errors.append('columns "{}" have dimension {}'.format(options.position,
table.label_dimension(options.position)))
if remarks != []: table.croak(remarks)
if errors != []:
if errors != []:
table.croak(errors)
table.close(dismiss=True)
continue
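The rescaling remark above is easiest to see with concrete numbers: any non-positive size component is replaced by that grid dimension relative to the largest one, so a 20 x 20 x 10 grid with no size information becomes a 1.0 x 1.0 x 0.5 box. A short check with hypothetical values:
import numpy as np
info = {'grid': np.array([20,20,10]), 'size': np.array([0.0,0.0,0.0])}       # hypothetical
for i in range(3):
  if info['size'][i] <= 0.0:                                                 # any invalid size?
    info['size'][i] = float(info['grid'][i])/max(info['grid'])               # normalize to grid
# info['size'] is now [1.0, 1.0, 0.5]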
@@ -81,14 +82,14 @@ for name in filenames:
coords = table.data[:,:3] # assign coordinates
grain = table.data[:,3].astype('i') if hasGrains else 1+np.arange(len(coords),dtype='i') # assign grains
# grainIDs = np.unique(grain).astype('i') # find all grainIDs present
# grainIDs = np.unique(grain).astype('i') # find all grainIDs present
# --- generate grid --------------------------------------------------------------------------------
grid = vtk.vtkUnstructuredGrid()
pts = vtk.vtkPoints()
# --- process microstructure information --------------------------------------------------------------
# --- process microstructure information -----------------------------------------------------------
IDs = vtk.vtkIntArray()
IDs.SetNumberOfComponents(1)
@@ -106,7 +107,17 @@ for name in filenames:
# --- write data -----------------------------------------------------------------------------------
if name == 'STDIN':
if name:
(dir,filename) = os.path.split(name)
writer = vtk.vtkXMLUnstructuredGridWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(dir,'seeds_'+os.path.splitext(filename)[0]
+'.'+writer.GetDefaultFileExtension()))
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
else: writer.SetInputData(grid)
writer.Write()
else:
writer = vtk.vtkUnstructuredGridWriter()
writer.WriteToOutputStringOn()
writer.SetFileTypeToASCII()
@@ -115,15 +126,5 @@ for name in filenames:
else: writer.SetInputData(grid)
writer.Write()
sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
else:
(dir,filename) = os.path.split(name)
writer = vtk.vtkXMLUnstructuredGridWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName(os.path.join(dir,'seeds_'+os.path.splitext(filename)[0]
+'.'+writer.GetDefaultFileExtension()))
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
else: writer.SetInputData(grid)
writer.Write()
table.close()
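The branch above boils down to: with a real input file, write a compressed binary .vtu next to it via vtkXMLUnstructuredGridWriter; when reading from STDIN, fall back to the legacy ASCII writer and stream the result to STDOUT. Here is a minimal, self-contained sketch of the file branch with hypothetical seed coordinates and microstructure IDs, using only the VTK calls visible in this diff plus standard vtkPoints/vtkIdList plumbing.
import vtk
seeds = [(0.25,0.25,0.5),(0.75,0.75,0.5)]                        # hypothetical seed positions
microstructure = [1,2]                                           # hypothetical grain IDs
grid = vtk.vtkUnstructuredGrid()
pts  = vtk.vtkPoints()
IDs  = vtk.vtkIntArray()
IDs.SetNumberOfComponents(1)
IDs.SetName('microstructure')
for (x,y,z),m in zip(seeds,microstructure):
  pointID = pts.InsertNextPoint(x,y,z)
  cell = vtk.vtkIdList()
  cell.InsertNextId(pointID)
  grid.InsertNextCell(vtk.VTK_VERTEX,cell)                       # one vertex cell per seed
  IDs.InsertNextValue(m)
grid.SetPoints(pts)
grid.GetCellData().AddArray(IDs)
writer = vtk.vtkXMLUnstructuredGridWriter()
writer.SetDataModeToBinary()
writer.SetCompressorTypeToZLib()
writer.SetFileName('seeds_example.'+writer.GetDefaultFileExtension())
if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
else:                          writer.SetInputData(grid)
writer.Write()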

View File

@@ -49,13 +49,15 @@ parser.set_defaults(x = False,
# --- loop over output files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
if not (name == 'STDIN' or os.path.exists(name)): continue
table = damask.ASCIItable(name = name, outname = name+'_tmp',
buffered = False, labeled = False)
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name != 'STDIN' else ''))
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'_poked_{}.seeds'.format(options.N) if name else name,
buffered = False, labeled = False)
except: continue
table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
# --- interpret header ----------------------------------------------------------------------------
@@ -91,7 +93,7 @@ for name in filenames:
Ny = int(options.N/math.sqrt(options.N*info['size'][0]/info['size'][1]))
Nz = int((max(options.z)-min(options.z))/info['size'][2]*info['grid'][2])
table.croak('poking {0} x {1} x {2}...'.format(Nx,Ny,Nz))
table.croak('poking {} x {} x {}...'.format(Nx,Ny,Nz))
seeds = np.zeros((Nx*Ny*Nz,4),'d')
grid = np.zeros(3,'i')
@@ -125,11 +127,12 @@ for name in filenames:
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
"poking\ta {}\tb {}\tc {}".format(Nx,Ny,Nz),
"grid\ta {}\tb {}\tc {}".format(newInfo['grid']),
"size\tx {}\ty {}\tz {}".format(newInfo['size']),
"origin\tx {}\ty {}\tz {}".format(info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(newInfo['microstructures']),
])
table.labels_clear()
table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.position) for i in range(3)]+['microstructure'])
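The positional '{}' placeholders above only work because each three-component vector is unpacked into separate format() arguments; passing the vector itself would leave the second and third placeholders without values. A quick illustration with hypothetical numbers:
newInfo = {'grid': [16,16,16]}                                   # hypothetical
header = "grid\ta {}\tb {}\tc {}".format(*newInfo['grid'])       # -> 'grid\ta 16\tb 16\tc 16'
# without the leading *, format() receives one list and the second {} raises IndexError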
@@ -144,5 +147,3 @@ for name in filenames:
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
if name != 'STDIN':
os.rename(name+'_tmp',os.path.splitext(name)[0] + '_poked_%ix%ix%i.seeds'%(Nx,Ny,Nz))

View File

@@ -152,15 +152,15 @@ for name in filenames:
if not options.selective:
seeds = np.zeros((3,options.N),dtype=float) # seed positions array
seeds = np.zeros((3,options.N),dtype='d') # seed positions array
gridpoints = random.sample(range(gridSize),options.N) # create random permutation of all grid positions and choose first N
seeds[0,:] = (np.mod(gridpoints ,options.grid[0])\
+np.random.random()) /options.grid[0]
+np.random.random(options.N)) /options.grid[0]
seeds[1,:] = (np.mod(gridpoints// options.grid[0] ,options.grid[1])\
+np.random.random()) /options.grid[1]
+np.random.random(options.N)) /options.grid[1]
seeds[2,:] = (np.mod(gridpoints//(options.grid[1]*options.grid[0]),options.grid[2])\
+np.random.random()) /options.grid[2]
+np.random.random(options.N)) /options.grid[2]
else:
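The three assignments above decode each flat grid index into (x,y,z) cell indices and jitter every seed by its own random offset, which is why the scalar np.random.random() became np.random.random(options.N). A small self-contained check on a hypothetical 4 x 3 x 2 grid:
import random
import numpy as np
grid = np.array([4,3,2],'i')                                     # hypothetical grid
N    = 5
gridSize = grid.prod()
gridpoints = np.array(random.sample(range(gridSize),N))          # N distinct flat indices
seeds = np.zeros((3,N),dtype='d')
seeds[0,:] = (np.mod(gridpoints                    ,grid[0]) + np.random.random(N))/grid[0]
seeds[1,:] = (np.mod(gridpoints// grid[0]          ,grid[1]) + np.random.random(N))/grid[1]
seeds[2,:] = (np.mod(gridpoints//(grid[1]*grid[0]) ,grid[2]) + np.random.random(N))/grid[2]
assert np.all((seeds >= 0.0) & (seeds < 1.0))                    # fractional positions inside the unit cube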

View File

@@ -56,7 +56,7 @@ if options.blacklist != None: options.blacklist = map(int,options.blacklist)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = ['STDIN']
if filenames == []: filenames = [None]
for name in filenames:
try: