Merge branch 'development' of magit1.mpie.de:damask/DAMASK into development

Martin Diehl 2016-04-24 22:45:55 +02:00
commit 89107eaff9
10 changed files with 132 additions and 284 deletions

View File

@@ -21,13 +21,13 @@ if not os.path.isdir(binDir):

#define ToDo list
processing_subDirs = ['pre','post','misc',]

processing_extensions = ['.py',]
processing_extensions = ['.py','.sh',]

for subDir in processing_subDirs:
  theDir = os.path.abspath(os.path.join(baseDir,subDir))
  for theFile in os.listdir(theDir):
    if os.path.splitext(theFile)[1] in processing_extensions:    # omit anything not fitting our script extensions (skip .py.bak, .py~, and the like)
    if os.path.splitext(theFile)[1] in processing_extensions:    # only consider files with proper extensions
      src = os.path.abspath(os.path.join(theDir,theFile))
      sym_link = os.path.abspath(os.path.join(binDir,os.path.splitext(theFile)[0]))
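With '.sh' added to the extension list, shell scripts are now symlinked into the DAMASK bin directory alongside the Python tools, under their extension-free names. A minimal sketch of the link creation this loop feeds into (the remove-then-relink step is an assumption here; it is not part of the shown hunk):

      # sketch: refresh bin/<name-without-extension> -> processing/<subDir>/<script>  (assumed step)
      if os.path.lexists(sym_link): os.remove(sym_link)
      os.symlink(src, sym_link)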

View File

@@ -166,14 +166,6 @@ for name in filenames:
                           np.zeros((table.data.shape[0],
                                     3-table.data[:,9:].shape[1]),dtype='f')))  # fill coords up to 3D with zeros

  if remarks != []: damask.util.croak(remarks)
  if errors != []:
    damask.util.croak(errors)
    table.close(dismiss = True)
    continue

# --------------- figure out size and grid ---------------------------------------------------------

  coords = [np.unique(table.data[:,9+i]) for i in xrange(3)]
  mincorner = np.array(map(min,coords))
  maxcorner = np.array(map(max,coords))
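The unique coordinate values per column give the number of grid points along each axis, while mincorner and maxcorner bound the box. A small numpy sketch of the idea for a single axis (values hypothetical; the exact size formula applied further down is not part of this hunk):

    import numpy as np
    x = np.array([0.25, 0.75, 0.25, 0.75])                   # hypothetical x coordinates of all points
    coords = np.unique(x)                                     # -> [0.25, 0.75], i.e. 2 grid points along x
    grid_x = len(coords)
    size_x = grid_x*(coords.max()-coords.min())/(grid_x-1)    # -> 1.0, assuming equidistant points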

View File

@@ -88,16 +88,27 @@ Add column(s) containing Euclidean distance to grain structural features: bounda
""", version = scriptID)

parser.add_option('-c','--coordinates', dest='coords', metavar='string',
                  help='column label of coordinates [%default]')
parser.add_option('-i','--identifier', dest='id', metavar = 'string',
                  help='column label of grain identifier [%default]')
parser.add_option('-t','--type', dest = 'type', action = 'extend', metavar = '<string LIST>',
                  help = 'feature type {%s} '%(', '.join(map(lambda x:'/'.join(x['names']),features))) )
parser.add_option('-n','--neighborhood',dest='neighborhood', choices = neighborhoods.keys(), metavar = 'string',
                  help = 'type of neighborhood [neumann] {%s}'%(', '.join(neighborhoods.keys())))
parser.add_option('-s', '--scale', dest = 'scale', type = 'float', metavar = 'float',
parser.add_option('-p',
                  '--pos', '--position',
                  dest = 'coords', metavar = 'string',
                  help = 'label of coordinates [%default]')
parser.add_option('-i',
                  '--id', '--identifier',
                  dest = 'id', metavar = 'string',
                  help='label of grain identifier [%default]')
parser.add_option('-t',
                  '--type',
                  dest = 'type', action = 'extend', metavar = '<string LIST>',
                  help = 'feature type {{{}}} '.format(', '.join(map(lambda x:'/'.join(x['names']),features))) )
parser.add_option('-n',
                  '--neighborhood',
                  dest = 'neighborhood', choices = neighborhoods.keys(), metavar = 'string',
                  help = 'neighborhood type [neumann] {{{}}}'.format(', '.join(neighborhoods.keys())))
parser.add_option('-s',
                  '--scale',
                  dest = 'scale', type = 'float', metavar = 'float',
                  help = 'voxel size [%default]')

parser.set_defaults(coords = 'pos',
                    id = 'texture',
                    neighborhood = 'neumann',
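The reworked help strings switch from %-formatting to str.format; the doubled braces are what put literal { } around the list of valid choices. A minimal sketch (choice names hypothetical):

    # '{{' and '}}' are literal braces under str.format, '{}' is the substitution field
    'neighborhood type [neumann] {{{}}}'.format(', '.join(['neumann','moore']))
    # -> 'neighborhood type [neumann] {neumann, moore}'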
@@ -111,7 +122,7 @@ if options.type is None:

if not set(options.type).issubset(set(list(itertools.chain(*map(lambda x: x['names'],features))))):
  parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
if 'biplane' in options.type and 'boundary' in options.type:
  parser.error("only one from aliases 'biplane' and 'boundary' possible.")
  parser.error('only one from aliases "biplane" and "boundary" possible.')
feature_list = []
for i,feature in enumerate(features):
@@ -172,9 +183,7 @@ for name in filenames:

  N = grid.prod()

  if N != len(table.data): errors.append('data count {} does not match grid '.format(N) +
                                          'x'.join(map(str,grid)) +
                                          '.')
  if N != len(table.data): errors.append('data count {} does not match grid {}.'.format(N,'x'.join(map(str,grid))))

  if errors != []:
    damask.util.croak(errors)
    table.close(dismiss = True)
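The condensed error message joins the grid dimensions with 'x', so the mismatch report reads like 'data count 4095 does not match grid 16x16x16.'. A tiny sketch with hypothetical numbers:

    grid = [16,16,16]                                          # hypothetical grid
    'data count {} does not match grid {}.'.format(4095,'x'.join(map(str,grid)))
    # -> 'data count 4095 does not match grid 16x16x16.'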

processing/post/addInfo.py (new executable file, 56 lines added)

@@ -0,0 +1,56 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os
from optparse import OptionParser
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options file[s]', description = """
Add info lines to ASCIItable header.

""", version = scriptID)

parser.add_option('-i',
                  '--info',
                  dest = 'info', action = 'extend', metavar = '<string LIST>',
                  help = 'items to add')

parser.set_defaults(info = [],
                   )

(options,filenames) = parser.parse_args()

# --- loop over input files ------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try: table = damask.ASCIItable(name = name,
                                 buffered = False)
  except: continue
  damask.util.report(scriptName,name)

# ------------------------------------------ assemble header ---------------------------------------

  table.head_read()
  table.info_append(options.info)
  table.head_write()

# ------------------------------------------ pass through data -------------------------------------

  outputAlive = True
  while outputAlive and table.data_read():        # read next data line of ASCII table
    outputAlive = table.data_write()              # output processed line

# ------------------------------------------ output finalization -----------------------------------

  table.close()                                   # close ASCII tables

View File

@@ -127,7 +127,7 @@ for name in filenames:

# ------------------------------------------ output result ---------------------------------------

  if name:
    writer = vtk.vtkXMLPolyDataWriter()
    writer = vtk.vtkXMLRectilinearGridWriter()
    writer.SetCompressorTypeToZLib()
    writer.SetDataModeToBinary()
    writer.SetFileName(os.path.join(os.path.split(name)[0],

View File

@@ -1,108 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,vtk
import numpy as np
from optparse import OptionParser
import damask

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [file[s]]', description = """
Produce VTK rectilinear mesh of structure data from geom description

""", version = scriptID)

parser.add_option('-m','--nodata',
                  dest = 'data',
                  action = 'store_false',
                  help = 'generate mesh without microstructure index data')

parser.set_defaults(data = True,
                   )

(options, filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              buffered = False, labeled = False, readonly = True)
  except: continue
  damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

  table.head_read()
  info,extra_header = table.head_getGeom()

  damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
                     'size x y z: %s'%(' x '.join(map(str,info['size']))),
                     'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
                     'homogenization: %i'%info['homogenization'],
                     'microstructures: %i'%info['microstructures'],
                    ])

  errors = []
  if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
  if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')

#--- read microstructure information --------------------------------------------------------------

  if options.data:
    microstructure,ok = table.microstructure_read(info['grid'],strict = True)          # read microstructure

    if ok:
      structure = vtk.vtkIntArray()
      structure.SetName('Microstructures')
      for idx in microstructure: structure.InsertNextValue(idx)
    else: errors.append('mismatch between data and grid dimension.')

  if errors != []:
    damask.util.croak(errors)
    table.close(dismiss = True)
    continue

# --- generate VTK rectilinear grid ---------------------------------------------------------------

  grid = vtk.vtkRectilinearGrid()
  grid.SetDimensions([x+1 for x in info['grid']])
  for i in xrange(3):
    temp = vtk.vtkDoubleArray()
    temp.SetNumberOfTuples(info['grid'][i]+1)
    for j in xrange(info['grid'][i]+1):
      temp.InsertTuple1(j,j*info['size'][i]/info['grid'][i]+info['origin'][i])
    if i == 0: grid.SetXCoordinates(temp)
    elif i == 1: grid.SetYCoordinates(temp)
    elif i == 2: grid.SetZCoordinates(temp)

  if options.data: grid.GetCellData().AddArray(structure)

# --- write data -----------------------------------------------------------------------------------

  if name:
    writer = vtk.vtkXMLRectilinearGridWriter()
    (directory,filename) = os.path.split(name)
    writer.SetDataModeToBinary()
    writer.SetCompressorTypeToZLib()
    writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]+'.'+writer.GetDefaultFileExtension()))
  else:
    writer = vtk.vtkDataSetWriter()
    writer.WriteToOutputStringOn()
    writer.SetHeader('# powered by '+scriptID)

  if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
  else: writer.SetInputData(grid)
  writer.Write()

  if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

  table.close()
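For reference, the deleted loop above builds grid[i]+1 node coordinates per axis as j*size/grid + origin; the same series in plain numpy (single-axis values hypothetical):

    import numpy as np
    grid, size, origin = 4, 1.0, 0.0                           # hypothetical values for one axis
    coords = np.arange(grid+1)*size/grid + origin              # -> [0., 0.25, 0.5, 0.75, 1.]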

processing/pre/geom_check.sh (new executable file, 15 lines added)

@@ -0,0 +1,15 @@
#!/bin/bash

for geom in "$@"
do
  vtk_rectilinearGrid \
  --geom $geom

  geom_toTable \
  < $geom \
  | \
  vtk_addRectilinearGridData \
  --scalar microstructure \
  --inplace \
  --vtk ${geom%.*}.vtr
done
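In the --vtk argument, ${geom%.*} strips the trailing extension, so a file called 20grains.geom (name hypothetical) ends up as 20grains.vtr. The Python tools achieve the same with os.path.splitext:

    import os
    geom = '20grains.geom'                                     # hypothetical input name
    vtk_name = os.path.splitext(geom)[0] + '.vtr'              # -> '20grains.vtr', mirrors ${geom%.*}.vtr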

View File

@@ -1,125 +0,0 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,vtk
import numpy as np
import damask
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [seedsfile[s]]', description = """
Produce VTK point mesh from seeds file

""", version = scriptID)

parser.add_option('-s', '--size',
                  dest = 'size',
                  type = 'float', nargs = 3, metavar = 'float float float',
                  help = 'x,y,z size of hexahedral box [1.0 along largest grid point number]')
parser.add_option('-p','--position',
                  dest = 'position',
                  type = 'string', metavar = 'string',
                  help = 'column label for coordinates [%default]')

parser.set_defaults(size = [0.0,0.0,0.0],
                    position = 'pos',
                   )

(options, filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              buffered = False, readonly = True)
  except: continue
  damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

  table.head_read()
  info,extra_header = table.head_getGeom()

  damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
                     'size x y z: %s'%(' x '.join(map(str,info['size']))),
                     'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
                     'homogenization: %i'%info['homogenization'],
                     'microstructures: %i'%info['microstructures'],
                    ])

  remarks = []
  errors = []
  if np.any(info['grid'] < 1): remarks.append('invalid grid a b c.')
  if np.any(info['size'] <= 0.0) \
     and np.all(info['grid'] < 1): errors.append('invalid size x y z.')
  else:
    for i in xrange(3):
      if info['size'][i] <= 0.0:                                          # any invalid size?
        info['size'][i] = float(info['grid'][i])/max(info['grid'])        # normalize to grid
        remarks.append('rescaling size {} to {}...'.format({0:'x',1:'y',2:'z'}[i],info['size'][i]))
  if table.label_dimension(options.position) != 3:
    errors.append('columns "{}" have dimension {}'.format(options.position,table.label_dimension(options.position)))

  if remarks != []: damask.util.croak(remarks)
  if errors != []:
    damask.util.croak(errors)
    table.close(dismiss=True)
    continue

  labels = ['{dim}_{label}'.format(dim = 1+i,label = options.position) for i in xrange(3)]
  hasGrains = table.label_index('microstructure') != -1
  labels += ['microstructure'] if hasGrains else []

  table.data_readArray(labels)                                            # read ASCIItable columns
  coords = table.data[:,:3]*info['size']                                  # assign coordinates (rescaled to box size)
  grain = table.data[:,3].astype('i') if hasGrains else 1+np.arange(len(coords),dtype='i')  # assign grains

# --- generate grid --------------------------------------------------------------------------------

  grid = vtk.vtkUnstructuredGrid()
  pts = vtk.vtkPoints()

# --- process microstructure information -----------------------------------------------------------

  IDs = vtk.vtkIntArray()
  IDs.SetNumberOfComponents(1)
  IDs.SetName("GrainID")

  for i,item in enumerate(coords):
    IDs.InsertNextValue(grain[i])
    pid = pts.InsertNextPoint(item[0:3])
    pointIds = vtk.vtkIdList()
    pointIds.InsertId(0, pid)
    grid.InsertNextCell(1, pointIds)

  grid.SetPoints(pts)
  grid.GetCellData().AddArray(IDs)

#--- write data -----------------------------------------------------------------------------------

  if name:
    writer = vtk.vtkXMLRectilinearGridWriter()
    (directory,filename) = os.path.split(name)
    writer.SetDataModeToBinary()
    writer.SetCompressorTypeToZLib()
    writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]+'.'+writer.GetDefaultFileExtension()))
  else:
    writer = vtk.vtkDataSetWriter()
    writer.WriteToOutputStringOn()
    writer.SetHeader('# powered by '+scriptID)

  if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
  else: writer.SetInputData(grid)
  writer.Write()

  if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

  table.close()

processing/pre/seeds_check.sh (new executable file, 12 lines added)

@@ -0,0 +1,12 @@
#!/bin/bash

for seeds in "$@"
do
  vtk_pointcloud $seeds

  vtk_addPointCloudData $seeds \
  --scalar microstructure,weight \
  --inplace \
  --vtk ${seeds%.*}.vtp \

done

View File

@@ -14,26 +14,30 @@ scriptID = ' '.join([scriptName,damask.version])

#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Create seed file taking microstructure indices from given geom file but excluding black-listed grains.
Create seed file taking microstructure indices from given geom file.
Indices can be black-listed or white-listed.

""", version = scriptID)

parser.add_option('-w','--white',
                  action = 'extend', metavar='<int LIST>',
parser.add_option('-w',
                  '--white',
                  action = 'extend', metavar = '<int LIST>',
                  dest = 'whitelist',
                  help = 'whitelist of grain IDs')
parser.add_option('-b','--black',
                  action = 'extend', metavar='<int LIST>',
parser.add_option('-b',
                  '--black',
                  action = 'extend', metavar = '<int LIST>',
                  dest = 'blacklist',
                  help = 'blacklist of grain IDs')
parser.add_option('-p','--position',
parser.add_option('-p',
                  '--pos', '--seedposition',
                  dest = 'position',
                  type = 'string', metavar = 'string',
                  help = 'column label for coordinates [%default]')
                  help = 'label of coordinates [%default]')

parser.set_defaults(whitelist = [],
                    blacklist = [],
                    position = 'pos',
                    pos = 'pos',
                   )

(options,filenames) = parser.parse_args()
@@ -46,25 +50,18 @@ options.blacklist = map(int,options.blacklist)

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
                              outname = os.path.splitext(name)[0]+'.seeds' if name else name,
                              buffered = False, labeled = False)
  except:
    continue
  try: table = damask.ASCIItable(name = name,
                                 outname = os.path.splitext(name)[0]+'.seeds' if name else name,
                                 buffered = False,
                                 labeled = False)
  except: continue
  damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

  table.head_read()
  info,extra_header = table.head_getGeom()

  damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
                     'size x y z: %s'%(' x '.join(map(str,info['size']))),
                     'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
                     'homogenization: %i'%info['homogenization'],
                     'microstructures: %i'%info['microstructures'],
                    ])
  damask.util.report_geom(info)

  errors = []
  if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
@@ -98,14 +95,14 @@ for name in filenames:

  table.info_clear()
  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=info['microstructures']),
    "grid\ta {}\tb {}\tc {}".format(*info['grid']),
    "size\tx {}\ty {}\tz {}".format(*info['size']),
    "origin\tx {}\ty {}\tz {}".format(*info['origin']),
    "homogenization\t{}".format(info['homogenization']),
    "microstructures\t{}".format(info['microstructures']),
    ])
  table.labels_clear()
  table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.position) for i in range(3)]+['microstructure'])
  table.labels_append(['{dim}_{label}'.format(dim = 1+i,label = options.pos) for i in range(3)]+['microstructure'])
  table.head_write()
  table.output_flush()
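The rewritten geometry info lines fill positional fields by unpacking the three-component arrays, instead of the earlier named indexing. A minimal sketch with a hypothetical grid:

    grid = (16, 32, 64)                                        # hypothetical grid
    "grid\ta {}\tb {}\tc {}".format(*grid)                     # -> 'grid\ta 16\tb 32\tc 64' (tab-separated)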