diff --git a/installation/symlink_Processing.py b/installation/symlink_Processing.py
index 9543c4ca7..113a6114e 100755
--- a/installation/symlink_Processing.py
+++ b/installation/symlink_Processing.py
@@ -36,6 +36,7 @@ bin_link = { \
              'geom_fromMinimalSurface.py',
              'geom_fromVoronoiTessellation.py',
              'geom_fromOsteonGeometry.py',
+             'geom_addPrimitive.py',
              'geom_canvas.py',
              'geom_check.py',
              'geom_rescale.py',
@@ -91,6 +92,10 @@ bin_link = { \
              'vtk_voxelcloud.py',
              'vtk_addVoxelcloudData.py',
             ],
+            'misc' : [
+                      'gwyddion_filter.py',
+                      'vtk_fromGwyddion.py',
+                     ],
            }
 
 for dir in bin_link:
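
For context: judging from the trailing "for dir in bin_link:" loop shown above, the installer walks this mapping and links every listed processing script into a common bin directory. The minimal sketch below illustrates that pattern; DAMASK_ROOT, binDir, and the exact link layout are assumptions for illustration, not taken from the diff.

import os

DAMASK_ROOT = os.path.abspath('.')                        # assumed repository root (placeholder only)
binDir      = os.path.join(DAMASK_ROOT,'bin')             # assumed target directory for the symlinks

for dir in bin_link:                                      # category key doubles as subdirectory of processing/
  for script in bin_link[dir]:
    src = os.path.join(DAMASK_ROOT,'processing',dir,script)
    dst = os.path.join(binDir,os.path.splitext(script)[0])
    if os.path.lexists(dst): os.remove(dst)               # replace a stale link if present
    os.symlink(src,dst)
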
+""" + string.replace(scriptID,'\n','\\n') +) + +for option in ['opening', + 'closing', + 'erosion', + 'dilation', + 'average', + 'median', + ]: + parser.add_option('-%s'%option[0], '--%s'%option, dest=option, type='int', + help = 'stencil size for %s filter'%option) + parser.set_default(option, 0) + +(options, filenames) = parser.parse_args() + + +# ------------------------------------------ read Gwyddion data --------------------------------------- + +for file in filenames: + filters = '' + header = [] + with open(file,'r') as f: + for line in f: + pieces = line.split() + if pieces[0] != '#': break + if pieces[1] == 'Width:': width = float(pieces[2]) + if pieces[1] == 'Height:': height = float(pieces[2]) + header.append(line.lstrip('#').strip()) + + elevation = numpy.loadtxt(file)#*1e6 + + if options.opening > 0: + elevation = scipy.ndimage.morphology.grey_opening(elevation,options.opening) + filters += '_opening%i'%options.opening + if options.closing > 0: + elevation = scipy.ndimage.morphology.grey_closing(elevation,options.closing) + filters += '_closing%i'%options.closing + if options.erosion > 0: + elevation = scipy.ndimage.morphology.grey_erosion(elevation,options.erosion) + filters += '_erosion%i'%options.erosion + if options.dilation > 0: + elevation = scipy.ndimage.morphology.grey_dilation(elevation,options.dilation) + filters += '_dilation%i'%options.dilation + if options.average > 0: + elevation = scipy.ndimage.filters.uniform_filter(elevation,options.average) + filters += '_avg%i'%options.average + if options.median > 0: + elevation = scipy.ndimage.filters.median_filter(elevation,options.median) + filters += '_median%i'%options.median + + numpy.savetxt(os.path.splitext(file)[0]+filters+os.path.splitext(file)[1],elevation,header='\n'.join(header)) + diff --git a/processing/misc/vtk_fromGwyddion.py b/processing/misc/vtk_fromGwyddion.py new file mode 100755 index 000000000..36c7efc13 --- /dev/null +++ b/processing/misc/vtk_fromGwyddion.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os,sys,string,re,numpy,scipy.ndimage,scipy.signal,vtk +import damask +from optparse import OptionParser, OptionGroup, Option, SUPPRESS_HELP + +scriptID = '$Id$' +scriptName = scriptID.split()[1] + +scalingFactor = { \ + 'm': { + 'm': 1e0, + 'mm': 1e-3, + 'µm': 1e-6, + }, + 'mm': { + 'm': 1e+3, + 'mm': 1e0, + 'µm': 1e-3, + }, + 'µm': { + 'm': 1e+6, + 'mm': 1e+3, + 'µm': 1e0, + }, + } + +#-------------------------------------------------------------------------------------------------- +class extendedOption(Option): +#-------------------------------------------------------------------------------------------------- +# used for definition of new option parser action 'extend', which enables to take multiple option arguments +# taken from online tutorial http://docs.python.org/library/optparse.html + + ACTIONS = Option.ACTIONS + ("extend",) + STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",) + TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",) + ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",) + + def take_action(self, action, dest, opt, value, values, parser): + if action == "extend": + lvalue = value.split(",") + values.ensure_value(dest, []).extend(lvalue) + else: + Option.take_action(self, action, dest, opt, value, values, parser) + + +parser = OptionParser(option_class=extendedOption, usage='%prog options [file[s]]', description = """ +Produce VTK rectilinear grid from Gwyddion dataset exported as text. 
+""" + string.replace(scriptID,'\n','\\n') +) + +parser.add_option('-s', '--scaling', dest='scaling', type='float', + help = 'scaling factor for elevation data [auto]') + +parser.set_defaults(scaling = 0.0) + +(options, filenames) = parser.parse_args() + + +# ------------------------------------------ read Gwyddion data --------------------------------------- + +for file in filenames: + with open(file,'r') as f: + for line in f: + pieces = line.split() + if pieces[0] != '#': break + if len(pieces) < 2: continue + if pieces[1] == 'Width:': + width = float(pieces[2]) + lateralunit = pieces[3] + if pieces[1] == 'Height:': + height = float(pieces[2]) + lateralunit = pieces[3] + if pieces[1] == 'Value' and pieces[2] == 'units:': + elevationunit = pieces[3] + + if options.scaling == 0.0: + options.scaling = scalingFactor[lateralunit][elevationunit] + + elevation = numpy.loadtxt(file)*options.scaling + + grid = vtk.vtkRectilinearGrid() + grid.SetDimensions(elevation.shape[1],elevation.shape[0],1) + + xCoords = vtk.vtkDoubleArray() + for x in numpy.arange(0.0,width,width/elevation.shape[1],'d'): + xCoords.InsertNextValue(x) + yCoords = vtk.vtkDoubleArray() + for y in numpy.arange(0.0,height,height/elevation.shape[0],'d'): + yCoords.InsertNextValue(y) + zCoords = vtk.vtkDoubleArray() + zCoords.InsertNextValue(0.0) + + grid.SetXCoordinates(xCoords) + grid.SetYCoordinates(yCoords) + grid.SetZCoordinates(zCoords) + + vector = vtk.vtkFloatArray() + vector.SetName("elevation"); + vector.SetNumberOfComponents(3); + vector.SetNumberOfTuples(numpy.prod(elevation.shape)); + for i,z in enumerate(numpy.ravel(elevation)): + vector.SetTuple3(i,0,0,z) + + grid.GetPointData().AddArray(vector) + + writer = vtk.vtkXMLRectilinearGridWriter() + writer.SetDataModeToBinary() + writer.SetCompressorTypeToZLib() + writer.SetFileName(os.path.splitext(file)[0]+'.vtr') + if vtk.VTK_MAJOR_VERSION <= 5: + writer.SetInput(grid) + else: + writer.SetInputData(grid) + writer.Write() diff --git a/processing/pre/geom_addPrimitive.py b/processing/pre/geom_addPrimitive.py new file mode 100755 index 000000000..031f03218 --- /dev/null +++ b/processing/pre/geom_addPrimitive.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 no BOM -*- + +import os,sys,string,re,math,numpy +import damask +from optparse import OptionParser, OptionGroup, Option, SUPPRESS_HELP + +scriptID = '$Id$' +scriptName = scriptID.split()[1] + +#-------------------------------------------------------------------------------------------------- +class extendedOption(Option): +#-------------------------------------------------------------------------------------------------- +# used for definition of new option parser action 'extend', which enables to take multiple option arguments +# taken from online tutorial http://docs.python.org/library/optparse.html + + ACTIONS = Option.ACTIONS + ("extend",) + STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",) + TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",) + ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",) + + def take_action(self, action, dest, opt, value, values, parser): + if action == "extend": + lvalue = value.split(",") + values.ensure_value(dest, []).extend(lvalue) + else: + Option.take_action(self, action, dest, opt, value, values, parser) + + +#-------------------------------------------------------------------------------------------------- +# MAIN +#-------------------------------------------------------------------------------------------------- +synonyms = { + 'grid': ['resolution'], 
diff --git a/processing/pre/geom_addPrimitive.py b/processing/pre/geom_addPrimitive.py
new file mode 100755
index 000000000..031f03218
--- /dev/null
+++ b/processing/pre/geom_addPrimitive.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 no BOM -*-
+
+import os,sys,string,re,math,numpy
+import damask
+from optparse import OptionParser, OptionGroup, Option, SUPPRESS_HELP
+
+scriptID = '$Id$'
+scriptName = scriptID.split()[1]
+
+#--------------------------------------------------------------------------------------------------
+class extendedOption(Option):
+#--------------------------------------------------------------------------------------------------
+# used for definition of new option parser action 'extend', which enables to take multiple option arguments
+# taken from online tutorial http://docs.python.org/library/optparse.html
+
+  ACTIONS = Option.ACTIONS + ("extend",)
+  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
+  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
+  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
+
+  def take_action(self, action, dest, opt, value, values, parser):
+    if action == "extend":
+      lvalue = value.split(",")
+      values.ensure_value(dest, []).extend(lvalue)
+    else:
+      Option.take_action(self, action, dest, opt, value, values, parser)
+
+
+#--------------------------------------------------------------------------------------------------
+# MAIN
+#--------------------------------------------------------------------------------------------------
+synonyms = {
+        'grid':   ['resolution'],
+        'size':   ['dimension'],
+          }
+identifiers = {
+        'grid':   ['a','b','c'],
+        'size':   ['x','y','z'],
+        'origin': ['x','y','z'],
+          }
+mappings = {
+        'grid':            lambda x: int(x),
+        'size':            lambda x: float(x),
+        'origin':          lambda x: float(x),
+        'homogenization':  lambda x: int(x),
+        'microstructures': lambda x: int(x),
+          }
+
+parser = OptionParser(option_class=extendedOption, usage='%prog options [file[s]]', description = """
+Inserts a primitive geometric object (hexahedral box, or ellipsoid for negative dimensions) at a given
+position into a spectral geometry description and fills it with the specified microstructure index.
+""" + string.replace(scriptID,'\n','\\n')
+)
+
+parser.add_option('-o', '--origin', dest='origin', type='int', nargs = 3, metavar='int int int', \
+                  help='a,b,c origin of primitive %default')
+parser.add_option('-d', '--dimension', dest='dimension', type='int', nargs = 3, metavar='int int int', \
+                  help='a,b,c extension of hexahedral box; negative values are diameters')
+parser.add_option('-f', '--fill', dest='fill', type='int', metavar = 'int', \
+                  help='grain index to fill primitive. "0" selects maximum microstructure index + 1 [%default]')
+
+parser.set_defaults(origin = [0,0,0],
+                    fill = 0,
+                    )
+
+(options, filenames) = parser.parse_args()
+
+#--- setup file handles --------------------------------------------------------------------------
+files = []
+if filenames == []:
+  files.append({'name':'STDIN',
+                'input':sys.stdin,
+                'output':sys.stdout,
+                'croak':sys.stderr,
+               })
+else:
+  for name in filenames:
+    if os.path.exists(name):
+      files.append({'name':name,
+                    'input':open(name),
+                    'output':open(name+'_tmp','w'),
+                    'croak':sys.stdout,
+                   })
+
+#--- loop over input files ------------------------------------------------------------------------
+for file in files:
+  if file['name'] != 'STDIN': file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
+  else:                       file['croak'].write('\033[1m'+scriptName+'\033[0m\n')
+
+  theTable = damask.ASCIItable(file['input'],file['output'],labels = False)
+  theTable.head_read()
+
+#--- interpret header ----------------------------------------------------------------------------
+  info = {
+          'grid':            numpy.zeros(3,'i'),
+          'size':            numpy.zeros(3,'d'),
+          'origin':          numpy.zeros(3,'d'),
+          'homogenization':  0,
+          'microstructures': 0,
+         }
+  newInfo = {
+          'grid':            numpy.zeros(3,'i'),
+          'origin':          numpy.zeros(3,'d'),
+          'microstructures': 0,
+         }
+  extra_header = []
+
+  for header in theTable.info:
+    headitems = map(str.lower,header.split())
+    if len(headitems) == 0: continue                                          # skip blank lines
+    for synonym,alternatives in synonyms.iteritems():
+      if headitems[0] in alternatives: headitems[0] = synonym
+    if headitems[0] in mappings.keys():
+      if headitems[0] in identifiers.keys():
+        for i in xrange(len(identifiers[headitems[0]])):
+          info[headitems[0]][i] = \
+            mappings[headitems[0]](headitems[headitems.index(identifiers[headitems[0]][i])+1])
+      else:
+        info[headitems[0]] = mappings[headitems[0]](headitems[1])
+    else:
+      extra_header.append(header)
+
+  file['croak'].write('grid     a b c:  %s\n'%(' x '.join(map(str,info['grid']))) + \
+                      'size     x y z:  %s\n'%(' x '.join(map(str,info['size']))) + \
+                      'origin   x y z:  %s\n'%(' : '.join(map(str,info['origin']))) + \
+                      'homogenization:  %i\n'%info['homogenization'] + \
+                      'microstructures: %i\n'%info['microstructures'])
+
+  if numpy.any(info['grid'] < 1):
+    file['croak'].write('invalid grid a b c.\n')
+    continue
+  if numpy.any(info['size'] <= 0.0):
+    file['croak'].write('invalid size x y z.\n')
+    continue
+
+#--- read data ------------------------------------------------------------------------------------
+  microstructure = numpy.zeros(info['grid'].prod(),'i')                       # initialize as flat array
+  i = 0
+
+  while theTable.data_read():
+    items = theTable.data
+    if len(items) > 2:
+      if   items[1].lower() == 'of': items = [int(items[2])]*int(items[0])
+      elif items[1].lower() == 'to': items = xrange(int(items[0]),1+int(items[2]))
+      else:                          items = map(int,items)
+    else:                            items = map(int,items)
+
+    s = len(items)
+    microstructure[i:i+s] = items
+    i += s
+
+#--- do work ------------------------------------------------------------------------------------
+
+  if options.fill == 0:
+    options.fill = microstructure.max()+1
+
+  microstructure = microstructure.reshape(info['grid'],order='F')
+
+  if options.dimension != None:
+    mask = (numpy.array(options.dimension) < 0).astype(float)
+    dim = abs(numpy.array(options.dimension))
+    scale = 2.0/dim
+    d = numpy.zeros(3,dtype='float')
+
+    d[0] = (1.-dim[0])/2.0
+    for x in xrange(int(math.ceil(options.origin[0]-dim[0]/2.)),
+                    int(math.ceil(options.origin[0]+dim[0]/2.))):
+      i = x%info['grid'][0]
+      d[1] = (1.-dim[1])/2.0
+
+      for y in xrange(int(math.ceil(options.origin[1]-dim[1]/2.)),
+                      int(math.ceil(options.origin[1]+dim[1]/2.))):
+        j = y%info['grid'][1]
+        d[2] = (1.-dim[2])/2.0
+
+        for z in xrange(int(math.ceil(options.origin[2]-dim[2]/2.)),
+                        int(math.ceil(options.origin[2]+dim[2]/2.))):
+          k = z%info['grid'][2]
+
+          if numpy.dot(d*scale*mask,d*scale*mask) <= 1.0:
+            microstructure[i,j,k] = options.fill
+          d[2] += 1.
+        d[1] += 1.
+      d[0] += 1.
+
+  newInfo['microstructures'] = microstructure.max()
+
+
+#--- report ---------------------------------------------------------------------------------------
+  if (newInfo['microstructures'] != info['microstructures']):
+    file['croak'].write('--> microstructures: %i\n'%newInfo['microstructures'])
+
+#--- write header ---------------------------------------------------------------------------------
+  theTable.labels_clear()
+  theTable.info_clear()
+  theTable.info_append(extra_header+[
+        scriptID + ' ' + ' '.join(sys.argv[1:]),
+        "grid\ta %i\tb %i\tc %i"%(info['grid'][0],info['grid'][1],info['grid'][2],),
+        "size\tx %f\ty %f\tz %f"%(info['size'][0],info['size'][1],info['size'][2],),
+        "origin\tx %f\ty %f\tz %f"%(info['origin'][0],info['origin'][1],info['origin'][2],),
+        "homogenization\t%i"%info['homogenization'],
+        "microstructures\t%i"%(newInfo['microstructures']),
+        ])
+  theTable.head_write()
+  theTable.output_flush()
+
+# --- write microstructure information ------------------------------------------------------------
+  formatwidth = int(math.floor(math.log10(microstructure.max())+1))
+  theTable.data = microstructure.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
+  theTable.data_writeArray('%%%ii'%(formatwidth),delimiter=' ')
+
+#--- output finalization --------------------------------------------------------------------------
+  if file['name'] != 'STDIN':
+    file['input'].close()
+    file['output'].close()
+    os.rename(file['name']+'_tmp',file['name'])
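
The triple loop in geom_addPrimitive.py fills a periodic box with the chosen fill index; for axes given a negative dimension, the mask turns the inside test into an ellipsoid equation. As a closing illustration, a vectorized sketch of the same masking idea is given below; grid, origin, dimension, and fill carry stand-in values, and this is not the committed implementation:

import numpy

grid      = numpy.array([64,64,64])                     # stand-in for info['grid']
origin    = numpy.array([32,32,32])                     # stand-in for options.origin
dimension = numpy.array([20,-16,-16])                   # box along a, ellipsoid diameters along b and c
fill      = 2                                           # stand-in for options.fill

mask  = (dimension < 0).astype(float)                   # 1.0 on axes treated as ellipsoid diameters
dim   = numpy.abs(dimension)
scale = 2.0/dim

# offset of every cell inside the bounding box, measured from the box centre
d = numpy.meshgrid(*[numpy.arange(n) - (n-1)/2.0 for n in dim], indexing='ij')
inside = sum((di*s*m)**2 for di,s,m in zip(d,scale,mask)) <= 1.0

# map bounding-box cells back onto the (periodic) grid, as the modulo in the loops does
start = [int(numpy.ceil(o - n/2.0)) for o,n in zip(origin,dim)]
idx = numpy.meshgrid(*[(s + numpy.arange(n))%g for s,n,g in zip(start,dim,grid)], indexing='ij')

microstructure = numpy.ones(grid,dtype='i')             # stand-in geometry with a single background index
microstructure[idx[0][inside],idx[1][inside],idx[2][inside]] = fill
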