#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,string,re,math,numpy,itertools
import damask
from optparse import OptionParser, OptionGroup, Option, SUPPRESS_HELP
from scipy import ndimage
from multiprocessing import Pool

scriptID = '$Id$'
scriptName = scriptID.split()[1]
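# note: '$Id$' is expected to be expanded to the full revision string by version-control keyword
# substitution, in which case scriptName picks up the file-name token of that expanded string
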
#--------------------------------------------------------------------------------------------------
class extendedOption(Option):
#--------------------------------------------------------------------------------------------------
# used for definition of new option parser action 'extend', which enables to take multiple option arguments
# taken from online tutorial http://docs.python.org/library/optparse.html
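# e.g. a hypothetical option '--foo' declared with action='extend' turns "--foo 1,2,3" into the
# list ['1','2','3'] on its destination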

    ACTIONS = Option.ACTIONS + ("extend",)
    STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
    TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
    ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

    def take_action(self, action, dest, opt, value, values, parser):
        if action == "extend":
            lvalue = value.split(",")
            values.ensure_value(dest, []).extend(lvalue)
        else:
            Option.take_action(self, action, dest, opt, value, values, parser)

#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
synonyms = {
        'grid':   ['resolution'],
        'size':   ['dimension'],
           }
identifiers = {
        'grid':    ['a','b','c'],
        'size':    ['x','y','z'],
        'origin':  ['x','y','z'],
           }
mappings = {
        'grid':            lambda x: int(x),
        'size':            lambda x: float(x),
        'origin':          lambda x: float(x),
        'homogenization':  lambda x: int(x),
        'microstructures': lambda x: int(x),
           }

parser = OptionParser(option_class=extendedOption, usage='%prog options [file[s]]', description = """
Smooths out interface roughness by simulated curvature flow.
This is achieved by diffusing each initially sharply bounded grain volume within the periodic domain
up to a given distance 'd' voxels.
The final geometry is assembled by selecting at each voxel the grain index for which the concentration remains largest.
""" + string.replace(scriptID,'\n','\\n')
)

parser.add_option('-d', '--distance', dest='d', type='int', metavar='int', \
                  help='diffusion distance in voxels [%default]')
parser.add_option('-N', '--smooth', dest='N', type='int', metavar='int', \
                  help='number of curvature flow iterations [%default]')
parser.add_option('-r', '--renumber', dest='renumber', action='store_true', \
                  help='renumber microstructure indices from 1...N [%default]')

parser.set_defaults(d = 1)
parser.set_defaults(N = 1)
parser.set_defaults(renumber = False)

(options, filenames) = parser.parse_args()

#--- setup file handles --------------------------------------------------------------------------
files = []
if filenames == []:
  files.append({'name':'STDIN',
                'input':sys.stdin,
                'output':sys.stdout,
                'croak':sys.stderr,
               })
else:
  for name in filenames:
    if os.path.exists(name):
      files.append({'name':name,
                    'input':open(name),
                    'output':open(name+'_tmp','w'),
                    'croak':sys.stdout,
                   })

#--- loop over input files ------------------------------------------------------------------------
for file in files:
  if file['name'] != 'STDIN': file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
  else:                       file['croak'].write('\033[1m'+scriptName+'\033[0m\n')

  theTable = damask.ASCIItable(file['input'],file['output'],labels = False,buffered = False)
  theTable.head_read()

#--- interpret header ----------------------------------------------------------------------------
  info = {
          'grid':    numpy.zeros(3,'i'),
          'size':    numpy.zeros(3,'d'),
          'origin':  numpy.zeros(3,'d'),
          'homogenization':  0,
          'microstructures': 0,
         }
  newInfo = {
          'microstructures': 0,
         }
  extra_header = []

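  # parse header keywords (e.g. "grid a 16 b 16 c 1"); synonyms are mapped to canonical names,
  # values are converted via 'mappings', and unrecognized lines are preserved in extra_header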
  for header in theTable.info:
    headitems = map(str.lower,header.split())
    if len(headitems) == 0: continue
    for synonym,alternatives in synonyms.iteritems():
      if headitems[0] in alternatives: headitems[0] = synonym
    if headitems[0] in mappings.keys():
      if headitems[0] in identifiers.keys():
        for i in xrange(len(identifiers[headitems[0]])):
          info[headitems[0]][i] = \
            mappings[headitems[0]](headitems[headitems.index(identifiers[headitems[0]][i])+1])
      else:
        info[headitems[0]] = mappings[headitems[0]](headitems[1])
    else:
      extra_header.append(header)

  file['croak'].write('grid     a b c:  %s\n'%(' x '.join(map(str,info['grid']))) + \
                      'size     x y z:  %s\n'%(' x '.join(map(str,info['size']))) + \
                      'origin   x y z:  %s\n'%(' : '.join(map(str,info['origin']))) + \
                      'homogenization:  %i\n'%info['homogenization'] + \
                      'microstructures: %i\n'%info['microstructures'])

  if numpy.any(info['grid'] < 1):
    file['croak'].write('invalid grid a b c.\n')
    continue
  if numpy.any(info['size'] <= 0.0):
    file['croak'].write('invalid size x y z.\n')
    continue

#--- read data ------------------------------------------------------------------------------------
  microstructure = numpy.zeros(numpy.prod([2 if i == 1 else i for i in info['grid']]),'i')          # 2D structures do not work
  i = 0

  while theTable.data_read():                                                                       # read next data line of ASCII table
    items = theTable.data
    if len(items) > 2:
      if   items[1].lower() == 'of': items = [int(items[2])]*int(items[0])                          # compact notation "n of m": n copies of index m
      elif items[1].lower() == 'to': items = xrange(int(items[0]),1+int(items[2]))                  # compact notation "a to b": consecutive indices a..b
      else:                          items = map(int,items)
    else:                            items = map(int,items)

    s = len(items)
    microstructure[i:i+s] = items
    i += s

#--- reshape, if 2D make copy ---------------------------------------------------------------------
  expandedGrid = numpy.array([2 if i == 1 else i for i in info['grid']],'i')
  nMicrostructures = numpy.prod(expandedGrid)
  if nMicrostructures > info['grid'].prod():
    microstructure[info['grid'].prod():nMicrostructures] = microstructure[0:info['grid'].prod()]
  microstructure = microstructure.reshape([2 if i == 1 else i for i in info['grid']],order='F')
  grid = numpy.array([2 if i == 1 else i for i in info['grid']],'i')

#--- initialize helper data -----------------------------------------------------------------------
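  # periodic Gaussian diffusion kernel with standard deviation options.d voxels: the kernel is built
  # around voxel (0,0,0) and mirrored along each axis so it wraps around the box, then kept in
  # Fourier space (rfftn) since it is reused in every smoothing iteration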
  X,Y,Z = numpy.mgrid[0:expandedGrid[0],0:expandedGrid[1],0:expandedGrid[2]]
  gauss = numpy.exp(-(X*X+Y*Y+Z*Z)/(2.0*options.d*options.d))/math.pow(2.0*numpy.pi*options.d*options.d,1.5)
  gauss[:,:,(expandedGrid[2])/2::] = gauss[:,:,(expandedGrid[2])/2-1::-1]
  gauss[:,(expandedGrid[1])/2::,:] = gauss[:,(expandedGrid[1])/2-1::-1,:]
  gauss[(expandedGrid[0])/2::,:,:] = gauss[(expandedGrid[0])/2-1::-1,:,:]
  gauss = numpy.fft.rfftn(gauss)

  interfacialEnergy = lambda A,B: (A*B != 0)*(A != B)*1.0                                           # 1.0 wherever two different, nonzero grain indices meet
  struc = ndimage.generate_binary_structure(3,1)                                                    # 6-connected (face-neighbor) structuring element
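  # each smoothing iteration:
  #   1. flag voxels whose 26-voxel neighborhood contains a different grain (interfacial energy > 0)
  #   2. spread that interface indicator into a band of width 2*d-1 voxels and smear it by FFT
  #      convolution with the periodic Gaussian kernel (the curvature-flow / diffusion step)
  #   3. within the smeared boundary region (value >= 0.5) reassign each voxel to the grain of the
  #      nearest voxel outside that region, working on a periodically tiled copy of the domain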
  for smoothIter in xrange(options.N):
    boundary = numpy.zeros(microstructure.shape)
    for i in range(3):
      for j in range(3):
        for k in range(3):
          boundary = numpy.maximum(boundary,
                                   interfacialEnergy(microstructure,numpy.roll(numpy.roll(numpy.roll(
                                   microstructure,i-1,axis=0),j-1,axis=1),k-1,axis=2)))
    index = ndimage.morphology.distance_transform_edt(boundary == 0.,return_distances = False,return_indices = True)
    boundary = numpy.fft.irfftn(numpy.fft.rfftn(numpy.where(ndimage.morphology.binary_dilation(boundary != 0.,
                                                                                               structure = struc,
                                                                                               iterations = 2*options.d-1),
                                                            boundary[index[0].flatten(),index[1].flatten(),index[2].flatten()].reshape(microstructure.shape),
                                                            0.))*gauss)
    boundaryExt = numpy.tile(boundary,(3,3,3))
    boundaryExt = boundaryExt[(expandedGrid[0])/2:-(expandedGrid[0])/2,
                              (expandedGrid[1])/2:-(expandedGrid[1])/2,
                              (expandedGrid[2])/2:-(expandedGrid[2])/2]
    microstructureExt = numpy.tile(microstructure,(3,3,3))
    microstructureExt = microstructureExt[(expandedGrid[0])/2:-(expandedGrid[0])/2,
                                          (expandedGrid[1])/2:-(expandedGrid[1])/2,
                                          (expandedGrid[2])/2:-(expandedGrid[2])/2]
    index = ndimage.morphology.distance_transform_edt(boundaryExt >= 0.5,return_distances=False,return_indices=True)
    microstructureExt = microstructureExt[index[0].flatten(),index[1].flatten(),index[2].flatten()].reshape(microstructureExt.shape)
    microstructure = microstructureExt[(expandedGrid[0])/2:-(expandedGrid[0])/2,
                                       (expandedGrid[1])/2:-(expandedGrid[1])/2,
                                       (expandedGrid[2])/2:-(expandedGrid[2])/2]

# --- renumber to sequence 1...Ngrains if requested ------------------------------------------------
# http://stackoverflow.com/questions/10741346/numpy-frequency-counts-for-unique-values-in-an-array
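  # numpy.bincount returns the voxel count for every index value 0..max(microstructure);
  # indices that still occur (count != 0) are compacted to the consecutive sequence 1...N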
  if options.renumber:
    newID = 0
    for microstructureID,count in enumerate(numpy.bincount(microstructure.flatten())):
      if count != 0:
        newID += 1
        microstructure = numpy.where(microstructure == microstructureID,newID,microstructure)

# --- assemble header -----------------------------------------------------------------------------
  newInfo['microstructures'] = microstructure[0:info['grid'][0],0:info['grid'][1],0:info['grid'][2]].max()

#--- report ---------------------------------------------------------------------------------------
  if (newInfo['microstructures'] != info['microstructures']):
    file['croak'].write('--> microstructures: %i\n'%newInfo['microstructures'])

#--- write header ---------------------------------------------------------------------------------
  theTable.labels_clear()
  theTable.info_clear()
  theTable.info_append(extra_header+[
        scriptID + ' ' + ' '.join(sys.argv[1:]),
        "grid\ta %i\tb %i\tc %i"%(info['grid'][0],info['grid'][1],info['grid'][2],),
        "size\tx %f\ty %f\tz %f"%(info['size'][0],info['size'][1],info['size'][2],),
        "origin\tx %f\ty %f\tz %f"%(info['origin'][0],info['origin'][1],info['origin'][2],),
        "homogenization\t%i"%info['homogenization'],
        "microstructures\t%i"%(newInfo['microstructures']),
        ])
  theTable.head_write()

# --- write microstructure information ------------------------------------------------------------
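  # flatten the grain indices in column-major (Fortran) order, i.e. with the first grid index
  # varying fastest, and write them right-aligned using the width of the largest index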
  formatwidth = int(math.floor(math.log10(microstructure.max())+1))
  theTable.data = microstructure[0:info['grid'][0],0:info['grid'][1],0:info['grid'][2]].reshape(numpy.prod(info['grid']),order='F').transpose()
  theTable.data_writeArray('%%%ii'%(formatwidth),delimiter=' ')

#--- output finalization --------------------------------------------------------------------------
  if file['name'] != 'STDIN':
    file['input'].close()
    file['output'].close()
    os.rename(file['name']+'_tmp',file['name'])                                                     # replace original geom file with smoothed result