Tested the modified scripts; all scripts generating geometry are now named geom_from...
This commit is contained in:
parent 0b4a5cfa9b
commit 53348db000
@@ -1,278 +0,0 @@
-#!/usr/bin/env python
-
-import os,re,sys,math,string,damask,numpy
-from optparse import OptionParser, Option
-
-# -----------------------------
-class extendableOption(Option):
-# -----------------------------
-# used for definition of new option parser action 'extend', which enables to take multiple option arguments
-# taken from online tutorial http://docs.python.org/library/optparse.html
-
-  ACTIONS = Option.ACTIONS + ("extend",)
-  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
-  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
-  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
-
-  def take_action(self, action, dest, opt, value, values, parser):
-    if action == "extend":
-      lvalue = value.split(",")
-      values.ensure_value(dest, []).extend(lvalue)
-    else:
-      Option.take_action(self, action, dest, opt, value, values, parser)
-
-def integerFactorization(i):
-
-  j = int(math.floor(math.sqrt(float(i))))
-  while (j>1 and int(i)%j != 0):
-    j -= 1
-  return j
-
-def positiveRadians(angle):
-
-  angle = math.radians(float(angle))
-  while angle < 0.0:
-    angle += 2.0*math.pi
-
-  return angle
-
-
-def getHeader(sizeX,sizeY,step):
-
-  return [ \
-    '# TEM_PIXperUM 1.000000', \
-    '# x-star 0.509548', \
-    '# y-star 0.795272', \
-    '# z-star 0.611799', \
-    '# WorkingDistance 18.000000', \
-    '#', \
-    '# Phase 1', \
-    '# MaterialName Al', \
-    '# Formula Fe', \
-    '# Info', \
-    '# Symmetry 43', \
-    '# LatticeConstants 2.870 2.870 2.870 90.000 90.000 90.000', \
-    '# NumberFamilies 4', \
-    '# hklFamilies 1 1 0 1 0.000000 1', \
-    '# hklFamilies 2 0 0 1 0.000000 1', \
-    '# hklFamilies 2 1 1 1 0.000000 1', \
-    '# hklFamilies 3 1 0 1 0.000000 1', \
-    '# Categories 0 0 0 0 0 ', \
-    '#', \
-    '# GRID: SquareGrid', \
-    '# XSTEP: ' + str(step), \
-    '# YSTEP: ' + str(step), \
-    '# NCOLS_ODD: ' + str(sizeX), \
-    '# NCOLS_EVEN: ' + str(sizeX), \
-    '# NROWS: ' + str(sizeY), \
-    '#', \
-    '# OPERATOR: ODFsammpling', \
-    '#', \
-    '# SAMPLEID: ', \
-    '#', \
-    '# SCANID: ', \
-    '#', \
-    ]
-
-# --------------------------------------------------------------------
-# MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=extendableOption, usage='%prog options [file[s]]', description = """
-Builds an ang file out of ASCII table.
-
-""" + string.replace('$Id$','\n','\\n')
-)
-
-
-parser.add_option('--coords', dest='coords', type='string', \
-                  help='label of coords in ASCII table')
-parser.add_option('--eulerangles', dest='eulerangles', type='string', \
-                  help='label of euler angles in ASCII table')
-parser.add_option('--defgrad', dest='defgrad', type='string', \
-                  help='label of deformation gradient in ASCII table')
-parser.add_option('-n','--normal', dest='normal', type='float', nargs=3, \
-                  help='normal of slices to visualize')
-parser.add_option('-s','--size', dest='size', type='float', nargs=3, \
-                  help='physical size of ang file')
-parser.add_option('-u','--up', dest='up', type='float', nargs=3,
-                  help='up direction of slices to visualize')
-parser.add_option('-r','--resolution', dest='res', type='float',
-                  help='scaling factor for resolution')
-parser.add_option('--hexagonal', dest='hex', action='store_true',
-                  help='use in plane hexagonal grid')
-parser.add_option('-c','--center', dest='center', type='float', nargs=3,
-                  help='center of ang file in cube, negative for center')
-parser.set_defaults(coords = 'coords')
-parser.set_defaults(eulerangles = 'eulerangles')
-parser.set_defaults(defgrad = 'f')
-parser.set_defaults(hexagonal = False)
-parser.set_defaults(normal = [0.0,0.0,1.0])
-parser.set_defaults(size = [1.0,1.0,0.0])
-parser.set_defaults(up = [1.0,0.0,0.0])
-parser.set_defaults(center = [-1.0,-1.0,-1.0])
-parser.set_defaults(res = 1.0)
-(options,filenames) = parser.parse_args()
-
-datainfo = {
-             'vector': {'len':3,
-                        'label':[]},
-             'tensor': {'len':9,
-                        'label':[]}
-           }
-
-datainfo['vector']['label'].append(options.coords)
-datainfo['vector']['label'].append(options.eulerangles)
-datainfo['tensor']['label'].append(options.defgrad)
-
-# ------------------------------------------ setup file handles ---------------------------------------
-
-files = []
-if filenames == []:
-  files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout})
-else:
-  for name in filenames:
-    if os.path.exists(name):
-      files.append({'name':name, 'input':open(name)})
-
-
-# ------------------------------------------ loop over input files ---------------------------------------
-
-for file in files:
-  if file['name'] != 'STDIN': print file['name']
-
-  table = damask.ASCIItable(file['input']) # open ASCII_table for reading
-  table.head_read() # read ASCII header info
-
-# --------------- figure out dimension and resolution
-
-  try:
-    locationCol = table.labels.index('ip.x') # columns containing location data
-  except ValueError:
-    print 'no coordinate data found...'
-    continue
-
-  grid = [{},{},{}]
-  while table.data_read(): # read next data line of ASCII table
-    for j in xrange(3):
-      grid[j][str(table.data[locationCol+j])] = True # remember coordinate along x,y,z
-  resolution = numpy.array([len(grid[0]),\
-                            len(grid[1]),\
-                            len(grid[2]),],'i') # resolution is number of distinct coordinates found
-  dimension = resolution/numpy.maximum(numpy.ones(3,'d'),resolution-1.0)* \
-              numpy.array([max(map(float,grid[0].keys()))-min(map(float,grid[0].keys())),\
-                           max(map(float,grid[1].keys()))-min(map(float,grid[1].keys())),\
-                           max(map(float,grid[2].keys()))-min(map(float,grid[2].keys())),\
-                           ],'d') # dimension from bounding box, corrected for cell-centeredness
-  if resolution[2] == 1:
-    dimension[2] = min(dimension[:2]/resolution[:2])
-
-  N = resolution.prod()
-  print '\t%s @ %s'%(dimension,resolution)
-
-
-# --------------- figure out columns to process
-  active = {}
-  column = {}
-  values = {}
-
-  head = []
-  for datatype,info in datainfo.items():
-    for label in info['label']:
-      key = {True :'1_%s',
-             False:'%s' }[info['len']>1]%label
-      if key not in table.labels:
-        sys.stderr.write('column %s not found...\n'%key)
-      else:
-        if datatype not in active: active[datatype] = []
-        if datatype not in column: column[datatype] = {}
-        if datatype not in values: values[datatype] = {}
-        active[datatype].append(label)
-        column[datatype][label] = table.labels.index(key) # remember columns of requested data
-        values[datatype][label] = numpy.array([0.0 for i in xrange(N*datainfo[datatype]['len'])])
-
-# ------------------------------------------ read value field ---------------------------------------
-
-  table.data_rewind()
-  idx = 0
-  while table.data_read(): # read next data line of ASCII table
-    for datatype,labels in active.items(): # loop over vector,tensor
-      for label in labels: # loop over all requested curls
-        begin = idx*datainfo[datatype]['len']
-        end = begin + datainfo[datatype]['len']
-        values[datatype][label][begin:end]= numpy.array(map(float,table.data[column[datatype][label]:
-                                                            column[datatype][label]+datainfo[datatype]['len']]),'d')
-    idx+=1
-
-  stepSize = 0.0
-  for i in xrange(3): stepSize+=dimension[i]/resolution[i]/3.0/options.res
-  print 'step size', stepSize
-
-  if options.hexagonal:
-    stepSize0 = stepSize * math.sin(1.0/3.0*math.pi)
-  else:
-    stepSize0 = stepSize
-
-  print 'step Size in x direction', stepSize0
-
-  angRes = int(options.size[0]/stepSize0),\
-           int(options.size[1]/stepSize),\
-           max(int(options.size[2]/stepSize),1)
-  print 'resolution of ang file', angRes
-
-  if options.hexagonal:
-    NpointsSlice = angRes[0]//2*(angRes[1]-1)+(angRes[0]-angRes[0]//2)*angRes[1]
-  else:
-    NpointsSlice = angRes[0]*angRes[1]
-
-  z = numpy.array(options.normal,dtype='float')
-  z = z/numpy.linalg.norm(z)
-  x = numpy.array(options.up,dtype='float')
-  x = x/numpy.linalg.norm(x)
-  y = numpy.cross(z,x)
-  x = numpy.cross(y,z)
-  print 'x unit vector', x, 'with norm ', numpy.linalg.norm(x)
-  print 'y unit vector', y, 'with norm ', numpy.linalg.norm(y)
-  print 'z unit vector', z, 'with norm ', numpy.linalg.norm(z)
-  Favg = damask.core.math.tensorAvg(values['tensor']['%s'%(options.defgrad)].\
-                                    reshape(resolution[0],resolution[1],resolution[2],3,3))
-
-  coordTransform = numpy.array([x,y,z])
-  print 'rotation matrix', coordTransform
-
-  mySlice = numpy.zeros(NpointsSlice*3)
-  eulerangles = values['vector']['%s'%options.eulerangles].reshape([3,N],order='F')
-
-  offset = ((dimension - options.size)/2.0 + (dimension/angRes)/2.0)/options.res
-  print 'offset', offset
-# offset = numpy.array([0.5,0.5,0.5],dtype='float')/[float(options.res[0]),float(options.res[1]),float(options.res[2])]*[dimension[0],dimension[1],dimension[2]]
-  for i in xrange(angRes[2]):
-    idx = 0
-    for j in xrange(angRes[0]):
-      if options.hexagonal:
-        res1=angRes[1]-j%2
-        #myOffset = offset +float(j%2)* numpy.array([0.0,0.5,0.0],dtype='float')/[float(options.res[0]),float(options.res[1]),float(options.res[2])]*[dimension[0],dimension[1],dimension[2]]
-        myOffset = offset +float(j%2)* numpy.array([0.0,0.5*stepSize,0.0],dtype='float')
-      else:
-        res1=angRes[1]
-        myOffset = offset
-      for k in xrange(res1):
-        mySlice[idx*3:idx*3+3] = numpy.dot(coordTransform,[j*stepSize0,k*stepSize,i*stepSize]+myOffset)
-        #print mySlice[idx*3:idx*3+3]
-        idx+=1
-    mySlice = mySlice.reshape([3,NpointsSlice],order='F')
-    indices=damask.core.math.math_nearestNeighborSearch(3,Favg,numpy.array(
-            dimension,dtype='float'),NpointsSlice,N,mySlice,values['vector']['%s'%options.coords].reshape([3,N],order='F'))/27
-    fileOut=open(os.path.join(os.path.dirname(name),os.path.splitext(os.path.basename(name))[0]+'_%s.ang'%(angRes[2]-i-1)),'w')
-    for line in getHeader(angRes[0],angRes[1],angRes[2]):
-      fileOut.write(line + '\n')
-
-# write data
-    for idx in xrange(NpointsSlice):
-      fileOut.write(''.join(['%10.5f'%positiveRadians(angle) for angle in eulerangles[:,indices[idx]]])+
-                    ' %10.5f %10.5f'%(mySlice[1,idx],mySlice[0,idx])+
-                    ' 100.0 1.0 0 1 1.0\n')
-
-    fileOut.close()
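Note on the option handling used throughout these scripts: the removed script defines an extendableOption class (its sibling extendedOption appears in the hunks below) that teaches optparse an 'extend' action, so comma-separated option values accumulate into one list. A minimal, self-contained sketch of that behaviour (Python 2, matching the scripts; the --labels option name and the argument values are made up for illustration):

from optparse import Option, OptionParser

class extendableOption(Option):
  # same pattern as in the scripts: comma-separated values extend a list
  ACTIONS = Option.ACTIONS + ("extend",)
  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

  def take_action(self, action, dest, opt, value, values, parser):
    if action == "extend":
      values.ensure_value(dest, []).extend(value.split(","))
    else:
      Option.take_action(self, action, dest, opt, value, values, parser)

parser = OptionParser(option_class=extendableOption)
parser.add_option('--labels', dest='labels', action='extend', type='string')
(options, args) = parser.parse_args(['--labels', 'x,y', '--labels', 'z'])
print options.labels                                  # ['x', 'y', 'z']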
@@ -105,25 +105,26 @@ for file in files:
                       'homogenization: %i\n'%info['homogenization'] + \
                       'microstructures: %i\n'%info['microstructures'])

-  if numpy.all(any['grid'] < 1):
-    file['croak'].write('no valid grid info found.\n')
+  if numpy.any(info['grid'] < 1):
+    file['croak'].write('invalid grid a b c.\n')
     sys.exit()
   if numpy.any(info['size'] <= 0.0):
-    file['croak'].write('no valid size info found.\n')
+    file['croak'].write('invalid size x y z.\n')
     sys.exit()


 #--- generate grid --------------------------------------------------------------------------------
   grid = vtk.vtkRectilinearGrid()
   grid.SetDimensions([x+1 for x in info['grid']])
-  temp = vtk.vtkDoubleArray()
+  temp = []
   for i in xrange(3):
-    temp.SetNumberOfTuples(info['grid'][i]+1)
+    temp.append(vtk.vtkDoubleArray())
+    temp[i].SetNumberOfTuples(info['grid'][i]+1)
     for j in range(info['grid'][i]+1):
-      temp.InsertTuple1(j,j*info['size'][i]/info['grid'][i]+info['origin'][i])
-    if i == 0: grid.SetXCoordinates(temp)
-    if i == 1: grid.SetYCoordinates(temp)
-    if i == 2: grid.SetZCoordinates(temp)
+      temp[i].InsertTuple1(j,j*info['size'][i]/info['grid'][i]+info['origin'][i])
+    if i == 0: grid.SetXCoordinates(temp[0])
+    if i == 1: grid.SetYCoordinates(temp[1])
+    if i == 2: grid.SetZCoordinates(temp[2])

 #--- read microstructure information --------------------------------------------------------------
   structure = vtk.vtkIntArray()
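The vtkRectilinearGrid hunk above replaces one reused vtkDoubleArray with a list holding one array per axis. SetXCoordinates/SetYCoordinates/SetZCoordinates keep a reference to the array they are given rather than copying it, so filling and resizing a single shared array would leave all three axes pointing at whatever was written last. A minimal sketch of the corrected pattern, with made-up cell counts, size and origin:

import vtk

cells  = [4, 3, 2]          # hypothetical a x b x c cell counts
size   = [1.0, 1.0, 0.5]    # hypothetical physical size
origin = [0.0, 0.0, 0.0]

rgrid = vtk.vtkRectilinearGrid()
rgrid.SetDimensions([n+1 for n in cells])             # grid points = cells + 1 per axis

coords = []
for i in xrange(3):
  coords.append(vtk.vtkDoubleArray())                 # a separate coordinate array per axis
  coords[i].SetNumberOfTuples(cells[i]+1)
  for j in range(cells[i]+1):
    coords[i].InsertTuple1(j, j*size[i]/cells[i] + origin[i])

rgrid.SetXCoordinates(coords[0])                      # each axis keeps its own array
rgrid.SetYCoordinates(coords[1])
rgrid.SetZCoordinates(coords[2])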
@@ -63,9 +63,9 @@ mappings = {
           }

 features = [
-            {'aliens': 1, 'names': ['boundary, biplane'],},
-            {'aliens': 2, 'names': ['tripleline',],},
-            {'aliens': 3, 'names': ['quadruplepoint',],}
+            {'aliens': 1, 'names': ['boundary(biplane)'],},
+            {'aliens': 2, 'names': ['tripleline'],},
+            {'aliens': 3, 'names': ['quadruplepoint'],}
            ]

 neighborhoods = {
@@ -140,7 +140,7 @@ for i,feature in enumerate(features):
       feature_list.append(i) # remember valid features
       break

-
+print feature_list
 #--- setup file handles ---------------------------------------------------------------------------
 files = []
 if filenames == []:
@@ -150,12 +150,13 @@ if filenames == []:
                 'croak':sys.stderr,
                })
 else:
+  print [string.split(''.join((features[feature]['names'])),sep='(')[0] for feature in feature_list]
   for name in filenames:
     if os.path.exists(name):
       files.append({'name':name,
                     'input':open(name),
-                    'output':[open(features[feature]['names'][0]+'_'+name,'w')
-                              for string.split(feature,sep=',')[0] in feature_list],
+                    'output':[open(string.split(''.join((features[feature]['names'])),sep='(')[0]+'_'+name,'w')
+                              for feature in feature_list],
                     'croak':sys.stdout,
                    })

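The feature renaming above ('boundary, biplane' becoming 'boundary(biplane)') goes together with the reworked output-file loop: the file prefix is now derived by cutting the feature name at the first '('. A small sketch of that string handling (feature names taken from the diff; the input file name my.geom is hypothetical):

import string

features = [{'aliens': 1, 'names': ['boundary(biplane)']},
            {'aliens': 2, 'names': ['tripleline']},
            {'aliens': 3, 'names': ['quadruplepoint']}]

name = 'my.geom'                                      # hypothetical input file name
for feature in features:
  prefix = string.split(''.join(feature['names']),sep='(')[0]
  print prefix + '_' + name                           # boundary_my.geom, tripleline_my.geom, ...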
@@ -209,10 +210,10 @@ for file in files:
                       'microstructures: %i\n'%info['microstructures'])

   if numpy.any(info['grid'] < 1):
-    file['croak'].write('no valid grid info found.\n')
+    file['croak'].write('invalid grid a b c.\n')
     sys.exit()
   if numpy.any(info['size'] <= 0.0):
-    file['croak'].write('no valid size info found.\n')
+    file['croak'].write('invalid size x y z.\n')
     sys.exit()

   new_header.append('$Id$\n')
@@ -255,7 +256,6 @@ for file in files:
     check = convoluted[i,1:-1,1:-1,1:-1]
   for i,feature_id in enumerate(feature_list):
     distance[i,:,:,:] = numpy.where(uniques > features[feature_id]['aliens'],0.0,1.0)
-
   for i in xrange(len(feature_list)):
     distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*\
                         [max(info['size']/info['grid'])]*3
@@ -276,7 +276,6 @@ for file in files:
       output += {True:' ',False:'\n'}[options.twoD].join(map(lambda x: \
                 ('%%%ii'%formatwidth)%(round(x)), distance[i,:,y,z])) + '\n'
     file['output'][i].write(output)
-
   if file['name'] != 'STDIN':
     file['output'][i].close()

@@ -90,10 +90,10 @@ file['croak'].write('grid a b c: %s\n'%(' x '.join(map(str,info['grid'])))
                     'microstructures: %i\n\n'%info['microstructures'])

 if numpy.any(info['grid'] < 1):
-  file['croak'].write('no valid grid info found.\n')
+  file['croak'].write('invalid grid a b c.\n')
   sys.exit()
 if numpy.any(info['size'] <= 0.0):
-  file['croak'].write('no valid size info found.\n')
+  file['croak'].write('invalid size x y z.\n')
   sys.exit()

 #--- write header ---------------------------------------------------------------------------------
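Several hunks in this commit replace the 'no valid grid/size info found' messages with 'invalid grid a b c' and 'invalid size x y z'; the first hunk additionally fixes numpy.all(any['grid'] < 1) to numpy.any(info['grid'] < 1). The all/any distinction is the substantive part: all only fires when every component is bad, while any rejects a grid with a single invalid dimension. A quick illustration with a made-up grid:

import numpy

grid = numpy.array([16, 16, 0])                       # hypothetical grid with one invalid dimension

print numpy.all(grid < 1)                             # False -- a check based on all would pass this grid
print numpy.any(grid < 1)                             # True  -- the check used after this commit rejects it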
@@ -140,10 +140,10 @@ file['croak'].write('grid a b c: %s\n'%(' x '.join(map(str,info['grid'])))
 file['croak'].write("bounding box: %s\n"%(numpy.sqrt(numpy.sum(box*box,0))))

 if numpy.any(info['grid'] < 1):
-  file['croak'].write('no valid grid info found.\n')
+  file['croak'].write('invalid grid a b c.\n')
   sys.exit()
 if numpy.any(info['size'] <= 0.0):
-  file['croak'].write('no valid size info found.\n')
+  file['croak'].write('invalid size x y z.\n')
   sys.exit()

 # -------------------------------------- switch according to task ----------------------------------
@@ -4,10 +4,9 @@
 import os,sys,math,string,re,numpy, damask
 from optparse import OptionParser, OptionGroup, Option, SUPPRESS_HELP

-
-# -----------------------------
+#--------------------------------------------------------------------------------------------------
 class extendedOption(Option):
-# -----------------------------
+#--------------------------------------------------------------------------------------------------
 # used for definition of new option parser action 'extend', which enables to take multiple option arguments
 # taken from online tutorial http://docs.python.org/library/optparse.html

@@ -24,9 +23,9 @@ class extendedOption(Option):
       Option.take_action(self, action, dest, opt, value, values, parser)


-# --------------------------------------------------------------------
+#--------------------------------------------------------------------------------------------------
 # MAIN
-# --------------------------------------------------------------------
+#--------------------------------------------------------------------------------------------------
 identifiers = {
         'grid': ['a','b','c'],
               }
@@ -66,9 +65,7 @@ parser.set_defaults(twoD = False)

 (options,filenames) = parser.parse_args()

-
-# ------------------------------------------ setup file handles ---------------------------------------
-
+#--- setup file handles ---------------------------------------------------------------------------
 files = []
 if filenames == []:
   files.append({'name':'STDIN',
@@ -86,8 +83,7 @@ else:
                })


-# ------------------------------------------ loop over input files ---------------------------------------
-
+#--- loop over input files ------------------------------------------------------------------------

 for file in files:
   if file['name'] != 'STDIN': file['croak'].write(file['name']+'\n')

@@ -103,11 +99,13 @@ for file in files:
   content = file['input'].readlines()
   file['input'].close()

-  info = {'grains': 0,
-          'grid': numpy.array([0,0,0]),
+#--- interprete header ----------------------------------------------------------------------------
+  info = {
+          'grid': numpy.zeros(3,'i'),
           'size': numpy.array(options.size),
-          'origin': numpy.array([0.0,0.0,0.0]),
-          'homogenization': options.homogenization,
+          'origin': numpy.zeros(3,'d'),
+          'grains': 0,
+          'homogenization': 0,
          }

   new_header = []
@@ -122,9 +120,6 @@ for file in files:
     else:
       info[headitems[0]] = mappings[headitems[0]](headitems[1])

-  if info['grains'] == 0:
-    file['croak'].write('no grains found.\n')
-    continue
   if info['grains'] != len(content):
     file['croak'].write('grain data not matching grain count...\n')
     info['grains'] = min(info['grains'],len(content))
@@ -132,12 +127,6 @@ for file in files:
   if 0 not in options.grid: # user-specified grid
     info['grid'] = numpy.array(options.grid)

-  if numpy.any(info['grid'] < 1):
-    file['croak'].write('no valid grid info found.\n')
-    continue
-
-  twoD = info['grid'][2] < 2
-
   for i in xrange(3):
     if info['size'][i] <= 0.0: # any invalid size?
       info['size'][i] = float(info['grid'][i])/max(info['grid'])
@@ -149,9 +138,17 @@ for file in files:
                       'origin x y z: %s\n'%(' : '.join(map(str,info['origin']))) + \
                       'homogenization: %i\n'%info['homogenization'])

+  if numpy.any(info['grid'] < 1):
+    file['croak'].write('invalid grid a b c.\n')
+    sys.exit()
+  if numpy.any(info['size'] <= 0.0):
+    file['croak'].write('invalid size x y z.\n')
+    sys.exit()
+  if info['grains'] == 0:
+    file['croak'].write('no grain info found.\n')
+    sys.exit()

-# -------------------------------------- prepare data ----------------------------------
+#--- prepare data ---------------------------------------------------------------------------------

   formatwidth = 1+int(math.log10(info['grains']))
   coords = numpy.zeros((3,info['grains']),'d')
   eulers = numpy.zeros((3,info['grains']),'d')
@@ -160,8 +157,7 @@ for file in files:
     coords[:,i] = map(float,content[i].split()[:3])*info['size']
     eulers[:,i] = map(float,content[i].split()[3:6])

-# -------------------------------------- switch according to task ----------------------------------
-
+#--- switch according to task ---------------------------------------------------------------------
   if options.config: # write config file
     file['output'].write('<microstructure>\n')
     for i in xrange(info['grains']):
@@ -175,6 +171,7 @@ for file in files:
                            '(gauss)\tphi1 %g\tPhi %g\tphi2 %g\tscatter 0.0\tfraction 1.0\n'%(eulers[0,i],eulers[1,i],eulers[2,i]))

   else: # write geometry file
+    twoD = info['grid'][2] < 2
     N = info['grid'].prod()
     shift = 0.5*info['size']/info['grid'] # shift by half of side length to center of element
     undeformed = numpy.zeros((3,N),'d')
@@ -200,13 +197,13 @@ for file in files:
       if i+1 not in indices: missing += 1
     file['croak'].write({True:'all',False:'only'}[missing == 0] + ' %i grains mapped.\n'%(info['grains']-missing))
-
+#--- write header ---------------------------------------------------------------------------------
     new_header.append("$Id$ \n")
     new_header.append("grid\ta %i\tb %i\tc %i\n"%(info['grid'][0],info['grid'][1],info['grid'][2],))
     new_header.append("size\tx %f\ty %f\tz %f\n"%(info['size'][0],info['size'][1],info['size'][2],))
     new_header.append("origin\tx %f\ty %f\tz %f\n"%(info['origin'][0],info['origin'][1],info['origin'][2],))
     new_header.append("microstructures\t%i\n"%(info['grains']-missing))
     new_header.append("homogenization\t%i\n"%info['homogenization'])

     file['output'].write('%i\theader\n'%(len(new_header)) + ''.join(new_header))

     for n in xrange(info['grid'][1:3].prod()): # loop over 2nd and 3rd size
@@ -216,11 +213,9 @@ for file in files:
                                           indices[n*info['grid'][0]:(n+1)*info['grid'][0]]))+'\n')

-
-# ------------------------------------------ output finalization ---------------------------------------
-
+#--- output finalization --------------------------------------------------------------------------
   if file['name'] != 'STDIN':
     file['output'].close()
     os.rename(file['name']+'_tmp',os.path.splitext(file['name'])[0] + \
               {True: '_material.config',
                False:'.geom'}[options.config])

@@ -102,18 +102,19 @@ for file in files:
       info[headitems[0]] = mappings[headitems[0]](headitems[1])
     new_header.append(header)

-  if numpy.all(info['grid'] == 0):
-    file['croak'].write('no grid info found.\n')
-    continue
-  if numpy.all(info['size'] == 0.0):
-    file['croak'].write('no size info found.\n')
-    continue
   file['croak'].write('grid a b c: %s\n'%(' x '.join(map(str,info['grid']))) + \
                       'size x y z: %s\n'%(' x '.join(map(str,info['size']))) + \
                       'origin x y z: %s\n'%(' : '.join(map(str,info['origin']))) + \
                       'homogenization: %i\n'%info['homogenization'] + \
                       'microstructures: %i\n'%info['microstructures'])

+  if numpy.any(info['grid'] < 1):
+    file['croak'].write('invalid grid a b c.\n')
+    sys.exit()
+  if numpy.any(info['size'] <= 0.0):
+    file['croak'].write('invalid size x y z.\n')
+    sys.exit()
+
   file['output'].write('%i\theader\n'%(len(new_header))+''.join(new_header))

 #--- pack input -----------------------------------------------------------------------------------
@@ -107,13 +107,6 @@ for file in files:
       info[headitems[0]] = mappings[headitems[0]](headitems[1])
     new_header.append(header)

-  if numpy.all(info['grid'] == 0):
-    file['croak'].write('no grid info found.\n')
-    continue
-  if numpy.all(info['size'] == 0.0):
-    file['croak'].write('no size info found.\n')
-    continue
-
   format = {True: info['grid'][0],
             False: 1}[options.twoD]

@@ -122,6 +115,14 @@ for file in files:
                       'origin x y z: %s\n'%(' : '.join(map(str,info['origin']))) + \
                       'homogenization: %i\n'%info['homogenization'] + \
                       'microstructures: %i\n'%info['microstructures'])
+
+  if numpy.any(info['grid'] < 1):
+    file['croak'].write('invalid grid a b c.\n')
+    sys.exit()
+  if numpy.any(info['size'] <= 0.0):
+    file['croak'].write('invalid size x y z.\n')
+    sys.exit()
+
   file['output'].write('%i\theader\n'%(len(new_header))+''.join(new_header))

   if info['microstructures'] > 0:
@@ -126,10 +126,10 @@ for file in files:
                       'microstructures: %i\n'%info['microstructures'])

   if numpy.any(info['grid'] < 1):
-    file['croak'].write('no valid grid info found.\n')
+    file['croak'].write('invalid grid a b c.\n')
     sys.exit()
   if numpy.any(info['size'] <= 0.0):
-    file['croak'].write('no valid size info found.\n')
+    file['croak'].write('invalid size x y z.\n')
     sys.exit()

 #--- read data ------------------------------------------------------------------------------------
@@ -184,10 +184,10 @@ for file in files:
 # --- write microstructure information ------------------------------------------------------------
   for z in xrange(info['grid'][2]):
     for y in xrange(info['grid'][1]):
-      output += {True:' ',False:'\n'}[options.twoD].join(map(lambda x: str(x).rjust(formatwidth), microstructure[:,y,z])) + '\n'
+      file['output'].write({True:' ',False:'\n'}[options.twoD].
+                           join(map(lambda x: str(x).rjust(formatwidth), microstructure[:,y,z])) + '\n')

-    output += '\n'
-  file['output'].write(output)
+    file['output'].write('\n')

 #--- output finalization --------------------------------------------------------------------------
   if file['name'] != 'STDIN':
@@ -53,7 +53,10 @@ parser.set_defaults(N = 20)

 (options, extras) = parser.parse_args()

-Npoints = options.grid[0]*options.grid[1]*options.grid[2]
+Npoints = reduce(lambda x, y: x * y, options.grid)
+if 0 in options.grid:
+  file['croak'].write('invalid grid a b c.\n')
+  sys.exit()
 if options.N > Npoints:
   sys.stderr.write('Warning: more seeds than grid points at minimum resolution.\n')
   options.N = Npoints
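The seed-count hunk above switches the point count to a reduce-based product and guards against a zero grid dimension before comparing with the requested seed count: a zero anywhere makes the product zero, so without the explicit check options.N would simply be clamped to 0 after a warning. A small sketch with made-up values (Python 2, where reduce is a builtin):

grid = [16, 16, 0]                                    # hypothetical grid with an invalid dimension
Npoints = reduce(lambda x, y: x * y, grid)
print Npoints                                         # 0 -- why the 'if 0 in options.grid' guard runs first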
@@ -21,7 +21,7 @@ bin_link = { \
             'geom_fromVPSC.py',
             'geom_fromMinimalSurface.py',
             'geom_fromVoronoiTessellation.py',
-            'geom_Osteon.py',
+            'geom_fromOsteonGeometry.py',
             'geom_canvas.py',
             'geom_check.py',
             'geom_rescale.py',
@@ -29,7 +29,7 @@ bin_link = { \
             'geom_unpack.py',
             'geom_translate.py',
             'geom_vicinityOffset.py',
-            'geom_euclideanDistance.py'
+            'geom_fromEuclideanDistance.py'
            ],
 'post' : [
            '3Dvisualize.py',
@@ -63,7 +63,6 @@ bin_link = { \
            'perceptualUniformColorMap.py',
            'postResults.py',
            'showTable.py',
-           'table2ang.py',
            'tagLabel.py',
            'vtk2ang.py',
           ],
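The bin_link hunks only rename list entries ('geom_Osteon.py' to 'geom_fromOsteonGeometry.py', 'geom_euclideanDistance.py' to 'geom_fromEuclideanDistance.py') and drop 'table2ang.py', but the map matters because it presumably drives the step that links these scripts into the bin directory. The sketch below is a hypothetical illustration of how such a name map can be consumed, not DAMASK's actual installer; the paths and the trimmed-down bin_link content are made up:

import os

code_dir = '/path/to/processing'                      # hypothetical source location
bin_dir  = '/path/to/bin'                             # hypothetical target for symlinks
bin_link = {'pre' : ['geom_fromOsteonGeometry.py', 'geom_fromEuclideanDistance.py'],
            'post': ['vtk2ang.py']}

for subdir, scripts in bin_link.items():
  for script in scripts:
    src = os.path.join(code_dir, subdir, script)
    dst = os.path.join(bin_dir, os.path.splitext(script)[0])    # link name without the .py suffix
    if os.path.islink(dst): os.remove(dst)
    os.symlink(src, dst)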