2016-07-18 23:05:35 +05:30
|
|
|
#!/usr/bin/env python2.7
|
2013-05-13 16:57:59 +05:30
|
|
|
# -*- coding: UTF-8 no BOM -*-
|
2010-08-17 02:17:27 +05:30
|
|
|
|
2016-03-02 17:13:09 +05:30
|
|
|
import sys,os,math,re
|
2016-09-26 06:43:02 +05:30
|
|
|
import numpy as np
|
2014-11-18 13:30:45 +05:30
|
|
|
from optparse import OptionParser
|
|
|
|
import damask
|
2011-06-09 18:22:54 +05:30
|
|
|
|
2016-01-27 22:36:00 +05:30
|
|
|
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

try:                                               # check for Python Image Lib
  from PIL import Image,ImageDraw
  ImageCapability = True
except ImportError:                                # only the missing-package case disables the feature
  ImageCapability = False

sys.path.append(damask.solver.Marc().libraryPath())

try:                                               # check for MSC.Mentat Python interface
  import py_mentat
  MentatCapability = True
except ImportError:
  MentatCapability = False
2010-08-17 02:17:27 +05:30
|
|
|
|
|
|
|
def outMentat(cmd,locals):
  """Send a single command string to Mentat.

  Commands prefixed '(!)' are executed as Python, '(?)' are evaluated and the
  result is sent; everything else is passed through verbatim.  Sent commands
  are appended to locals['log'] when such a log list exists.
  """
  prefix = cmd[0:3]
  if prefix == '(!)':
    exec(cmd[3:])                                  # NOTE: executes arbitrary command text
  else:
    if prefix == '(?)':
      cmd = eval(cmd[3:])                          # NOTE: evaluates arbitrary command text
    py_mentat.py_send(cmd)
    if 'log' in locals:
      locals['log'].append(cmd)
  return
|
|
|
def outStdout(cmd,locals):
  """Write a single command string to stdout.

  Commands prefixed '(!)' are executed as Python, '(?)' are evaluated and the
  result is printed; everything else is printed verbatim.
  """
  prefix = cmd[0:3]
  if prefix == '(!)':
    exec(cmd[3:])                                  # NOTE: executes arbitrary command text
  else:
    if prefix == '(?)':
      cmd = eval(cmd[3:])                          # NOTE: evaluates arbitrary command text
    print(cmd)
  return
|
|
|
def output(cmds,locals,dest):
  """Dispatch a (possibly nested) list of commands to 'Mentat' or 'Stdout'."""
  for cmd in cmds:
    if isinstance(cmd,list):
      output(cmd,locals,dest)                      # recurse into nested command lists
    else:
      handlers = {'Mentat': outMentat,
                  'Stdout': outStdout,
                 }
      handlers[dest](cmd,locals)                   # unknown dest raises KeyError, as before
  return
|
|
2014-03-12 19:56:44 +05:30
|
|
|
def rcbOrientationParser(content,idcolumn):
  """Parse per-grain Euler angles from TSL-OIM reconstructed boundary data.

  Each data line carries two grains (right and left of the segment): their
  Euler angle triplets in columns 0..2 and 3..5, and their grain IDs in
  columns idcolumn and idcolumn+1.  Returns a list indexed by grainID-1
  holding [phi1,Phi,phi2] for every grain seen.
  """
  grains = []
  for line in content:
    if re.match(r'^\s*(#|$)',line): continue                                        # skip comments and blank lines
    words = line.split()
    for grain in range(2):                                                          # both grains bordering the segment
      myID = int(words[idcolumn+grain])                                             # get grain id
      myOrientation = list(map(float,words))[3*grain:3+3*grain]                     # get orientation (list() keeps py2 behavior, works on py3)
      while len(grains) < myID:                                                     # extend list to necessary length
        grains.append([0.0,0.0,0.0])
      try:
        grains[myID-1] = myOrientation                                              # store Euler angles
      except IndexError:
        damask.util.croak('You might not have chosen the correct column for the grain IDs! '+
                          'Please check the "--id" option.')
        raise
  return grains
|
|
|
2016-03-02 17:13:09 +05:30
|
|
|
def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
  """Parser for TSL-OIM reconstructed boundary files.

  Reads segment end points, transforms them by the 2x2 matrix M into the
  Euler frame, builds point/segment connectivity, closes the outline with
  border segments, culls duplicate segments, walks the segment graph to
  extract closed grain contours, and returns all results in one dict:
  dimension, bounds, scale, point, segment, neighbors, grain, grainMapping, box.
  """
# find bounding box
  boxX = [1.*sys.maxsize,-1.*sys.maxsize]                                           # sentinel extremes (maxsize == maxint on py2 CPython)
  boxY = [1.*sys.maxsize,-1.*sys.maxsize]
  x = [0.,0.]
  y = [0.,0.]
  for line in content:
    m = re.match(r'^\s*(#|$)',line)
    if m: continue                                                                  # skip comments and blank lines
    try:
      (x[0],y[0],x[1],y[1]) = list(map(float,line.split()))[segmentcolumn:segmentcolumn+4]  # get start and end coordinates of each segment
    except IndexError:
      damask.util.croak('You might not have chosen the correct column for the segment end points! '+
                        'Please check the "--segment" option.')
      raise
    (x[0],y[0]) = (M[0]*x[0]+M[1]*y[0],M[2]*x[0]+M[3]*y[0])                         # apply transformation to coordinates
    (x[1],y[1]) = (M[0]*x[1]+M[1]*y[1],M[2]*x[1]+M[3]*y[1])                         # to get rcb --> Euler system
    boxX[0] = min(boxX[0],x[0],x[1])
    boxX[1] = max(boxX[1],x[0],x[1])
    boxY[0] = min(boxY[0],y[0],y[1])
    boxY[1] = max(boxY[1],y[0],y[1])
  dX = boxX[1]-boxX[0]
  dY = boxY[1]-boxY[0]

  damask.util.croak(' bounding box {},{} -- {},{}'.format(boxX[0],boxY[0],boxX[1],boxY[1]))
  damask.util.croak(' dimension {} x {}'.format(dX,dY))

  if size > 0.0: scalePatch = size/dX
  else: scalePatch = 1.0

# read segments (the four bounding-box corners are pre-seeded as known points)
  segment = 0
  connectivityXY = {"0": {"0":[],"%g"%dY:[],},
                    "%g"%dX: {"0":[],"%g"%dY:[],},}
  connectivityYX = {"0": {"0":[],"%g"%dX:[],},
                    "%g"%dY: {"0":[],"%g"%dX:[],},}
  grainNeighbors = []

  for line in content:
    if re.match(r'^\s*(#|$)',line): continue                                        # skip comments and blank lines
    (x[0],y[0],x[1],y[1]) = list(map(float,line.split()))[segmentcolumn:segmentcolumn+4]  # get start and end coordinates of each segment
    (x[0],y[0]) = (M[0]*x[0]+M[1]*y[0],M[2]*x[0]+M[3]*y[0])                         # apply transformation to coordinates
    (x[1],y[1]) = (M[0]*x[1]+M[1]*y[1],M[2]*x[1]+M[3]*y[1])                         # to get rcb --> Euler system

    x[0] -= boxX[0]                                                                 # make relative to origin of bounding box
    x[1] -= boxX[0]
    y[0] -= boxY[0]
    y[1] -= boxY[0]
    grainNeighbors.append(list(map(int,line.split()[idcolumn:idcolumn+2])))         # remember right and left grain per segment
    for i in range(2):                                                              # store segment to both points
      match = False                                                                 # check whether point is already known (within a small range)
      for posX in connectivityXY.keys():
        if (abs(float(posX)-x[i])<dX*tolerance):
          for posY in connectivityXY[posX].keys():
            if (abs(float(posY)-y[i])<dY*tolerance):
              keyX = posX
              keyY = posY
              match = True
              break
          break
# force onto boundary if inside tolerance to it
      if (not match):
        if (abs(x[i])<dX*tolerance):
          x[i] = 0
        if (abs(dX-x[i])<dX*tolerance):
          x[i] = dX
        if (abs(y[i])<dY*tolerance):
          y[i] = 0
        if (abs(dY-y[i])<dY*tolerance):
          y[i] = dY
        keyX = "%g"%x[i]
        keyY = "%g"%y[i]
        if keyX not in connectivityXY:                                              # create new hash entry for so far unknown point
          connectivityXY[keyX] = {}
        if keyY not in connectivityXY[keyX]:                                        # create new hash entry for so far unknown point
          connectivityXY[keyX][keyY] = []
        if keyY not in connectivityYX:                                              # create new hash entry for so far unknown point
          connectivityYX[keyY] = {}
        if keyX not in connectivityYX[keyY]:                                        # create new hash entry for so far unknown point
          connectivityYX[keyY][keyX] = []
      connectivityXY[keyX][keyY].append(segment)                                    # attach segment to both its end points
      connectivityYX[keyY][keyX].append(segment)
    segment += 1                                                                    # one segment per data line

# top border
  keyId = "0"
  boundary = list(connectivityYX[keyId].keys())
  boundary.sort(key=float)
  for indexBdy in range(len(boundary)-1):
    connectivityXY[boundary[indexBdy]][keyId].append(segment)
    connectivityXY[boundary[indexBdy+1]][keyId].append(segment)
    connectivityYX[keyId][boundary[indexBdy]].append(segment)
    connectivityYX[keyId][boundary[indexBdy+1]].append(segment)
    segment += 1

# right border
  keyId = "%g"%(boxX[1]-boxX[0])
  boundary = list(connectivityXY[keyId].keys())
  boundary.sort(key=float)
  for indexBdy in range(len(boundary)-1):
    connectivityYX[boundary[indexBdy]][keyId].append(segment)
    connectivityYX[boundary[indexBdy+1]][keyId].append(segment)
    connectivityXY[keyId][boundary[indexBdy]].append(segment)
    connectivityXY[keyId][boundary[indexBdy+1]].append(segment)
    segment += 1

# bottom border
  keyId = "%g"%(boxY[1]-boxY[0])
  boundary = list(connectivityYX[keyId].keys())
  boundary.sort(key=float,reverse=True)
  for indexBdy in range(len(boundary)-1):
    connectivityXY[boundary[indexBdy]][keyId].append(segment)
    connectivityXY[boundary[indexBdy+1]][keyId].append(segment)
    connectivityYX[keyId][boundary[indexBdy]].append(segment)
    connectivityYX[keyId][boundary[indexBdy+1]].append(segment)
    segment += 1

# left border
  keyId = "0"
  boundary = list(connectivityXY[keyId].keys())
  boundary.sort(key=float,reverse=True)
  for indexBdy in range(len(boundary)-1):
    connectivityYX[boundary[indexBdy]][keyId].append(segment)
    connectivityYX[boundary[indexBdy+1]][keyId].append(segment)
    connectivityXY[keyId][boundary[indexBdy]].append(segment)
    connectivityXY[keyId][boundary[indexBdy+1]].append(segment)
    segment += 1

# number points in scan order and attach point ids to segments
  allkeysX = sorted(connectivityXY)
  points = []
  segments = [[] for i in range(segment)]
  pointId = 0
  for keyX in allkeysX:
    allkeysY = sorted(connectivityXY[keyX])
    for keyY in allkeysY:
      points.append({'coords': [float(keyX)*scalePatch,float(keyY)*scalePatch], 'segments': connectivityXY[keyX][keyY]})
      for segment in connectivityXY[keyX][keyY]:
        segments[segment].append(pointId)
      pointId += 1

# cull segments that connect the same pair of points twice
  dupSegments = []
  for pointId,point in enumerate(points):
    ends = []
    goners = []
    for segment in point['segments']:
      end = segments[segment][1 if segments[segment][0] == pointId else 0]
      if end in ends:
        goners.append(segment)
        dupSegments.append(segment)
      else:
        ends.append(end)

    for item in goners:
      point['segments'].remove(item)

  if len(dupSegments) > 0:
    damask.util.croak(' culling {} duplicate segments...'.format(len(dupSegments)))
    for rm in dupSegments:
      segments[rm] = None

# a point with fewer than two segments cannot belong to a closed contour
  crappyData = False
  for pointId,point in enumerate(points):
    if len(point['segments']) < 2:                                                  # point marks a dead end!
      damask.util.croak('dead end at segment {} for point {} ({},{}).'
                        .format(point['segments'][0],
                                pointId,
                                boxX[0]+point['coords'][0]/scalePatch,boxY[0]+point['coords'][1]/scalePatch,))
      crappyData = True

  grains = {'draw': [], 'legs': []}

  if not crappyData:

    for pointId,point in enumerate(points):
      while point['segments']:                                                      # start a new contour walk from every unused segment
        myStart = pointId
        grainDraw = [points[myStart]['coords']]
        innerAngleSum = 0.0
        myWalk = point['segments'].pop()
        grainLegs = [myWalk]
        myEnd = segments[myWalk][1 if segments[myWalk][0] == myStart else 0]

        while (myEnd != pointId):
          myV = [points[myEnd]['coords'][0]-points[myStart]['coords'][0],
                 points[myEnd]['coords'][1]-points[myStart]['coords'][1]]
          myLen = math.sqrt(myV[0]**2+myV[1]**2)
          if myLen == 0.0: damask.util.croak('mylen is zero: point {} --> {}'.format(myStart,myEnd))
          best = {'product': -2.0, 'peek': -1, 'len': -1, 'point': -1}
          for peek in points[myEnd]['segments']:                                    # trying in turn all segments emanating from current end
            if peek == myWalk:
              continue                                                              # do not go back same path
            peekEnd = segments[peek][1 if segments[peek][0] == myEnd else 0]
            peekV = [points[myEnd]['coords'][0]-points[peekEnd]['coords'][0],
                     points[myEnd]['coords'][1]-points[peekEnd]['coords'][1]]
            peekLen = math.sqrt(peekV[0]**2+peekV[1]**2)
            if peekLen == 0.0: damask.util.croak('peeklen is zero: peek point {}'.format(peek))
            crossproduct = (myV[0]*peekV[1] - myV[1]*peekV[0])/myLen/peekLen
            dotproduct = (myV[0]*peekV[0] + myV[1]*peekV[1])/myLen/peekLen
            innerAngle = crossproduct*(dotproduct+1.0)
            if innerAngle >= best['product']:                                       # takes sharpest left turn
              best['product'] = innerAngle
              best['peek'] = peek
              best['point'] = peekEnd

          innerAngleSum += best['product']
          myWalk = best['peek']
          myStart = myEnd
          myEnd = best['point']

          if myWalk in points[myStart]['segments']:
            points[myStart]['segments'].remove(myWalk)
          else:
            damask.util.croak('{} not in segments of point {}'.format(myWalk,myStart))  # FIX: was damask.utilcroak (missing dot)
          grainDraw.append(points[myStart]['coords'])
          grainLegs.append(myWalk)

        if innerAngleSum > 0.0:                                                     # left-turning contour = grain interior
          grains['draw'].append(grainDraw)
          grains['legs'].append(grainLegs)
        else:                                                                       # right-turning contour = the outer box
          grains['box'] = grainLegs

# build overall data structure
  rcData = {'dimension':[dX,dY],
            'bounds': [[boxX[0],boxY[0]],[boxX[1],boxY[1]]],
            'scale': scalePatch,
            'point': [],
            'segment': [],
            'neighbors': [],
            'grain': [],
            'grainMapping': [],
            }

  for point in points:
    rcData['point'].append(point['coords'])
  damask.util.croak(' found {} points'.format(len(rcData['point'])))

  for segment in segments:
    rcData['segment'].append(segment)
  damask.util.croak(' built {} segments'.format(len(rcData['segment'])))

  for neighbors in grainNeighbors:
    rcData['neighbors'].append(neighbors)

  for legs in grains['legs']:                                                       # loop over grains
    rcData['grain'].append(legs)                                                    # store list of boundary segments
    myNeighbors = {}
    for leg in legs:                                                                # test each boundary segment
      if leg < len(grainNeighbors):                                                 # a valid segment index?
        for side in range(2):                                                       # look at both sides of the segment
          if grainNeighbors[leg][side] in myNeighbors:                              # count occurrence of grain IDs
            myNeighbors[grainNeighbors[leg][side]] += 1
          else:
            myNeighbors[grainNeighbors[leg][side]] = 1
    if myNeighbors:                                                                 # do I have any neighbors (i.e., non-bounding box segment)
      candidateGrains = sorted(myNeighbors.items(), key=lambda kv: (kv[1],kv[0]), reverse=True)  # sort grain counting
      # most frequent one not yet seen?
      rcData['grainMapping'].append(candidateGrains[0 if candidateGrains[0][0] not in rcData['grainMapping'] else 1][0])  # must be me then
      # special case of bi-crystal situation...

  damask.util.croak(' found {} grains'.format(len(rcData['grain'])))

  rcData['box'] = grains['box'] if 'box' in grains else []

  return rcData
|
|
|
|
|
|
|
|
|
def init():
  """Commands resetting Mentat to a clean modelling state with a fresh model."""
  commands = ["*new_model yes",
              "*select_clear",
              "*reset",
              "*set_nodes off",
              "*elements_solid",
              "*show_view 4",
              "*reset_view",
              "*view_perspective",
              "*redraw",
             ]
  return commands
|
|
|
|
|
|
2011-06-08 00:45:34 +05:30
|
|
|
def sample(size,aspect,n,xmargin,ymargin):
  """Commands adding the rectangular sample outline: four corner points and
  four curves (top/bottom, then right/left), each split into n divisions."""
  halfX = size*(0.5+xmargin)                       # half width including margin
  halfY = size*(0.5*aspect+ymargin)                # half height including margin
  cmds = [
# gauge
    "*add_points %f %f %f"%(-halfX, halfY,0),
    "*add_points %f %f %f"%( halfX, halfY,0),
    "*add_points %f %f %f"%( halfX,-halfY,0),
    "*add_points %f %f %f"%(-halfX,-halfY,0),
    "*set_curve_type line",
    "*add_curves 1 2",
    "*add_curves 3 4",
    "*set_curve_div_type_fix_ndiv",
    "*set_curve_div_num %i"%n,
    "*apply_curve_divisions",
    "1 2 #",
    "*add_curves 2 3",                             # right side
    "*add_curves 4 1",                             # left side
    "*set_curve_div_type_fix_ndiv",
    "*set_curve_div_num %i"%n,
    "*apply_curve_divisions",
    "3 4 #",
  ]
  return cmds
|
|
2010-08-17 02:17:27 +05:30
|
|
|
|
|
|
|
def patch(a,n,mesh,rcData):
  """Commands building the patch geometry: all reconstructed points, all
  boundary curves (divided for balanced element edge length ~a/n), and one
  meshed + stored element set per grain."""
  cmds = []
  dimX = rcData['dimension'][0]
  dimY = rcData['dimension'][1]
  for pt in rcData['point']:                                                        # generate all points, centered on origin
    cmds.append("*add_points %f %f %f"%(pt[0]-a/2.0, pt[1]-a/dimX*dimY/2.0, 0))

  cmds.append(["*set_curve_type line",
               "*set_curve_div_type_fix_ndiv",
              ])
  for m in range(len(rcData['segment'])):                                           # generate all curves and subdivide them for overall balanced piece length
    start = rcData['segment'][m][0]
    end   = rcData['segment'][m][1]
    span  = math.sqrt((rcData['point'][start][0]-rcData['point'][end][0])**2+
                      (rcData['point'][start][1]-rcData['point'][end][1])**2)
    cmds.append(["*add_curves %i %i" %(start+rcData['offsetPoints'],
                                       end  +rcData['offsetPoints']),
                 "*set_curve_div_num %i"%(max(1,round(span/a*n))),
                 "*apply_curve_divisions",
                 "%i #"%(m+rcData['offsetSegments']),
                ])

  cmds.append('(!)locals["last"] = py_get_int("nelements()")')
  for grain,legs in enumerate(rcData['grain']):                                     # mesh each grain and store its elements under its original id
    cmds.append(['(!)locals["first"] = locals["last"]+1',
                 "*%s "%mesh+" ".join([str(rcData['offsetSegments']+x) for x in legs])+" #",
                 '(!)locals["last"] = py_get_int("nelements()")',
                 "*select_elements",
                 '(?)"%i to %i #"%(locals["first"],locals["last"])',
                 "*store_elements grain_%i"%rcData['grainMapping'][grain],
                 "all_selected",
                 "*select_clear",
                ])

  return cmds
|
2010-08-17 02:17:27 +05:30
|
|
|
|
|
|
|
|
|
|
|
def gage(mesh,rcData):
  """Commands meshing the gage region (between sample outline and patch box)
  and sorting its elements into the 'matrix' and '_grains' sets."""
  outlineCurves = " ".join([str(x) for x in range(1,rcData['offsetSegments'])])     # sample outline curve ids
  boxCurves     = " ".join([str(rcData['offsetSegments']+x) for x in rcData['box']])  # patch bounding-box curve ids
  return([
    "*%s "%mesh + outlineCurves + " " + boxCurves + " #",
    "*select_reset",
    "*select_clear",
    "*select_elements",
    "all_existing",
    "*select_mode_except",
    ['grain_%i'%rcData['grainMapping'][i] for i in range(len(rcData['grain']))],
    "#",
    "*store_elements matrix",
    "all_selected",
    "*select_mode_invert",
    "*select_elements",
    "all_existing",
    "*store_elements _grains",
    "all_selected",
    "*select_clear",
    "*select_reset",
  ])
|
|
|
|
|
|
|
|
|
|
|
def expand3D(thickness,steps):
  """Commands extruding the 2-D mesh along z into 'steps' layers of total
  depth 'thickness'."""
  return [
    "*set_expand_translation z %f"%(thickness/steps),
    "*set_expand_repetitions %i"%steps,
    "*expand_elements",
    "all_existing",
  ]
|
|
|
|
|
|
|
|
|
|
|
def initial_conditions(grainNumber,grainMapping):
  """Commands assigning initial conditions: temperature (state var 1),
  homogenization (state var 2), one grain id per grain and a 'rim' id for
  the surrounding matrix (state var 3)."""
  cmds = [
    "*new_icond",
    "*icond_name _temperature",
    "*icond_type state_variable",
    "*icond_param_value state_var_id 1",
    "*icond_dof_value var 300",
    "*add_icond_elements",
    "all_existing",
    "*new_icond",
    "*icond_name _homogenization",
    "*icond_type state_variable",
    "*icond_param_value state_var_id 2",
    "*icond_dof_value var 1",
    "*add_icond_elements",
    "all_existing",
  ]
  for grain in range(grainNumber):                 # one microstructure id per grain element set
    cmds.append([
      "*new_icond",
      "*icond_name grain_%i"%grainMapping[grain],
      "*icond_type state_variable",
      "*icond_param_value state_var_id 3",
      "*icond_dof_value var %i"%(grain+1),
      "*add_icond_elements",
      "grain_%i"%grainMapping[grain],
      "",
    ])
  cmds.append([                                    # matrix elements get id grainNumber+1
    "*new_icond",
    "*icond_name rim",
    "*icond_type state_variable",
    "*icond_param_value state_var_id 3",
    "*icond_dof_value var %i"%(grainNumber+1),
    "*add_icond_elements",
    "matrix",
  ])
  return cmds
|
2010-08-17 02:17:27 +05:30
|
|
|
|
|
|
|
|
2011-06-08 00:45:34 +05:30
|
|
|
def boundary_conditions(rate,thickness, size,aspect,xmargin,ymargin):
  """Commands defining the linear time table and four displacement BCs:
  pull_bottom / pull_top (y displacement at the +-y faces) and fix_x / fix_z
  (corner node constraints on selected box regions)."""
  inner = (1 - 1.0e-4) * size*(0.5+xmargin)        # just inside the x extent
  outer = (1 + 1.0e-4) * size*(0.5+xmargin)        # just outside the x extent
  lower = (1 - 1.0e-4) * size*(0.5*aspect+ymargin) # just inside the y extent
  upper = (1 + 1.0e-4) * size*(0.5*aspect+ymargin) # just outside the y extent

  table = [
    "*new_md_table 1 1",
    "*table_name linear",
    "*set_md_table_type 1 time",
    "*table_add",
    "0 0",
    "1 1",
    "*select_method_box",
  ]
  pullBottom = [
    "*new_apply",
    "*apply_name pull_bottom",
    "*apply_type fixed_displacement",
    "*apply_dof y",
    "*apply_dof_value y %f"%(-rate*(lower+upper)/2.0),
    "*apply_dof_table y linear",
    "*select_clear_nodes",
    "*select_nodes",
    "%f %f"%(-outer,outer),
    "%f %f"%(-upper,-lower),
    "%f %f"%(-.0001*thickness,1.0001*thickness),
    "*add_apply_nodes",
    "all_selected",
  ]
  pullTop = [
    "*new_apply",
    "*apply_name pull_top",
    "*apply_type fixed_displacement",
    "*apply_dof y",
    "*apply_dof_value y %f"%(rate*(lower+upper)/2.0),
    "*apply_dof_table y linear",
    "*select_clear_nodes",
    "*select_nodes",
    "%f %f"%(-outer,outer),
    "%f %f"%(lower,upper),
    "%f %f"%(-.0001*thickness,1.0001*thickness),
    "*add_apply_nodes",
    "all_selected",
  ]
  fixX = [
    "*new_apply",
    "*apply_name fix_x",
    "*apply_type fixed_displacement",
    "*apply_dof x",
    "*apply_dof_value x 0",
    "*select_clear_nodes",
    "*select_nodes",
    "%f %f"%(-outer,-inner),
    "%f %f"%(lower,upper),
    "%f %f"%(-.0001*thickness,.0001*thickness),
    "%f %f"%(-outer,-inner),
    "%f %f"%(lower,upper),
    "%f %f"%(0.9999*thickness,1.0001*thickness),
    "%f %f"%(-outer,-inner),
    "%f %f"%(-upper,-lower),
    "%f %f"%(-.0001*thickness,.0001*thickness),
    "%f %f"%(-outer,-inner),
    "%f %f"%(-upper,-lower),
    "%f %f"%(0.9999*thickness,1.0001*thickness),
    "*add_apply_nodes",
    "all_selected",
  ]
  fixZ = [
    "*new_apply",
    "*apply_name fix_z",
    "*apply_type fixed_displacement",
    "*apply_dof z",
    "*apply_dof_value z 0",
    "*select_clear_nodes",
    "*select_nodes",
    "%f %f"%(-outer,-inner),
    "%f %f"%(lower,upper),
    "%f %f"%(-.0001*thickness,.0001*thickness),
    "%f %f"%(-outer,-inner),
    "%f %f"%(-upper,-lower),
    "%f %f"%(-.0001*thickness,.0001*thickness),
    "%f %f"%(inner,outer),
    "%f %f"%(lower,upper),
    "%f %f"%(-.0001*thickness,.0001*thickness),
    "%f %f"%(inner,outer),
    "%f %f"%(-upper,-lower),
    "%f %f"%(-.0001*thickness,.0001*thickness),
    "*add_apply_nodes",
    "all_selected",
  ]
  return table + pullBottom + pullTop + fixX + fixZ + ["*select_clear","*select_reset"]
|
2010-08-17 02:17:27 +05:30
|
|
|
|
|
|
|
def materials():
  """Commands defining the hypoelastic 'patch' material routed through the
  HYPELA2 user subroutine and assigning it to every element."""
  return [
    "*new_material",
    "*material_name patch",
    "*material_type mechanical:hypoelastic",
    "*material_option hypoelastic:method:hypela2",
    "*material_option hypoelastic:pass:def_rot",
    "*add_material_elements",
    "all_existing",
  ]
|
|
|
|
|
|
|
|
|
|
|
|
def loadcase(time,incs,Ftol):
  """Commands defining the static 'puller' loadcase: total time, number of
  increments, and force residual tolerance."""
  return [
    "*new_loadcase",
    "*loadcase_name puller",
    "*loadcase_type static",
    "*loadcase_value time",
    "%g"%time,
    "*loadcase_value nsteps",
    "%i"%incs,
    "*loadcase_value maxrec",
    "20",
    "*loadcase_value ntime_cuts",
    "30",
    "*loadcase_value force",
    "%g"%Ftol,
  ]
|
|
|
|
|
|
|
|
|
2011-04-04 21:17:09 +05:30
|
|
|
def job(grainNumber,grainMapping,twoD):
  """Commands assembling the Mentat job: loadcase, initial conditions, and
  solver/analysis options for the DAMASK user subroutine run."""
  return [
    "*new_job",
    "*job_name pull",
    "*job_class mechanical",
    "*add_job_loadcases puller",
    "*add_job_iconds homogenization",
    ["*add_job_iconds grain_%i"%i for i in grainMapping[:grainNumber]],
    "*add_job_iconds rim",
    "*job_option dimen:%s | analysis dimension"%('two ' if twoD else 'three'),
    "*job_option strain:large | finite strains",
    "*job_option large_strn_proc:upd_lagrange | updated Lagrange framework",
    "*job_option plas_proc:multiplicative | multiplicative decomp of F",
    "*job_option solver_nonsym:on | nonsymmetrical solution",
    "*job_option solver:mfront_sparse | multi-frontal sparse",
    "*job_param stef_boltz 5.670400e-8",
    "*job_param univ_gas_const 8.314472",
    "*job_param planck_radiation_2 1.4387752e-2",
    "*job_param speed_light_vacuum 299792458",
# "*job_usersub_file ... | subroutine definition" is set externally
    "*job_option user_source:compile_save",
  ]
|
2010-08-17 02:17:27 +05:30
|
|
|
|
2011-06-08 00:45:34 +05:30
|
|
|
# "*job_option large:on | large displacement",
|
|
|
|
# "*job_option plasticity:l_strn_mn_add | large strain additive",
|
|
|
|
# "*job_option cdilatation:on | constant dilatation",
|
|
|
|
# "*job_option update:on | updated lagrange procedure",
|
|
|
|
# "*job_option finite:on | large strains",
|
|
|
|
# "*job_option restart_mode:write | enable restarting",
|
|
|
|
|
2010-08-17 02:17:27 +05:30
|
|
|
|
|
|
|
def postprocess():
  """Commands selecting post-processing output: stress/strain tensors and
  von Mises scalar."""
  return [
    "*add_post_tensor stress",
    "*add_post_tensor strain",
    "*add_post_var von_mises",
    "",
  ]
|
2010-08-17 02:17:27 +05:30
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def cleanUp(a):
  """Commands removing construction geometry and merging coincident nodes
  with a sweep tolerance proportional to the patch size a."""
  return [
    "*remove_curves",
    "all_existing",
    "*remove_points",
    "all_existing",
    "*set_sweep_tolerance %f"%(1e-5*a),
    "*sweep_all",
    "*renumber_all",
  ]
|
2011-06-08 00:45:34 +05:30
|
|
|
|
|
|
|
|
|
|
|
# -------------------------
|
|
|
|
def image(name,imgsize,marginX,marginY,rcData):
  """Render the patch to '<name>.png': numbered points, numbered segments,
  the bounding-box outline, and grain centers labelled 'new -> original'."""
  dX = max([coords[0] for coords in rcData['point']])
  dY = max([coords[1] for coords in rcData['point']])
  offsetX = imgsize*marginX
  offsetY = imgsize*marginY
  sizeX = int(imgsize*(1    +2*marginX))
  sizeY = int(imgsize*(dY/dX+2*marginY))

  scaleImg = imgsize/dX                                                             # rescale from max x coordinate

  img  = Image.new("RGB",(sizeX,sizeY),(255,255,255))
  draw = ImageDraw.Draw(img)

  for index,point in enumerate(rcData['point']):                                    # label every point with its id
    draw.text([offsetX+point[0]*scaleImg,sizeY-(offsetY+point[1]*scaleImg)],"%i"%index,fill=(0,0,0))

  for index,vertex in enumerate(rcData['segment']):                                 # draw and label surviving segments
    if vertex:                                                                      # culled duplicates are None
      start = rcData['point'][vertex[0]]
      end   = rcData['point'][vertex[1]]
      draw.text([offsetX+(start[0]+end[0])/2.0*scaleImg,sizeY-(offsetY+(start[1]+end[1])/2.0*scaleImg)],"%i"%index,fill=(255,0,128))
      draw.line([offsetX+start[0]*scaleImg,sizeY-(offsetY+start[1]*scaleImg),
                 offsetX+  end[0]*scaleImg,sizeY-(offsetY+  end[1]*scaleImg)],width=1,fill=(128,128,128))

  for index,segment in enumerate(rcData['box']):                                    # highlight the bounding-box outline
    start = rcData['point'][rcData['segment'][segment][0]]
    end   = rcData['point'][rcData['segment'][segment][1]]
    draw.line([offsetX+start[0]*scaleImg,sizeY-(offsetY+start[1]*scaleImg),
               offsetX+  end[0]*scaleImg,sizeY-(offsetY+  end[1]*scaleImg)],width=3,fill=(128,128*(index%2),0))

  for grain,origGrain in enumerate(rcData['grainMapping']):                         # label each grain at its centroid
    center = [0.0,0.0]
    for segment in rcData['grain'][grain]:                                          # loop thru segments around grain
      for point in rcData['segment'][segment]:                                      # take start and end points
        center[0] += rcData['point'][point][0]                                      # build vector sum
        center[1] += rcData['point'][point][1]

    center[0] /= len(rcData['grain'][grain])*2                                      # normalize by two times segment count, i.e. point count
    center[1] /= len(rcData['grain'][grain])*2

    draw.text([offsetX+center[0]*scaleImg,sizeY-(offsetY+center[1]*scaleImg)],'%i -> %i'%(grain,origGrain),fill=(128,32,32))

  img.save(name+'.png',"PNG")
|
2011-06-08 00:45:34 +05:30
|
|
|
|
|
|
|
# -------------------------
|
2016-03-02 17:13:09 +05:30
|
|
|
def inside(x,y,points):
  """Even-odd (crossing-number) test whether point (x,y) lies within the polygon given by points."""
  crossings = False
  (xPrev,yPrev) = points[-1]                                 # close the polygon: start from its last vertex
  prevAbove = (yPrev >= y)                                   # previous vertex at or above the test point?
  for (xCurr,yCurr) in points:                               # walk every edge (prev -> curr) exactly once
    currAbove = (yCurr >= y)
    if prevAbove != currAbove:                               # edge straddles the horizontal through y?
      # the edge counts as a crossing when the side test agrees with the edge direction;
      # this is the original nested toggle collapsed into one comparison
      if ((yCurr - y)*(xCurr - xPrev) <= (yCurr - yPrev)*(xCurr - x)) == currAbove:
        crossings = not crossings                            # found intersection
    (xPrev,yPrev,prevAbove) = (xCurr,yCurr,currAbove)        # advance to next edge
  return crossings
|
2010-09-23 13:29:52 +05:30
|
|
|
|
2011-06-08 00:45:34 +05:30
|
|
|
# -------------------------
|
2016-03-02 17:13:09 +05:30
|
|
|
def fftbuild(rcData,height,xframe,yframe,resolution,extrusion):
  """
  Build a regular grid of grain indices for a spectral (FFT) geometry.

  Arguments:
    rcData     -- parsed boundary data; uses 'point', 'segment', and 'grain'
    height     -- patch height (unused here, kept for interface stability)
    xframe     -- frame (margin) width in x, in point-coordinate units
    yframe     -- frame (margin) width in y
    resolution -- requested Fourier points along x (rounded to an even count)
    extrusion  -- number of repetitions in z

  Returns a dict with
    'fftpoints'  -- grain index per 2D grid point, x running fastest; may end up
                    shorter than xres*yres if a point lies inside no grain polygon
    'resolution' -- (xres,yres,zres)
    'dimension'  -- (xsize,ysize,zsize)
  """
  maxX = -float('inf')                                       # sentinel: any real coordinate is larger
  maxY = -float('inf')                                       # (was -1.*sys.maxint, gone in Python 3)
  for line in rcData['point']:                               # find data range
    (x,y) = line
    maxX = max(maxX, x)
    maxY = max(maxY, y)
  xsize = maxX+2*xframe                                      # add frame size (data assumed to start at 0)
  ysize = maxY+2*yframe
  xres = int(round(resolution/2.0)*2)                        # use only even resolution
  yres = int(round(xres/xsize*ysize/2.0)*2)                  # derive y resolution from aspect ratio
  zres = extrusion
  zsize = extrusion*min([xsize/xres,ysize/yres])             # z extent from the finer in-plane spacing

  fftdata = {'fftpoints':[],
             'resolution':(xres,yres,zres),
             'dimension':(xsize,ysize,zsize)}

  frameindex=len(rcData['grain'])+1                          # frame index: largest grain index plus one
  dx = xsize/(xres+1)                                        # calculate step sizes
  dy = ysize/(yres+1)

  grainpoints = []                                           # per grain: its boundary polygon as [x,y] list
  for segments in rcData['grain']:                           # get segments of each grain
    points = {}
    for i,segment in enumerate(segments[:-1]):               # loop thru segments except last (s=[start,end])
      points[rcData['segment'][segment][0]] = i              # assign segment index to start point
      points[rcData['segment'][segment][1]] = i              # assign segment index to end point
    for i in range(2):                                       # check points of last segment
      if points[rcData['segment'][segments[-1]][i]] != 0:    # not on first segment
        points[rcData['segment'][segments[-1]][i]] = len(segments)-1  # assign last segment index to it
    grainpoints.append([])                                   # start out blank for current grain
    for p in sorted(points, key=points.get):                 # points ordered along the boundary
      grainpoints[-1].append([rcData['point'][p][0],rcData['point'][p][1]])  # append x,y of point

  bestGuess = 0                                              # assume grain 0 as best guess
  for i in range(int(xres*yres)):                            # walk through all points in xy plane
    xtest = -xframe+((i%xres)+0.5)*dx                        # calculate coordinates
    ytest = -yframe+((i//xres)+0.5)*dy                       # // : explicit floor division (== py2 int(i/xres))
    if(xtest < 0 or xtest > maxX):                           # check whether part of frame
      if( ytest < 0 or ytest > maxY):                        # part of edges
        fftdata['fftpoints'].append(frameindex+2)            # append frame-corner index to result array
      else:                                                  # part of xframe
        fftdata['fftpoints'].append(frameindex)              # append frame index to result array
    elif( ytest < 0 or ytest > maxY):                        # part of yframe
      fftdata['fftpoints'].append(frameindex+1)              # append frame index to result array
    else:
      if inside(xtest,ytest,grainpoints[bestGuess]):         # check best guess first (spatial coherence)
        fftdata['fftpoints'].append(bestGuess+1)
      else:                                                  # no success
        for g in range(len(grainpoints)):                    # test all grains
          if inside(xtest,ytest,grainpoints[g]):
            fftdata['fftpoints'].append(g+1)
            bestGuess = g                                    # remember hit for the neighboring grid point
            break

  return fftdata
|
2011-06-08 00:45:34 +05:30
|
|
|
|
2010-08-17 02:17:27 +05:30
|
|
|
|
|
|
|
# ----------------------- MAIN -------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [options] datafile[s]', description = """
Produce image, spectral geometry description, and (auto) Mentat procedure from TSL/OIM
reconstructed boundary file

""", version = scriptID)

# automeshing algorithm/element-type choices offered for the Mentat patch
meshes=['dt_planar_trimesh','af_planar_trimesh','af_planar_quadmesh']

# --- output selection ---
parser.add_option('-o', '--output', action='extend', dest='output', metavar = '<string LIST>',
                  help='types of output {rcb, image, mentat, procedure, spectral}')
# --- Mentat / model geometry options ---
parser.add_option('-p', '--port', type='int', metavar = 'int',
                  dest='port', help='Mentat connection port [%default]')
parser.add_option('-2', '--twodimensional', action='store_true',
                  dest='twoD',help='use 2D model')
parser.add_option('-s','--patchsize', type='float', metavar = 'float',
                  dest='size', help='height of patch [%default]')
# --- loadcase options ---
parser.add_option('-e', '--strain', type='float', metavar = 'float',
                  dest='strain', help='final strain to reach in simulation [%default]')
parser.add_option('--rate', type='float', metavar = 'float',
                  dest='strainrate', help='engineering strain rate to simulate [%default]')
parser.add_option('-N', '--increments', type='int', metavar = 'int',
                  dest='increments', help='number of increments to take [%default]')
# --- discretization options ---
parser.add_option('-t', '--tolerance', type='float', metavar = 'float',
                  dest='tolerance', help='relative tolerance of pixel positions to be swept [%default]')
parser.add_option('-m', '--mesh', choices = meshes,
                  metavar = '<string LIST>', dest='mesh',
                  help='algorithm and element type for automeshing {%s} [dt_planar_trimesh]'%(', '.join(meshes)))
parser.add_option('-x', '--xmargin', type='float', metavar = 'float',
                  dest='xmargin',help='margin in x in units of patch size [%default]')
parser.add_option('-y', '--ymargin', type='float', metavar = 'float',
                  dest='ymargin', help='margin in y in units of patch size [%default]')
parser.add_option('-r', '--resolution', type='int', metavar = 'int',
                  dest='resolution',help='number of Fourier points/Finite Elements across patch size + x_margin [%default]')
parser.add_option('-z', '--extrusion', type='int', metavar = 'int',
                  dest='extrusion', help='number of repetitions in z-direction [%default]')
parser.add_option('-i', '--imagesize', type='int', metavar = 'int',
                  dest='imgsize', help='size of PNG image [%default]')
# --- orientation / input-format options ---
parser.add_option('-M', '--coordtransformation', type='float', nargs=4, metavar = ' '.join(['float']*4),
                  dest='M', help='2x2 transformation from rcb to Euler coords [%default]')
parser.add_option('--scatter', type='float', metavar = 'float',
                  dest='scatter',help='orientation scatter [%default]')
parser.add_option('--segment', type='int', metavar = 'int', dest='segmentcolumn',
                  help='column holding the first entry for the segment end points in the rcb file [%default]')
parser.add_option('--id', type='int', dest='idcolumn', metavar = 'int',
                  help='column holding the right hand grain ID in the rcb file [%default]')

parser.set_defaults(output = [],
                    size = 1.0,
                    port = 40007,
                    xmargin = 0.0,
                    ymargin = 0.0,
                    resolution = 64,
                    extrusion = 2,
                    imgsize = 512,
                    M = (0.0,1.0,1.0,0.0),                   # M_11, M_12, M_21, M_22. x,y in RCB is y,x of Eulers!!
                    tolerance = 1.0e-3,
                    scatter = 0.0,
                    strain = 0.2,
                    strainrate = 1.0e-3,
                    increments = 200,
                    mesh = 'dt_planar_trimesh',
                    twoD = False,
                    segmentcolumn = 9,                       # 1-based columns; shifted to 0-based after parsing
                    idcolumn = 13)

(options, args) = parser.parse_args()
|
2011-08-18 13:22:07 +05:30
|
|
|
|
2010-08-17 02:17:27 +05:30
|
|
|
# ----- sanity check and read input -----

if not len(args):
  parser.error('no boundary file specified.')

try:
  with open(args[0]) as boundaryFile:                        # context manager closes the handle even if
    boundarySegments = boundaryFile.readlines()              # readlines() raises (original leaked it then)
except:                                                      # deliberately broad: report any read problem...
  damask.util.croak('unable to read boundary file "{}".'.format(args[0]))
  raise                                                      # ...but never swallow it

options.output = [s.lower() for s in options.output]         # lower case
options.idcolumn -= 1                                        # python indexing starts with 0
options.segmentcolumn -= 1                                   # python indexing starts with 0

myName = os.path.splitext(args[0])[0]                        # output files share the input file's stem
damask.util.report(scriptName,myName)

# parse grain orientations and reconstructed-boundary topology from the rcb file
orientationData = rcbOrientationParser(boundarySegments,options.idcolumn)
rcData = rcbParser(boundarySegments,options.M,options.size,options.tolerance,options.idcolumn,options.segmentcolumn)
|
2011-04-04 17:42:01 +05:30
|
|
|
|
2016-09-26 06:43:02 +05:30
|
|
|
# ----- write corrected RCB -----

Minv = np.linalg.inv(np.array(options.M).reshape(2,2))       # inverse of rcb->Euler coordinate transformation

if 'rcb' in options.output:
  # re-emit the (corrected) boundary file on stdout in the original column layout
  print """# Header:
#
# Column 1-3: right hand average orientation (phi1, PHI, phi2 in radians)
# Column 4-6: left hand average orientation (phi1, PHI, phi2 in radians)
# Column 7: length (in microns)
# Column 8: trace angle (in degrees)
# Column 9-12: x,y coordinates of endpoints (in microns)
# Column 13-14: IDs of right hand and left hand grains"""
  for i,(left,right) in enumerate(rcData['neighbors']):      # one line per boundary segment
    if rcData['segment'][i]:                                 # skip segments that were culled during parsing
      # map both endpoints back from patch coordinates to original rcb coordinates
      first = np.dot(Minv,np.array([rcData['bounds'][0][0]+rcData['point'][rcData['segment'][i][0]][0]/rcData['scale'],
                                    rcData['bounds'][0][1]+rcData['point'][rcData['segment'][i][0]][1]/rcData['scale'],
                                   ]))
      second = np.dot(Minv,np.array([rcData['bounds'][0][0]+rcData['point'][rcData['segment'][i][1]][0]/rcData['scale'],
                                     rcData['bounds'][0][1]+rcData['point'][rcData['segment'][i][1]][1]/rcData['scale'],
                                    ]))
      # trailing commas: py2 print continues the same output line (space separated)
      print ' '.join(map(str,orientationData[left-1]+orientationData[right-1])),
      print np.linalg.norm(first-second),                    # column 7: segment length
      print '0',                                             # column 8: trace angle not recomputed
      print ' '.join(map(str,first)),
      print ' '.join(map(str,second)),
      print ' '.join(map(str,[left,right]))
|
|
|
|
|
2011-06-08 00:45:34 +05:30
|
|
|
# ----- write image -----

if 'image' in options.output and options.imgsize > 0:
  # PIL availability was probed at import time; fall back to a notice when absent
  if not ImageCapability:
    damask.util.croak('...no image drawing possible (PIL missing)...')
  else:
    image(myName,options.imgsize,options.xmargin,options.ymargin,rcData)
|
2011-06-08 00:45:34 +05:30
|
|
|
|
|
|
|
# ----- write spectral geom -----

if 'spectral' in options.output:
  fftdata = fftbuild(rcData, options.size, options.xmargin, options.ymargin, options.resolution, options.extrusion)

  # with-block closes the geom file even if a write fails (original leaked the handle on error)
  with open(myName+'_'+str(int(fftdata['resolution'][0]))+'.geom','w') as geomFile:
    geomFile.write('3\theader\n')                                                # write header info
    geomFile.write('resolution a %i b %i c %i\n'%(fftdata['resolution']))        # resolution
    geomFile.write('dimension x %f y %f z %f\n'%(fftdata['dimension']))          # size
    geomFile.write('homogenization 1\n')                                         # homogenization
    for z in xrange(fftdata['resolution'][2]):                                   # z repetitions
      for y in xrange(fftdata['resolution'][1]):                                 # each x-row separately
        geomFile.write('\t'.join(map(str,fftdata['fftpoints'][ y   *fftdata['resolution'][0]:
                                                              (y+1)*fftdata['resolution'][0]]))+'\n')  # grain indices, one x-row per line

  # fftpoints may be shorter than xres*yres when grid points fell inside no grain polygon
  damask.util.croak('assigned {} out of {} (2D) Fourier points...'
                    .format(len(fftdata['fftpoints']),
                            int(fftdata['resolution'][0])*int(fftdata['resolution'][1])))
|
2011-06-08 00:45:34 +05:30
|
|
|
|
|
|
|
|
|
|
|
# ----- write Mentat procedure -----

if 'mentat' in options.output:
  if MentatCapability:
    rcData['offsetPoints'] = 1+4                             # gage definition generates 4 points
    rcData['offsetSegments'] = 1+4                           # gage definition generates 4 segments

    # assemble the full command stream: model setup, patch mesh, gage section
    cmds = [\
      init(),
      sample(options.size,rcData['dimension'][1]/rcData['dimension'][0],12,options.xmargin,options.ymargin),
      patch(options.size,options.resolution,options.mesh,rcData),
      gage(options.mesh,rcData),
      ]

    if not options.twoD:                                     # 3D model: extrude mesh in z
      cmds += [expand3D(options.size*(1.0+2.0*options.xmargin)/options.resolution*options.extrusion,options.extrusion),]

    # simulation definition: materials, ICs, BCs, loadcase, job, postprocessing
    cmds += [\
      cleanUp(options.size),
      materials(),
      initial_conditions(len(rcData['grain']),rcData['grainMapping']),
      boundary_conditions(options.strainrate,options.size*(1.0+2.0*options.xmargin)/options.resolution*options.extrusion,\
                          options.size,rcData['dimension'][1]/rcData['dimension'][0],options.xmargin,options.ymargin),
      loadcase(options.strain/options.strainrate,options.increments,0.01),
      job(len(rcData['grain']),rcData['grainMapping'],options.twoD),
      postprocess(),
      ["*identify_sets","*regen","*fill_view","*save_as_model %s yes"%(myName)],
      ]

    outputLocals = {'log':[]}                                # collects the issued commands for replay
    if (options.port is not None):                           # NOTE(review): port defaults to 40007, so this
      py_mentat.py_connect('',options.port)                  # branch is always taken unless reset elsewhere
      try:
        output(cmds,outputLocals,'Mentat')                   # stream commands into the live Mentat session
      finally:
        py_mentat.py_disconnect()                            # always release the connection
    if 'procedure' in options.output:
      output(outputLocals['log'],outputLocals,'Stdout')      # replay the logged commands as a procedure file
  else:
    damask.util.croak('...no interaction with Mentat possible...')
|
2011-06-08 00:45:34 +05:30
|
|
|
|
|
|
|
|
|
|
|
# ----- write config data to file -----

if 'mentat' in options.output or 'spectral' in options.output:
  # accumulate the DAMASK material configuration as one string;
  # named 'config' (not 'output') to avoid shadowing the output() dispatcher used above
  config = ''
  config += '\n\n<homogenization>\n' + \
            '\n[SX]\n' + \
            'type\tisostrain\n' + \
            'Ngrains\t1\n' + \
            '\n\n<microstructure>\n'

  for i,grain in enumerate(rcData['grainMapping']):          # one microstructure section per grain
    config += '\n[grain %i]\n'%grain + \
              'crystallite\t1\n' + \
              '(constituent)\tphase 1\ttexture %i\tfraction 1.0\n'%(i+1)
  # margin material (phase 2) shares one texture slot right after the grains
  if (options.xmargin > 0.0):
    config += '\n[x-margin]\n' + \
              'crystallite\t1\n' + \
              '(constituent)\tphase 2\ttexture %i\tfraction 1.0\n'%(len(rcData['grainMapping'])+1)
  if (options.ymargin > 0.0):
    config += '\n[y-margin]\n' + \
              'crystallite\t1\n' + \
              '(constituent)\tphase 2\ttexture %i\tfraction 1.0\n'%(len(rcData['grainMapping'])+1)
  if (options.xmargin > 0.0 and options.ymargin > 0.0):      # corner region where both margins overlap
    config += '\n[margin edge]\n' + \
              'crystallite\t1\n' + \
              '(constituent)\tphase 2\ttexture %i\tfraction 1.0\n'%(len(rcData['grainMapping'])+1)

  config += '\n\n<crystallite>\n' + \
            '\n[fillMeIn]\n' + \
            '\n\n<phase>\n' + \
            '\n[patch]\n'
  if (options.xmargin > 0.0 or options.ymargin > 0.0):
    config += '\n[margin]\n'

  config += '\n\n<texture>\n\n'
  for grain in rcData['grainMapping']:                       # gauss component per grain, Eulers in degrees
    config += '\n[grain %i]\n'%grain + \
              '(gauss)\tphi1\t%f\tphi\t%f\tphi2\t%f\tscatter\t%f\tfraction\t1.0\n'\
              %(math.degrees(orientationData[grain-1][0]),math.degrees(orientationData[grain-1][1]),\
                math.degrees(orientationData[grain-1][2]),options.scatter)
  if (options.xmargin > 0.0 or options.ymargin > 0.0):       # margin gets a random texture
    config += '\n[margin]\n' + \
              '(random)\t\tscatter\t0.0\tfraction\t1.0\n'

  with open(myName+'.config','w') as configFile:             # context manager closes file on any failure
    configFile.write(config)
|