adjusting style (mainly long lines and "== None")
This commit is contained in:
parent
4d8aad36f1
commit
f77de7ac6e
@@ -161,7 +161,8 @@ for name in filenames:
     if p > 0 and p % 1000 == 0:
       time_delta = (time.clock()-tick) * (len(grainID) - p) / p
-      bg.set_message('(%02i:%02i:%02i) processing point %i of %i (grain count %i)...'%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
+      bg.set_message('(%02i:%02i:%02i) processing point %i of %i (grain count %i)...'\
+                     %(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))

     if inputtype == 'eulers':
       o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians,
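The wrapping pattern above (and in several hunks below) moves the % operator to a continuation line, so the format string and its argument tuple stay visually separate. A minimal sketch with illustrative names:

    p, total = 42, 1000                      # hypothetical counters
    msg = 'processing point %i of %i...'\
          %(p,total)                         # backslash continues the statement
    print(msg)                               # processing point 42 of 1000...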
@@ -191,11 +192,11 @@ for name in filenames:
         bestDisorientation = damask.Quaternion([0,0,0,1])                    # initialize to 180 deg rotation as worst case
         for i in kdtree.query_ball_point(kdtree.data[p],options.radius):     # check all neighboring points
           gID = grainID[i]
-          if gID != -1 and gID not in alreadyChecked:                        # an already indexed point belonging to a grain not yet tested?
+          if gID != -1 and gID not in alreadyChecked:                        # indexed point belonging to a grain not yet tested?
             alreadyChecked[gID] = True                                       # remember not to check again
-            disorientation = o.disorientation(orientations[gID],SST = False)[0]  # compare against that grain's orientation (and skip requirement of axis within SST)
+            disorientation = o.disorientation(orientations[gID],SST = False)[0]  # compare against other orientation
             if disorientation.quaternion.w > cos_disorientation and \
-               disorientation.quaternion.w >= bestDisorientation.w:          # within disorientation threshold and better than current best?
+               disorientation.quaternion.w >= bestDisorientation.w:          # within threshold and better than current best?
               matched = True
               matchedID = gID                                                # remember that grain
               bestDisorientation = disorientation.quaternion
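For context, the loop above asks a k-d tree for every point within options.radius of point p and compares crystal orientations against the grains found there. A standalone sketch of the same neighbor-query pattern, assuming SciPy (the damask orientation math is elided):

    import numpy as np
    from scipy.spatial import cKDTree

    points = np.random.rand(1000,3)                      # hypothetical point cloud
    kdtree = cKDTree(points)                             # spatial index over all points
    for i in kdtree.query_ball_point(points[0],0.1):     # indices of neighbors within radius 0.1
        pass                                             # compare orientations[i] here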
@@ -235,10 +236,11 @@ for name in filenames:
     if p > 0 and p % 1000 == 0:
       time_delta = (time.clock()-tick) * (len(grainID) - p) / p
-      bg.set_message('(%02i:%02i:%02i) shifting ID of point %i out of %i (grain count %i)...'%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
+      bg.set_message('(%02i:%02i:%02i) shifting ID of point %i out of %i (grain count %i)...'
+                     %(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
     if similarOrientations[gID] != []:                                   # orientation of my grainID is similar to someone else?
-      similarNeighbors = defaultdict(int)                                # dict holding frequency of neighboring grainIDs that share my orientation (freq info not used...)
-      for i in kdtree.query_ball_point(kdtree.data[p],options.radius):   # check all neighboring points
+      similarNeighbors = defaultdict(int)                                # frequency of neighboring grainIDs sharing my orientation
+      for i in kdtree.query_ball_point(kdtree.data[p],options.radius):   # check all neighboring points
         if grainID[i] in similarOrientations[gID]:                       # neighboring point shares my orientation?
           similarNeighbors[grainID[i]] += 1                              # remember its grainID
       if similarNeighbors != {}:                                         # found similar orientation(s) in neighborhood

@@ -55,7 +55,7 @@ else:

 sys.path.append(damask.solver.Marc().libraryPath('../../'))
 try:
-  from py_post import *
+  import py_post
 except:
   print('error: no valid Mentat release found')
   sys.exit(-1)
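Replacing the star import with import py_post (mirrored for py_mentat further down) keeps the module namespace explicit: every call site shows where a function comes from, and nothing from the module can silently shadow a local name. The same idiom with a standard-library module:

    import math              # rather than: from math import *
    print(math.sqrt(2.0))    # the origin of sqrt is visible at the call site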
@@ -63,7 +63,7 @@ except:

 # --------------------------- open results file and initialize mesh ----------

-p = post_open(filename+'.t16')
+p = py_post.post_open(filename+'.t16')
 p.moveto(0)
 Nnodes = p.nodes()
 Nincrements = p.increments() - 1           # t16 contains one "virtual" increment (at 0)
@@ -114,7 +114,7 @@ for incCount,position in enumerate(locations): # walk through locations

   p.moveto(position+1)                     # wind to correct position

-# --- get displacements
+  # --- get displacements

   node_displacement = [[0,0,0] for i in range(Nnodes)]
   for n in range(Nnodes):
@@ -124,10 +124,11 @@ for incCount,position in enumerate(locations): # walk through locations
   cellnode_displacement = [[c[i][n] for i in range(3)] for n in range(Ncellnodes)]

-# --- append displacements to corresponding files
+  # --- append displacements to corresponding files

   for geomtype in options.type:
-    outFilename = eval('"'+eval("'%%s_%%s_inc%%0%ii.vtk'%(math.log10(max(increments+[1]))+1)")+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
+    outFilename = eval('"'+eval("'%%s_%%s_inc%%0%ii.vtk'%(math.log10(max(increments+[1]))+1)")\
+                       +'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
     print outFilename
     shutil.copyfile('%s_%s.vtk'%(filename,geomtype),outFilename)

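The nested eval above builds a filename whose increment number is zero-padded to the width of the largest increment. For comparison, a sketch of an eval-free formulation using the * width specifier of %-formatting (illustrative values, not part of this commit):

    import math
    increments = [0,10,120]                              # hypothetical increment numbers
    digits = int(math.log10(max(increments+[1])))+1      # width of the widest increment
    print('%s_%s_inc%0*i.vtk'%('job','nodebased',digits,increments[-1]))   # job_nodebased_inc120.vtk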
@@ -10,12 +10,8 @@ scriptID = ' '.join([scriptName,damask.version])

 # -----------------------------
 def ParseOutputFormat(filename,homogID,crystID,phaseID):
-#
-# parse .output* files in order to get a list of outputs
-# -----------------------------
-
-  myID = {
-          'Homogenization': homogID,
+  """parse .output* files in order to get a list of outputs"""
+  myID = {'Homogenization': homogID,
           'Crystallite': crystID,
           'Constitutive': phaseID,
          }
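Moving the description from a leading comment block into a docstring, as done here and in the hunks below, makes it available to introspection rather than only to readers of the source:

    def example():
        """one-line summary, shown by help(example)"""
        return None

    help(example)        # prints the docstring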
@@ -61,7 +57,7 @@ def ParseOutputFormat(filename,homogID,crystID,phaseID):
     elif length > 0:
       format[what]['outputs'].append([output,length])

-  if not '_id' in format[what]['specials']:
+  if '_id' not in format[what]['specials']:
     print "\nsection '%s' not found in <%s>"%(myID[what], what)
     print '\n'.join(map(lambda x:'  [%s]'%x, format[what]['specials']['brothers']))
@@ -70,15 +66,15 @@ def ParseOutputFormat(filename,homogID,crystID,phaseID):

 # -----------------------------
 def ParsePostfile(p,filename, outputFormat, legacyFormat):
-#
-# parse postfile in order to get position and labels of outputs
-# needs "outputFormat" for mapping of output names to postfile output indices
-# -----------------------------
+  """
+  parse postfile in order to get position and labels of outputs
+
+  needs "outputFormat" for mapping of output names to postfile output indices
+  """
   startVar = {True: 'GrainCount',
               False:'HomogenizationCount'}

-# --- build statistics
+  # --- build statistics

   stat = { \
   'IndexOfLabel': {}, \
@@ -95,7 +91,7 @@ def ParsePostfile(p,filename, outputFormat, legacyFormat):
   'LabelOfElementalTensor': [None]*p.element_tensors(), \
          }

-# --- find labels
+  # --- find labels

   for labelIndex in range(stat['NumberOfNodalScalars']):
     label = p.node_scalar_label(labelIndex)
@@ -119,9 +115,9 @@ def ParsePostfile(p,filename, outputFormat, legacyFormat):
   startIndex = stat['IndexOfLabel'][startVar[legacyFormat]]
   stat['LabelOfElementalScalar'][startIndex] = startVar[legacyFormat]

-# We now have to find a mapping for each output label as defined in the .output* files to the output position in the post file
-# Since we know where the user defined outputs start ("startIndex"), we can simply assign increasing indices to the labels
-# given in the .output* file
+  # We now have to find a mapping for each output label as defined in the .output* files to the output position in the post file
+  # Since we know where the user defined outputs start ("startIndex"), we can simply assign increasing indices to the labels
+  # given in the .output* file

   offset = 1
   if legacyFormat:
@@ -177,10 +173,7 @@ def ParsePostfile(p,filename, outputFormat, legacyFormat):

 # -----------------------------
 def GetIncrementLocations(p,Nincrements,options):
-#
-# get mapping between positions in postfile and increment number
-# -----------------------------
-
+  """get mapping between positions in postfile and increment number"""
   incAtPosition = {}
   positionOfInc = {}

@@ -209,7 +202,6 @@ def GetIncrementLocations(p,Nincrements,options):

 # -----------------------------
 def SummarizePostfile(stat,where=sys.stdout):
-# -----------------------------

   where.write('\n\n')
   where.write('title:\t%s'%stat['Title'] + '\n\n')
@@ -226,7 +218,6 @@ def SummarizePostfile(stat,where=sys.stdout):

 # -----------------------------
 def SummarizeOutputfile(format,where=sys.stdout):
-# -----------------------------

   where.write('\nUser Defined Outputs')
   for what in format.keys():
@@ -239,7 +230,6 @@ def SummarizeOutputfile(format,where=sys.stdout):

 # -----------------------------
 def writeHeader(myfile,stat,geomtype):
-# -----------------------------

   myfile.write('2\theader\n')
   myfile.write(string.replace('$Id$','\n','\\n')+
@@ -316,7 +306,7 @@ if not os.path.exists(filename+'.t16'):

 sys.path.append(damask.solver.Marc().libraryPath('../../'))
 try:
-  from py_post import *
+  import py_post
 except:
   print('error: no valid Mentat release found')
   sys.exit(-1)
@@ -336,14 +326,14 @@ if damask.core.mesh.mesh_init_postprocessing(filename+'.mesh'):

 # --- check if ip data available for all elements; if not, then .t19 file is required

-p = post_open(filename+'.t16')
+p = py_post.post_open(filename+'.t16')
 asciiFile = False
 p.moveto(1)
 for e in range(p.elements()):
   if not damask.core.mesh.mesh_get_nodeAtIP(str(p.element(e).type),1):
     if os.path.exists(filename+'.t19'):
       p.close()
-      p = post_open(filename+'.t19')
+      p = py_post.post_open(filename+'.t19')
       asciiFile = True
       break
@@ -383,14 +373,16 @@ time_start = time.time()
 for incCount,position in enumerate(locations):     # walk through locations
   p.moveto(position+1)                             # wind to correct position
   time_delta = (float(len(locations)) / float(incCount+1) - 1.0) * (time.time() - time_start)
-  sys.stdout.write("\r(%02i:%02i:%02i) processing increment %i of %i..."%(time_delta//3600,time_delta%3600//60,time_delta%60,incCount+1,len(locations)))
+  sys.stdout.write("\r(%02i:%02i:%02i) processing increment %i of %i..."\
+                   %(time_delta//3600,time_delta%3600//60,time_delta%60,incCount+1,len(locations)))
   sys.stdout.flush()

-# --- write header
+  # --- write header

   outFilename = {}
   for geomtype in options.type:
-    outFilename[geomtype] = eval('"'+eval("'%%s_%%s_inc%%0%ii.txt'%(math.log10(max(increments+[1]))+1)")+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
+    outFilename[geomtype] = eval('"'+eval("'%%s_%%s_inc%%0%ii.txt'%(math.log10(max(increments+[1]))+1)")\
+                                 +'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
     with open(outFilename[geomtype],'w') as myfile:
       writeHeader(myfile,stat,geomtype)
@@ -424,5 +416,3 @@ for incCount,position in enumerate(locations): # walk through locations

 p.close()
 sys.stdout.write("\n")
-
-# --------------------------- DONE --------------------------------

@@ -14,9 +14,9 @@ def outMentat(cmd,locals):
     exec(cmd[3:])
   elif cmd[0:3] == '(?)':
     cmd = eval(cmd[3:])
-    py_send(cmd)
+    py_mentat.py_send(cmd)
   else:
-    py_send(cmd)
+    py_mentat.py_send(cmd)
   return

@@ -121,13 +121,12 @@ if options.palettef:
 elif options.palette:
   for theColor in theMap.export(format='list',steps=options.colorcount):
     print '\t'.join(map(lambda x: str(int(255*x)),theColor))
-else:
-### connect to Mentat and change colorMap
+else:                                              # connect to Mentat and change colorMap
   sys.path.append(damask.solver.Marc().libraryPath('../../'))
   try:
-    from py_mentat import *
+    import py_mentat
     print 'waiting to connect...'
-    py_connect('',options.port)
+    py_mentat.py_connect('',options.port)
     print 'connected...'
     mentat = True
   except:
@@ -138,7 +137,7 @@ else:
   cmds = colorMap(theMap.export(format='list',steps=options.colorcount),options.baseIdx)
   if mentat:
     output(['*show_table']+cmds+['*show_model *redraw'],outputLocals,'Mentat')
-    py_disconnect()
+    py_mentat.py_disconnect()

 if options.verbose:
   output(cmds,outputLocals,'Stdout')
@@ -79,7 +79,7 @@ for name in filenames:

 # ------------------------------------------ assemble header ---------------------------------------

-  randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed == None else options.randomSeed   # random seed per file
+  randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed is None else options.randomSeed   # random seed per file
   np.random.seed(randomSeed)

   table.info_append([scriptID + '\t' + ' '.join(sys.argv[1:]),
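The switch from "== None" to "is None" (also in the next hunk) follows PEP 8: == dispatches to the left operand's __eq__, which a class may override, while "is" tests object identity and always yields a single boolean. NumPy arrays are the classic trap:

    import numpy as np
    a = np.zeros(3)
    print(a is None)     # False -- one unambiguous boolean
    # "a == None" compares elementwise on recent NumPy versions,
    # so "if a == None:" can raise "truth value ... is ambiguous"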
@@ -41,7 +41,7 @@ parser.set_defaults(rotation = (0.,1.,1.,1.),

 (options,filenames) = parser.parse_args()

-if options.vector == None and options.tensor == None:
+if options.vector is None and options.tensor is None:
   parser.error('no data column specified.')

 toRadians = math.pi/180.0 if options.degrees else 1.0       # rescale degrees to radians
@@ -107,8 +107,7 @@ for name in filenames:
       for column in items[datatype]['column']:              # loop over all requested labels
         table.data[column:column+items[datatype]['dim']] = \
           np.dot(R,np.dot(np.array(map(float,table.data[column:column+items[datatype]['dim']])).\
-                 reshape(items[datatype]['shape']),R.transpose())).\
-                 reshape(items[datatype]['dim'])
+                 reshape(items[datatype]['shape']),R.transpose())).reshape(items[datatype]['dim'])

     outputAlive = table.data_write()                        # output processed line

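The statement above applies the similarity transform T' = R T R^T to each requested tensor column. A self-contained sketch of the same operation with an explicit rotation about z:

    import numpy as np
    theta = np.pi/4
    R = np.array([[np.cos(theta),-np.sin(theta),0.],     # rotation about the z axis
                  [np.sin(theta), np.cos(theta),0.],
                  [0.,            0.,           1.]])
    T = np.diag([1.,2.,3.])                              # example tensor
    T_rot = np.dot(R,np.dot(T,R.transpose()))            # T' = R T R^T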
@@ -164,10 +164,10 @@ if options.render:
   actor = vtk.vtkActor()
   actor.SetMapper(mapper)

-# Create the graphics structure. The renderer renders into the
-# render window. The render window interactor captures mouse events
-# and will perform appropriate camera or actor manipulation
-# depending on the nature of the events.
+  # Create the graphics structure. The renderer renders into the
+  # render window. The render window interactor captures mouse events
+  # and will perform appropriate camera or actor manipulation
+  # depending on the nature of the events.

   ren = vtk.vtkRenderer()
@@ -181,9 +181,6 @@ if options.render:
   iren = vtk.vtkRenderWindowInteractor()
   iren.SetRenderWindow(renWin)

-#ren.ResetCamera()
-#ren.GetActiveCamera().Zoom(1.5)
-
   iren.Initialize()
   renWin.Render()
   iren.Start()