adjusting style (mainly long lines and "== None")

parent 4d8aad36f1
commit f77de7ac6e

@@ -123,7 +123,7 @@ for name in filenames:
 table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
 table.labels_append('grainID_{}@{}'.format(label,
 options.disorientation if options.degrees else np.degrees(options.disorientation))) # report orientation source and disorientation in degrees
 table.head_write()

 # ------------------------------------------ process data ------------------------------------------


@@ -161,7 +161,8 @@ for name in filenames:
 if p > 0 and p % 1000 == 0:

 time_delta = (time.clock()-tick) * (len(grainID) - p) / p
-bg.set_message('(%02i:%02i:%02i) processing point %i of %i (grain count %i)...'%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
+bg.set_message('(%02i:%02i:%02i) processing point %i of %i (grain count %i)...'\
+               %(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))

 if inputtype == 'eulers':
 o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians,
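
This hunk (and the similar ones further down) splits an overlong call with a trailing backslash. A brief aside on the idiom, with invented values for illustration: inside parentheses Python already continues lines implicitly, so the backslash is only required when the break falls outside a bracketed expression.

    # explicit continuation, as used in this commit:
    message = '(%02i:%02i:%02i) processing point %i of %i' \
              % (0, 1, 30, 500, 1000)

    # equivalent implicit continuation: wrapping the whole expression in
    # parentheses lets it span lines without a backslash
    message = ('(%02i:%02i:%02i) processing point %i of %i'
               % (0, 1, 30, 500, 1000))

    print(message)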

@@ -191,11 +192,11 @@ for name in filenames:
 bestDisorientation = damask.Quaternion([0,0,0,1]) # initialize to 180 deg rotation as worst case
 for i in kdtree.query_ball_point(kdtree.data[p],options.radius): # check all neighboring points
 gID = grainID[i]
-if gID != -1 and gID not in alreadyChecked: # an already indexed point belonging to a grain not yet tested?
+if gID != -1 and gID not in alreadyChecked: # indexed point belonging to a grain not yet tested?
 alreadyChecked[gID] = True # remember not to check again
-disorientation = o.disorientation(orientations[gID],SST = False)[0] # compare against that grain's orientation (and skip requirement of axis within SST)
+disorientation = o.disorientation(orientations[gID],SST = False)[0] # compare against other orientation
 if disorientation.quaternion.w > cos_disorientation and \
-disorientation.quaternion.w >= bestDisorientation.w: # within disorientation threshold and better than current best?
+disorientation.quaternion.w >= bestDisorientation.w: # within threshold and better than current best?
 matched = True
 matchedID = gID # remember that grain
 bestDisorientation = disorientation.quaternion

@@ -217,11 +218,11 @@ for name in filenames:
 memberCounts = np.array(memberCounts)
 similarOrientations = [[] for i in xrange(len(orientations))]

 for i,orientation in enumerate(orientations[:-1]): # compare each identified orientation...
 for j in xrange(i+1,len(orientations)): # ...against all others that were defined afterwards
 if orientation.disorientation(orientations[j],SST = False)[0].quaternion.w > cos_disorientation: # similar orientations in both grainIDs?
 similarOrientations[i].append(j) # remember in upper triangle...
 similarOrientations[j].append(i) # ...and lower triangle of matrix

 if similarOrientations[i] != []:
 bg.set_message('grainID {} is as: {}'.format(i,' '.join(map(str,similarOrientations[i]))))

@@ -235,10 +236,11 @@ for name in filenames:
 if p > 0 and p % 1000 == 0:

 time_delta = (time.clock()-tick) * (len(grainID) - p) / p
-bg.set_message('(%02i:%02i:%02i) shifting ID of point %i out of %i (grain count %i)...'%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
+bg.set_message('(%02i:%02i:%02i) shifting ID of point %i out of %i (grain count %i)...'
+               %(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
 if similarOrientations[gID] != []: # orientation of my grainID is similar to someone else?
-similarNeighbors = defaultdict(int) # dict holding frequency of neighboring grainIDs that share my orientation (freq info not used...)
+similarNeighbors = defaultdict(int) # frequency of neighboring grainIDs sharing my orientation
 for i in kdtree.query_ball_point(kdtree.data[p],options.radius): # check all neighboring points
 if grainID[i] in similarOrientations[gID]: # neighboring point shares my orientation?
 similarNeighbors[grainID[i]] += 1 # remember its grainID
 if similarNeighbors != {}: # found similar orientation(s) in neighborhood

@@ -55,7 +55,7 @@ else:

 sys.path.append(damask.solver.Marc().libraryPath('../../'))
 try:
-from py_post import *
+import py_post
 except:
 print('error: no valid Mentat release found')
 sys.exit(-1)
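
Replacing `from py_post import *` with `import py_post` (the same change recurs for py_mentat further down) keeps every Marc call visibly attached to its module, so nothing in the script's namespace is silently shadowed. A minimal sketch of the difference, using a standard-library module because py_post itself requires a Marc installation:

    # from math import *        # dumps dozens of names into this namespace;
    # value = log10(100)        # where does log10 come from? unclear.

    import math                 # qualified form, as this commit adopts

    value = math.log10(100)     # origin of the call is explicit
    print(value)                # 2.0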

@@ -63,7 +63,7 @@ except:

 # --------------------------- open results file and initialize mesh ----------

-p = post_open(filename+'.t16')
+p = py_post.post_open(filename+'.t16')
 p.moveto(0)
 Nnodes = p.nodes()
 Nincrements = p.increments() - 1 # t16 contains one "virtual" increment (at 0)

@@ -114,7 +114,7 @@ for incCount,position in enumerate(locations): # walk through locations

 p.moveto(position+1) # wind to correct position

 # --- get displacements

 node_displacement = [[0,0,0] for i in range(Nnodes)]
 for n in range(Nnodes):

@@ -124,10 +124,11 @@ for incCount,position in enumerate(locations): # walk through locations
 cellnode_displacement = [[c[i][n] for i in range(3)] for n in range(Ncellnodes)]


 # --- append displacements to corresponding files

 for geomtype in options.type:
-outFilename = eval('"'+eval("'%%s_%%s_inc%%0%ii.vtk'%(math.log10(max(increments+[1]))+1)")+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
+outFilename = eval('"'+eval("'%%s_%%s_inc%%0%ii.vtk'%(math.log10(max(increments+[1]))+1)")\
+              +'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
 print outFilename
 shutil.copyfile('%s_%s.vtk'%(filename,geomtype),outFilename)

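
The wrapped line above is easier to follow once the nested eval is unpacked: the inner eval builds a zero-padded format string whose width follows the largest increment, and the outer eval applies it to path, geometry type, and increment. A plain two-step sketch of the same computation, with hypothetical values standing in for the script's variables:

    import math
    import os

    increments = [0, 10, 100]          # hypothetical increment numbers
    incCount, geomtype = 2, 'nodebased'
    dirname, filename = '.', 'job1'

    width = int(math.log10(max(increments + [1]))) + 1   # 3 digits here
    template = '%%s_%%s_inc%%0%ii.vtk' % width           # -> '%s_%s_inc%03i.vtk'
    outFilename = template % (dirname + os.sep + os.path.split(filename)[1],
                              geomtype, increments[incCount])
    print(outFilename)                 # ./job1_nodebased_inc100.vtk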

@@ -10,12 +10,8 @@ scriptID = ' '.join([scriptName,damask.version])

 # -----------------------------
 def ParseOutputFormat(filename,homogID,crystID,phaseID):
-#
-# parse .output* files in order to get a list of outputs
-# -----------------------------
-
-myID = {
-'Homogenization': homogID,
+"""parse .output* files in order to get a list of outputs"""
+myID = {'Homogenization': homogID,
 'Crystallite': crystID,
 'Constitutive': phaseID,
 }
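
This hunk and the matching ones below convert `# parse ...` banner comments into docstrings. Unlike a comment, a docstring is attached to the function object, so introspection tools can report it; a minimal sketch (function name invented for illustration):

    def ParseOutputFormat_sketch(filename):
        """parse .output* files in order to get a list of outputs"""
        return {}

    print(ParseOutputFormat_sketch.__doc__)   # a # comment is invisible here
    help(ParseOutputFormat_sketch)            # ...and here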

@@ -61,7 +57,7 @@ def ParseOutputFormat(filename,homogID,crystID,phaseID):
 elif length > 0:
 format[what]['outputs'].append([output,length])

-if not '_id' in format[what]['specials']:
+if '_id' not in format[what]['specials']:
 print "\nsection '%s' not found in <%s>"%(myID[what], what)
 print '\n'.join(map(lambda x:' [%s]'%x, format[what]['specials']['brothers']))

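
`not '_id' in d` and `'_id' not in d` evaluate identically (`not in` is a single operator, and the first form parses as `not ('_id' in d)` anyway); the rewritten form simply reads as the one membership test it is. Sketch:

    specials = {'brothers': []}        # shape borrowed from the hunk above

    print(not '_id' in specials)       # True, but parses as not ('_id' in ...)
    print('_id' not in specials)       # True; same result, clearer spelling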

@@ -70,15 +66,15 @@ def ParseOutputFormat(filename,homogID,crystID,phaseID):

 # -----------------------------
 def ParsePostfile(p,filename, outputFormat, legacyFormat):
-#
-# parse postfile in order to get position and labels of outputs
-# needs "outputFormat" for mapping of output names to postfile output indices
-# -----------------------------
-
+"""
+parse postfile in order to get position and labels of outputs
+
+needs "outputFormat" for mapping of output names to postfile output indices
+"""
 startVar = {True: 'GrainCount',
 False:'HomogenizationCount'}

 # --- build statistics

 stat = { \
 'IndexOfLabel': {}, \

@@ -95,7 +91,7 @@ def ParsePostfile(p,filename, outputFormat, legacyFormat):
 'LabelOfElementalTensor': [None]*p.element_tensors(), \
 }

 # --- find labels

 for labelIndex in range(stat['NumberOfNodalScalars']):
 label = p.node_scalar_label(labelIndex)

@@ -119,9 +115,9 @@ def ParsePostfile(p,filename, outputFormat, legacyFormat):
 startIndex = stat['IndexOfLabel'][startVar[legacyFormat]]
 stat['LabelOfElementalScalar'][startIndex] = startVar[legacyFormat]

 # We now have to find a mapping for each output label as defined in the .output* files to the output position in the post file
 # Since we know where the user defined outputs start ("startIndex"), we can simply assign increasing indices to the labels
 # given in the .output* file

 offset = 1
 if legacyFormat:

@@ -177,10 +173,7 @@ def ParsePostfile(p,filename, outputFormat, legacyFormat):

 # -----------------------------
 def GetIncrementLocations(p,Nincrements,options):
-#
-# get mapping between positions in postfile and increment number
-# -----------------------------
-
+"""get mapping between positions in postfile and increment number"""
 incAtPosition = {}
 positionOfInc = {}


@@ -209,7 +202,6 @@ def GetIncrementLocations(p,Nincrements,options):

 # -----------------------------
 def SummarizePostfile(stat,where=sys.stdout):
-# -----------------------------

 where.write('\n\n')
 where.write('title:\t%s'%stat['Title'] + '\n\n')

@@ -226,7 +218,6 @@ def SummarizePostfile(stat,where=sys.stdout):

 # -----------------------------
 def SummarizeOutputfile(format,where=sys.stdout):
-# -----------------------------

 where.write('\nUser Defined Outputs')
 for what in format.keys():

@@ -239,7 +230,6 @@ def SummarizeOutputfile(format,where=sys.stdout):

 # -----------------------------
 def writeHeader(myfile,stat,geomtype):
-# -----------------------------

 myfile.write('2\theader\n')
 myfile.write(string.replace('$Id$','\n','\\n')+

@@ -316,7 +306,7 @@ if not os.path.exists(filename+'.t16'):

 sys.path.append(damask.solver.Marc().libraryPath('../../'))
 try:
-from py_post import *
+import py_post
 except:
 print('error: no valid Mentat release found')
 sys.exit(-1)

@@ -336,14 +326,14 @@ if damask.core.mesh.mesh_init_postprocessing(filename+'.mesh'):

 # --- check if ip data available for all elements; if not, then .t19 file is required

-p = post_open(filename+'.t16')
+p = py_post.post_open(filename+'.t16')
 asciiFile = False
 p.moveto(1)
 for e in range(p.elements()):
 if not damask.core.mesh.mesh_get_nodeAtIP(str(p.element(e).type),1):
 if os.path.exists(filename+'.t19'):
 p.close()
-p = post_open(filename+'.t19')
+p = py_post.post_open(filename+'.t19')
 asciiFile = True
 break


@@ -383,18 +373,20 @@ time_start = time.time()
 for incCount,position in enumerate(locations): # walk through locations
 p.moveto(position+1) # wind to correct position
 time_delta = (float(len(locations)) / float(incCount+1) - 1.0) * (time.time() - time_start)
-sys.stdout.write("\r(%02i:%02i:%02i) processing increment %i of %i..."%(time_delta//3600,time_delta%3600//60,time_delta%60,incCount+1,len(locations)))
+sys.stdout.write("\r(%02i:%02i:%02i) processing increment %i of %i..."\
+                 %(time_delta//3600,time_delta%3600//60,time_delta%60,incCount+1,len(locations)))
 sys.stdout.flush()

 # --- write header

 outFilename = {}
 for geomtype in options.type:
-outFilename[geomtype] = eval('"'+eval("'%%s_%%s_inc%%0%ii.txt'%(math.log10(max(increments+[1]))+1)")+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
+outFilename[geomtype] = eval('"'+eval("'%%s_%%s_inc%%0%ii.txt'%(math.log10(max(increments+[1]))+1)")\
+                        +'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
 with open(outFilename[geomtype],'w') as myfile:
 writeHeader(myfile,stat,geomtype)

 # --- write node based data

 if geomtype == 'nodebased':
 for n in range(stat['NumberOfNodes']):

@@ -403,7 +395,7 @@ for incCount,position in enumerate(locations): # walk through locations
 myfile.write('\t'+str(p.node_scalar(n,l)))
 myfile.write('\n')

 # --- write ip based data

 elif geomtype == 'ipbased':
 for e in range(stat['NumberOfElements']):

@@ -424,5 +416,3 @@ for incCount,position in enumerate(locations): # walk through locations

 p.close()
 sys.stdout.write("\n")
-
-# --------------------------- DONE --------------------------------

@@ -14,9 +14,9 @@ def outMentat(cmd,locals):
 exec(cmd[3:])
 elif cmd[0:3] == '(?)':
 cmd = eval(cmd[3:])
-py_send(cmd)
+py_mentat.py_send(cmd)
 else:
-py_send(cmd)
+py_mentat.py_send(cmd)
 return


@@ -121,13 +121,12 @@ if options.palettef:
 elif options.palette:
 for theColor in theMap.export(format='list',steps=options.colorcount):
 print '\t'.join(map(lambda x: str(int(255*x)),theColor))
-else:
-### connect to Mentat and change colorMap
+else: # connect to Mentat and change colorMap
 sys.path.append(damask.solver.Marc().libraryPath('../../'))
 try:
-from py_mentat import *
+import py_mentat
 print 'waiting to connect...'
-py_connect('',options.port)
+py_mentat.py_connect('',options.port)
 print 'connected...'
 mentat = True
 except:

@@ -138,7 +137,7 @@ else:
 cmds = colorMap(theMap.export(format='list',steps=options.colorcount),options.baseIdx)
 if mentat:
 output(['*show_table']+cmds+['*show_model *redraw'],outputLocals,'Mentat')
-py_disconnect()
+py_mentat.py_disconnect()

 if options.verbose:
 output(cmds,outputLocals,'Stdout')

@@ -79,7 +79,7 @@ for name in filenames:

 # ------------------------------------------ assemble header ---------------------------------------

-randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed == None else options.randomSeed # random seed per file
+randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed is None else options.randomSeed # random seed per file
 np.random.seed(randomSeed)

 table.info_append([scriptID + '\t' + ' '.join(sys.argv[1:]),
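
The `== None` fixes here and in the next hunk matter beyond style: `== None` routes through an object's `__eq__`, which a class may override to return anything (NumPy arrays, common in these scripts, return an elementwise array in newer releases), while `is None` is an identity test that nothing can intercept. A minimal sketch of the failure mode:

    class AlwaysEqual(object):
        def __eq__(self, other):       # legal, if unhelpful, __eq__
            return True

    x = AlwaysEqual()
    print(x == None)                   # True -- equality is lying
    print(x is None)                   # False -- identity cannot be overridden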

@@ -41,7 +41,7 @@ parser.set_defaults(rotation = (0.,1.,1.,1.),

 (options,filenames) = parser.parse_args()

-if options.vector == None and options.tensor == None:
+if options.vector is None and options.tensor is None:
 parser.error('no data column specified.')

 toRadians = math.pi/180.0 if options.degrees else 1.0 # rescale degrees to radians

@@ -107,8 +107,7 @@ for name in filenames:
 for column in items[datatype]['column']: # loop over all requested labels
 table.data[column:column+items[datatype]['dim']] = \
 np.dot(R,np.dot(np.array(map(float,table.data[column:column+items[datatype]['dim']])).\
-reshape(items[datatype]['shape']),R.transpose())).\
-reshape(items[datatype]['dim'])
+reshape(items[datatype]['shape']),R.transpose())).reshape(items[datatype]['dim'])

 outputAlive = table.data_write() # output processed line

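
The hunk above only re-wraps the final .reshape; the underlying computation is the tensor rotation R·T·Rᵀ applied to a flat slice of string-valued table data. A self-contained sketch of that pattern with a hypothetical 3×3 tensor:

    import numpy as np

    R = np.eye(3)                            # stand-in rotation matrix
    row = [str(v) for v in range(9)]         # table.data arrives as strings

    tensor = np.array([float(v) for v in row]).reshape(3, 3)
    rotated = np.dot(R, np.dot(tensor, R.transpose())).reshape(9)
    print(rotated)                           # flat again, ready to write back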

@@ -164,10 +164,10 @@ if options.render:
 actor = vtk.vtkActor()
 actor.SetMapper(mapper)

 # Create the graphics structure. The renderer renders into the
 # render window. The render window interactor captures mouse events
 # and will perform appropriate camera or actor manipulation
 # depending on the nature of the events.

 ren = vtk.vtkRenderer()


@@ -181,9 +181,6 @@ if options.render:
 iren = vtk.vtkRenderWindowInteractor()
 iren.SetRenderWindow(renWin)

-#ren.ResetCamera()
-#ren.GetActiveCamera().Zoom(1.5)
-
 iren.Initialize()
 renWin.Render()
 iren.Start()