Merge commit 'v2.0.1-943-g84ec8bf'

This commit is contained in:
Test User 2017-10-06 19:22:20 +02:00
commit 023fdda21f
1 changed file with 52 additions and 48 deletions

View File

@ -451,9 +451,9 @@ def mapIncremental(label, mapping, N, base, new):
# ----------------------------- # -----------------------------
def OpenPostfile(name,type,nodal = False): def OpenPostfile(name,type,nodal = False):
"""Open postfile with extrapolation mode 'translate'""" """Open postfile with extrapolation mode 'translate'"""
p = {\ p = {
'spectral': MPIEspectral_result,\ 'spectral': MPIEspectral_result,
'marc': post_open,\ 'marc': post_open,
}[type](name) }[type](name)
p.extrapolation({True:'linear',False:'translate'}[nodal]) p.extrapolation({True:'linear',False:'translate'}[nodal])
p.moveto(1) p.moveto(1)
@ -512,19 +512,19 @@ def ParsePostfile(p,filename, outputFormat):
needs "outputFormat" for mapping of output names to postfile output indices needs "outputFormat" for mapping of output names to postfile output indices
""" """
stat = { \ stat = {
'IndexOfLabel': {}, \ 'IndexOfLabel': {},
'Title': p.title(), \ 'Title': p.title(),
'Extrapolation': p.extrapolate, \ 'Extrapolation': p.extrapolate,
'NumberOfIncrements': p.increments(), \ 'NumberOfIncrements': p.increments(),
'NumberOfNodes': p.nodes(), \ 'NumberOfNodes': p.nodes(),
'NumberOfNodalScalars': p.node_scalars(), \ 'NumberOfNodalScalars': p.node_scalars(),
'LabelOfNodalScalar': [None]*p.node_scalars() , \ 'LabelOfNodalScalar': [None]*p.node_scalars(),
'NumberOfElements': p.elements(), \ 'NumberOfElements': p.elements(),
'NumberOfElementalScalars': p.element_scalars(), \ 'NumberOfElementalScalars': p.element_scalars(),
'LabelOfElementalScalar': [None]*p.element_scalars() , \ 'LabelOfElementalScalar': [None]*p.element_scalars(),
'NumberOfElementalTensors': p.element_tensors(), \ 'NumberOfElementalTensors': p.element_tensors(),
'LabelOfElementalTensor': [None]*p.element_tensors(), \ 'LabelOfElementalTensor': [None]*p.element_tensors(),
} }
# --- find labels # --- find labels
@ -671,6 +671,9 @@ parser.add_option('-m','--map', dest='func',
parser.add_option('-p','--type', dest='filetype', parser.add_option('-p','--type', dest='filetype',
metavar = 'string', metavar = 'string',
help = 'type of result file [auto]') help = 'type of result file [auto]')
parser.add_option('-q','--quiet', dest='verbose',
action = 'store_false',
help = 'suppress verbose output')
group_material = OptionGroup(parser,'Material identifier') group_material = OptionGroup(parser,'Material identifier')
@ -711,24 +714,26 @@ parser.add_option_group(group_material)
parser.add_option_group(group_general) parser.add_option_group(group_general)
parser.add_option_group(group_special) parser.add_option_group(group_special)
parser.set_defaults(info = False) parser.set_defaults(info = False,
parser.set_defaults(legacy = False) verbose = True,
parser.set_defaults(nodal = False) legacy = False,
parser.set_defaults(prefix = '') nodal = False,
parser.set_defaults(suffix = '') prefix = '',
parser.set_defaults(dir = 'postProc') suffix = '',
parser.set_defaults(filetype = None) dir = 'postProc',
parser.set_defaults(func = 'avg') filetype = None,
parser.set_defaults(homog = '1') func = 'avg',
parser.set_defaults(cryst = '1') homog = '1',
parser.set_defaults(phase = '1') cryst = '1',
parser.set_defaults(filter = '') phase = '1',
parser.set_defaults(sep = []) filter = '',
parser.set_defaults(sort = []) sep = [],
parser.set_defaults(inc = False) sort = [],
parser.set_defaults(time = False) inc = False,
parser.set_defaults(separateFiles = False) time = False,
parser.set_defaults(getIncrements= False) separateFiles = False,
getIncrements= False,
)
(options, files) = parser.parse_args() (options, files) = parser.parse_args()
@ -797,8 +802,9 @@ options.sep.reverse()
# --- start background messaging # --- start background messaging
bg = damask.util.backgroundMessage() if options.verbose:
bg.start() bg = damask.util.backgroundMessage()
bg.start()
# --- parse .output and .t16 files # --- parse .output and .t16 files
@ -816,7 +822,7 @@ me = {
'Constitutive': options.phase, 'Constitutive': options.phase,
} }
bg.set_message('parsing .output files...') if options.verbose: bg.set_message('parsing .output files...')
for what in me: for what in me:
outputFormat[what] = ParseOutputFormat(filename, what, me[what]) outputFormat[what] = ParseOutputFormat(filename, what, me[what])
@ -824,9 +830,10 @@ for what in me:
print("\nsection '{}' not found in <{}>".format(me[what], what)) print("\nsection '{}' not found in <{}>".format(me[what], what))
print('\n'.join(map(lambda x:' [%s]'%x, outputFormat[what]['specials']['brothers']))) print('\n'.join(map(lambda x:' [%s]'%x, outputFormat[what]['specials']['brothers'])))
bg.set_message('opening result file...') if options.verbose: bg.set_message('opening result file...')
p = OpenPostfile(filename+extension,options.filetype,options.nodal) p = OpenPostfile(filename+extension,options.filetype,options.nodal)
bg.set_message('parsing result file...') if options.verbose: bg.set_message('parsing result file...')
stat = ParsePostfile(p, filename, outputFormat) stat = ParsePostfile(p, filename, outputFormat)
if options.filetype == 'marc': if options.filetype == 'marc':
stat['NumberOfIncrements'] -= 1 # t16 contains one "virtual" increment (at 0) stat['NumberOfIncrements'] -= 1 # t16 contains one "virtual" increment (at 0)
@ -870,8 +877,7 @@ if options.info:
elementsOfNode = {} elementsOfNode = {}
for e in range(stat['NumberOfElements']): for e in range(stat['NumberOfElements']):
if e%1000 == 0: if options.verbose and e%1000 == 0: bg.set_message('connect elem %i...'%e)
bg.set_message('connect elem %i...'%e)
for n in map(p.node_sequence,p.element(e).items): for n in map(p.node_sequence,p.element(e).items):
if n not in elementsOfNode: if n not in elementsOfNode:
elementsOfNode[n] = [p.element_id(e)] elementsOfNode[n] = [p.element_id(e)]
@ -893,8 +899,7 @@ memberCount = 0
if options.nodalScalar: if options.nodalScalar:
for n in range(stat['NumberOfNodes']): for n in range(stat['NumberOfNodes']):
if n%1000 == 0: if options.verbose and n%1000 == 0: bg.set_message('scan node %i...'%n)
bg.set_message('scan node %i...'%n)
myNodeID = p.node_id(n) myNodeID = p.node_id(n)
myNodeCoordinates = [p.node(n).x, p.node(n).y, p.node(n).z] myNodeCoordinates = [p.node(n).x, p.node(n).y, p.node(n).z]
myElemID = 0 myElemID = 0
@ -928,8 +933,7 @@ if options.nodalScalar:
else: else:
for e in range(stat['NumberOfElements']): for e in range(stat['NumberOfElements']):
if e%1000 == 0: if options.verbose and e%1000 == 0: bg.set_message('scan elem %i...'%e)
bg.set_message('scan elem %i...'%e)
myElemID = p.element_id(e) myElemID = p.element_id(e)
myIpCoordinates = ipCoords(p.element(e).type, map(lambda node: [node.x, node.y, node.z], myIpCoordinates = ipCoords(p.element(e).type, map(lambda node: [node.x, node.y, node.z],
map(p.node, map(p.node_sequence, p.element(e).items)))) map(p.node, map(p.node_sequence, p.element(e).items))))
@ -995,7 +999,7 @@ if 'none' not in map(str.lower, options.sort):
theKeys.append('x[0][%i]'%where[criterium]) theKeys.append('x[0][%i]'%where[criterium])
sortKeys = eval('lambda x:(%s)'%(','.join(theKeys))) sortKeys = eval('lambda x:(%s)'%(','.join(theKeys)))
bg.set_message('sorting groups...') if options.verbose: bg.set_message('sorting groups...')
groups.sort(key = sortKeys) # in-place sorting to save mem groups.sort(key = sortKeys) # in-place sorting to save mem
@ -1014,7 +1018,7 @@ standard = ['inc'] + \
# --------------------------- loop over positions -------------------------------- # --------------------------- loop over positions --------------------------------
bg.set_message('getting map between positions and increments...') if options.verbose: bg.set_message('getting map between positions and increments...')
incAtPosition = {} incAtPosition = {}
positionOfInc = {} positionOfInc = {}
@ -1075,7 +1079,7 @@ for incCount,position in enumerate(locations): # walk through locations
member += 1 member += 1
if member%1000 == 0: if member%1000 == 0:
time_delta = ((len(locations)*memberCount)/float(member+incCount*memberCount)-1.0)*(time.time()-time_start) time_delta = ((len(locations)*memberCount)/float(member+incCount*memberCount)-1.0)*(time.time()-time_start)
bg.set_message('(%02i:%02i:%02i) processing point %i of %i from increment %i (position %i)...' if options.verbose: bg.set_message('(%02i:%02i:%02i) processing point %i of %i from increment %i (position %i)...'
%(time_delta//3600,time_delta%3600//60,time_delta%60,member,memberCount,increments[incCount],position)) %(time_delta//3600,time_delta%3600//60,time_delta%60,member,memberCount,increments[incCount],position))
newby = [] # current member's data newby = [] # current member's data