consistent name
parent 767650e002
commit 1f2fbbee21
@@ -134,7 +134,7 @@ class extendableOption(Option):
 
 # Print iterations progress
 # from https://gist.github.com/aubricus/f91fb55dc6ba5557fbab06119420dd6a
-def print_progress(iteration, total, prefix='', suffix='', decimals=1, bar_length=100):
+def progressBar(iteration, total, prefix='', suffix='', decimals=1, bar_length=100):
   """
   Call in a loop to create terminal progress bar
 
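The function body is not part of this hunk; only the signature and the gist reference are visible. A minimal sketch of a gist-style terminal bar under that assumption (bar characters and output formatting are guesses, not taken from the repository):

import sys

def progressBar(iteration, total, prefix='', suffix='', decimals=1, bar_length=100):
  """Call in a loop to create terminal progress bar."""
  str_format    = "{0:." + str(decimals) + "f}"
  percents      = str_format.format(100 * (iteration / float(total)))
  filled_length = int(round(bar_length * iteration / float(total)))
  bar = '█' * filled_length + '-' * (bar_length - filled_length)           # completed vs. remaining part
  sys.stdout.write('\r%s |%s| %s%% %s' % (prefix, bar, percents, suffix))  # overwrite the same terminal line
  if iteration == total:
    sys.stdout.write('\n')                                                 # keep the finished bar on screen
  sys.stdout.flush()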
@@ -117,7 +117,7 @@ for name in filenames:
   while table.data_read():                                                # read next data line of ASCII table
 
     if options.verbose and Npoints > 100 and p%(Npoints//100) == 0:       # report in 1% steps if possible and avoid modulo by zero
-      damask.util.print_progress(iteration=p,total=Npoints)
+      damask.util.progressBar(iteration=p,total=Npoints)
 
     o = damask.Orientation(quaternion = np.array(list(map(float,table.data[column:column+4]))),
                            symmetry = options.symmetry).reduced()
@@ -165,7 +165,7 @@ for name in filenames:
 
   outputAlive = True
   p = 0
-  damask.util.print_progress(iteration=1,total=1)
+  damask.util.progressBar(iteration=1,total=1)
   while outputAlive and table.data_read():                                # read next data line of ASCII table
     table.data_append(1+packingMap[grainID[p]])                           # add (condensed) grain ID
     outputAlive = table.data_write()                                      # output processed line
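The two hunks above show the calling convention the processing scripts use with the renamed helper: inside a loop the bar is refreshed at most once per percent of progress, and a call with iteration=1, total=1 renders a full bar to close out a phase. A minimal usage sketch under those assumptions (loop body and sizes are illustrative, not taken from the script):

import damask

Npoints = 1000                                                            # illustrative problem size
for p in range(Npoints):
  if Npoints > 100 and p%(Npoints//100) == 0:                             # report in 1% steps if possible and avoid modulo by zero
    damask.util.progressBar(iteration=p, total=Npoints)
  pass                                                                    # ... process data point p ...
damask.util.progressBar(iteration=1, total=1)                             # force the bar to 100%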
@@ -832,7 +832,7 @@ elementsOfNode = {}
 Nelems = stat['NumberOfElements']
 for e in range(Nelems):
   if options.verbose and Nelems > 100 and e%(Nelems//100) == 0:           # report in 1% steps if possible and avoid modulo by zero
-    damask.util.print_progress(iteration=e,total=Nelems,prefix='1/3: connecting elements')
+    damask.util.progressBar(iteration=e,total=Nelems,prefix='1/3: connecting elements')
   for n in map(p.node_sequence,p.element(e).items):
     if n not in elementsOfNode:
       elementsOfNode[n] = [p.element_id(e)]
@@ -851,13 +851,13 @@ index = {}
 groups = []
 groupCount = 0
 memberCount = 0
-damask.util.print_progress(iteration=1,total=1,prefix='1/3: connecting elements')
+damask.util.progressBar(iteration=1,total=1,prefix='1/3: connecting elements')
 
 if options.nodalScalar:
   Npoints = stat['NumberOfNodes']
   for n in range(Npoints):
     if options.verbose and Npoints > 100 and e%(Npoints//100) == 0:       # report in 1% steps if possible and avoid modulo by zero
-      damask.util.print_progress(iteration=n,total=Npoints,prefix='2/3: scanning nodes ')
+      damask.util.progressBar(iteration=n,total=Npoints,prefix='2/3: scanning nodes ')
     myNodeID = p.node_id(n)
     myNodeCoordinates = [p.node(n).x, p.node(n).y, p.node(n).z]
     myElemID = 0
@@ -888,13 +888,13 @@ if options.nodalScalar:
                        myNodeCoordinates)                                 # incrementally update average location
       groups[index[grp]].append([myElemID,myNodeID,myIpID,myGrainID,0])   # append a new list defining each group member
       memberCount += 1
-  damask.util.print_progress(iteration=1,total=1,prefix='2/3: scanning nodes ')
+  damask.util.progressBar(iteration=1,total=1,prefix='2/3: scanning nodes ')
 
 else:
   Nelems = stat['NumberOfElements']
   for e in range(Nelems):
     if options.verbose and Nelems > 100 and e%(Nelems//100) == 0:         # report in 1% steps if possible and avoid modulo by zero
-      damask.util.print_progress(iteration=e,total=Nelems,prefix='2/3: scanning elements ')
+      damask.util.progressBar(iteration=e,total=Nelems,prefix='2/3: scanning elements ')
     myElemID = p.element_id(e)
     myIpCoordinates = ipCoords(p.element(e).type, list(map(lambda node: [node.x, node.y, node.z],
                                list(map(p.node, map(p.node_sequence, p.element(e).items))))))
@@ -934,7 +934,7 @@ else:
                        myIpCoordinates[n])                                # incrementally update average location
       groups[index[grp]].append([myElemID,myNodeID,myIpID,myGrainID,n])   # append a new list defining each group member
       memberCount += 1
-  damask.util.print_progress(iteration=1,total=1,prefix='2/3: scanning elements ')
+  damask.util.progressBar(iteration=1,total=1,prefix='2/3: scanning elements ')
 
 
 # --------------------------- sort groups --------------------------------
@@ -1035,7 +1035,7 @@ for incCount,position in enumerate(locations):          # walk through locations
   for j,group in enumerate(groups):
     f = incCount*Ngroups + j
     if options.verbose and (Ngroups*Nincs) > 100 and f%((Ngroups*Nincs)//100) == 0:  # report in 1% steps if possible and avoid modulo by zero
-      damask.util.print_progress(iteration=f,total=Ngroups*Nincs,prefix='3/3: processing points ')
+      damask.util.progressBar(iteration=f,total=Ngroups*Nincs,prefix='3/3: processing points ')
     N = 0                                                                 # group member counter
     for (e,n,i,g,n_local) in group[1:]:                                   # loop over group members
       member += 1
@@ -1126,7 +1126,7 @@ for incCount,position in enumerate(locations):          # walk through locations
                                   group[0] + \
                                   mappedResult)
                     )) + '\n')
-  damask.util.print_progress(iteration=1,total=1,prefix='3/3: processing points ')
+  damask.util.progressBar(iteration=1,total=1,prefix='3/3: processing points ')
 
   if fileOpen:
     file.close()