Merge remote branch 'origin/development' into cmake
commit 6c9ca73534
@@ -7,3 +7,4 @@
 *.png binary
 *.jpg binary
 *.cae binary
+installation/mods_Abaqus/abaqus_v6_windows.env -crlf
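Note on the attribute lines above: `binary` tells Git to skip diffing and any end-of-line conversion for the matched files, and the added `-crlf` entry (the older form of `-text`) makes Git leave the line endings of the Abaqus Windows environment file untouched on checkout and commit. As a rough, stand-alone illustration (not part of this commit), a few lines of Python can report which ending style such a file actually uses; the default path is only an example taken from the attribute line above:

# Illustrative sketch only: report which line endings a file uses.
# The default path is an example taken from the attribute line above.
import sys

def line_ending_style(path):
    with open(path, 'rb') as f:                 # read bytes so Python performs no newline translation
        blob = f.read()
    return 'CRLF' if b'\r\n' in blob else 'LF (or no line breaks)'

if __name__ == '__main__':
    path = sys.argv[1] if len(sys.argv) > 1 else 'installation/mods_Abaqus/abaqus_v6_windows.env'
    print('{}: {}'.format(path, line_ending_style(path)))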
@@ -159,7 +159,9 @@ for name in filenames:
   if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
   else: idCol = table.label_index(options.id)
 
-  if remarks != []: damask.util.croak(remarks)
+  if remarks != []:
+    damask.util.croak(remarks)
+    remarks = []
   if errors != []:
     damask.util.croak(errors)
     table.close(dismiss = True)
@@ -184,6 +186,8 @@ for name in filenames:
   N = grid.prod()
 
   if N != len(table.data): errors.append('data count {} does not match grid {}.'.format(N,'x'.join(map(str,grid))))
+  else: remarks.append('grid: {}x{}x{}'.format(*grid))
+  if remarks != []: damask.util.croak(remarks)
   if errors != []:
     damask.util.croak(errors)
     table.close(dismiss = True)
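The two hunks above tighten the consistency checks inside the per-file loop: non-fatal findings go into remarks (now reported and then cleared), fatal ones into errors (reported before the table is dismissed), and the product of the grid dimensions is compared against the number of data rows. A stand-alone sketch of that pattern, with invented data standing in for the ASCII table, could look like this:

# Sketch of the remarks/errors bookkeeping above; grid and data are invented
# stand-ins for what the script reads from an ASCII table.
import numpy as np

def check_table(data, grid):
    remarks, errors = [], []

    N = int(np.prod(grid))
    if N != len(data): errors.append('data count {} does not match grid {}.'.format(len(data), 'x'.join(map(str, grid))))
    else:              remarks.append('grid: {}x{}x{}'.format(*grid))

    if remarks:                                  # report non-fatal findings, then reset the list
        print('\n'.join(remarks))
        remarks = []
    if errors:                                   # fatal: report and tell the caller to dismiss the table
        print('\n'.join(errors))
        return False
    return True

check_table(np.zeros((8, 1)), (2, 2, 2))         # consistent: 2x2x2 == 8 rows
check_table(np.zeros((7, 1)), (2, 2, 2))         # inconsistent: reports the mismatch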
@@ -194,33 +198,37 @@ for name in filenames:
   stack = [table.data]
 
   neighborhood = neighborhoods[options.neighborhood]
-  convoluted = np.empty([len(neighborhood)]+list(grid+2),'i')
-  microstructure = periodic_3Dpad(np.array(table.data[:,idCol].reshape(grid),'i'))
+  diffToNeighbor = np.empty(list(grid+2)+[len(neighborhood)],'i')
+  microstructure = periodic_3Dpad(table.data[:,idCol].astype('i').reshape(grid,order='F'))
 
   for i,p in enumerate(neighborhood):
     stencil = np.zeros((3,3,3),'i')
     stencil[1,1,1] = -1
     stencil[p[0]+1,
             p[1]+1,
             p[2]+1] = 1
-    convoluted[i,:,:,:] = ndimage.convolve(microstructure,stencil)
+    diffToNeighbor[:,:,:,i] = ndimage.convolve(microstructure,stencil)    # compare ID at each point...
+                                                                          # ...to every one in the specified neighborhood
+                                                                          # for same IDs at both locations ==> 0
 
-  distance = np.ones((len(feature_list),grid[0],grid[1],grid[2]),'d')
-  convoluted = np.sort(convoluted,axis = 0)
-  uniques = np.where(convoluted[0,1:-1,1:-1,1:-1] != 0, 1,0)              # initialize unique value counter (exclude myself [= 0])
+  diffToNeighbor = np.sort(diffToNeighbor)                                # sort diff such that number of changes in diff (steps)...
+                                                                          # ...reflects number of unique neighbors
+  uniques = np.where(diffToNeighbor[1:-1,1:-1,1:-1,0] != 0, 1,0)          # initialize unique value counter (exclude myself [= 0])
 
   for i in xrange(1,len(neighborhood)):                                   # check remaining points in neighborhood
     uniques += np.where(np.logical_and(
-                        convoluted[i,1:-1,1:-1,1:-1] != convoluted[i-1,1:-1,1:-1,1:-1],             # flip of ID difference detected?
-                        convoluted[i,1:-1,1:-1,1:-1] != 0),                                         # not myself?
-                        1,0)                                                                        # count flip
+                        diffToNeighbor[1:-1,1:-1,1:-1,i] != 0,                                      # not myself?
+                        diffToNeighbor[1:-1,1:-1,1:-1,i] != diffToNeighbor[1:-1,1:-1,1:-1,i-1],
+                        ),                                                                          # flip of ID difference detected?
+                        1,0)                                                                        # count that flip
 
+  distance = np.ones((len(feature_list),grid[0],grid[1],grid[2]),'d')
 
   for i,feature_id in enumerate(feature_list):
     distance[i,:,:,:] = np.where(uniques >= features[feature_id]['aliens'],0.0,1.0)                # seed with 0.0 when enough unique neighbor IDs are present
     distance[i,:,:,:] = ndimage.morphology.distance_transform_edt(distance[i,:,:,:])*[options.scale]*3
 
-  distance.shape = ([len(feature_list),grid.prod(),1])
+  distance = distance.reshape([len(feature_list),grid.prod(),1],order='F')
   for i in xrange(len(feature_list)):
     stack.append(distance[i,:])
 
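The reworked block above is the core of the script: it pads the grain-ID grid periodically, convolves it once per neighborhood point with a stencil that subtracts the centre ID from the neighbor ID (so zero means "same grain"), sorts those per-voxel differences so that every change of value in the sorted run marks one more distinct neighboring grain, seeds every voxel that sees at least the required number of "alien" IDs with 0.0, and finally runs a Euclidean distance transform. A self-contained sketch of the same idea follows; it is not the DAMASK implementation: boundary_distance, von_neumann and the toy two-grain layout are invented, and periodicity is handled with np.roll instead of periodic_3Dpad plus a convolution.

# Minimal sketch (not the DAMASK implementation): distance to the nearest
# voxel that touches enough distinct foreign grain IDs, on a periodic grid.
import numpy as np
from scipy import ndimage

def boundary_distance(grain_ids, offsets, aliens=1, scale=1.0):
    """Distance to the nearest voxel that sees >= `aliens` distinct foreign grain IDs."""
    grain_ids = np.asarray(grain_ids, dtype=int)
    # ID difference of each voxel to every neighbor; np.roll wraps around, i.e. periodic boundaries
    diff = np.stack([grain_ids - np.roll(grain_ids, tuple(-np.asarray(p)), axis=(0, 1, 2))
                     for p in offsets], axis=-1)
    diff.sort(axis=-1)                                      # equal differences end up adjacent
    uniques = (diff[..., 0] != 0).astype(int)               # first entry: foreign neighbor or not
    for i in range(1, diff.shape[-1]):                      # every value change among nonzero entries...
        uniques += ((diff[..., i] != 0) &
                    (diff[..., i] != diff[..., i - 1])).astype(int)  # ...is one more distinct neighbor ID
    seeds = np.where(uniques >= aliens, 0.0, 1.0)           # 0.0 marks the feature voxels
    return ndimage.distance_transform_edt(seeds) * scale    # Euclidean distance to the nearest 0.0

von_neumann = [(1, 0, 0), (-1, 0, 0), (0, 1, 0), (0, -1, 0), (0, 0, 1), (0, 0, -1)]
ids = np.zeros((8, 8, 8), dtype=int)
ids[4:, :, :] = 1                                           # two grains meeting on a plane
d = boundary_distance(ids, von_neumann)
print('{} min={} max={}'.format(d.shape, d.min(), d.max())) # 0.0 on the boundary layers

Sorting the difference stack before counting is what lets a plain neighbor-to-neighbor comparison count distinct IDs per voxel without any per-voxel Python loop.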
@@ -14,7 +14,7 @@ scriptID = ' '.join([scriptName,damask.version])
 # --------------------------------------------------------------------
 
 parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Sort rows by given column label(s).
+Sort rows by given (or all) column label(s).
 
 Examples:
 With coordinates in columns "x", "y", and "z"; sorting with x slowest and z fastest varying index: --label x,y,z.
@@ -30,25 +30,19 @@ parser.add_option('-r','--reverse',
                   action = 'store_true',
                   help = 'sort in reverse')
 
-parser.set_defaults(key = [],
-                    reverse = False,
+parser.set_defaults(reverse = False,
                    )
 
 (options,filenames) = parser.parse_args()
 
-if options.keys is None:
-  parser.error('No sorting column(s) specified.')
-
-options.keys.reverse()                                     # numpy sorts with most significant column as last
 
 # --- loop over input files -------------------------------------------------------------------------
 
 if filenames == []: filenames = [None]
 
 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False)
   except: continue
   damask.util.report(scriptName,name)
 
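Besides compacting the try block, the hunk above removes the hard requirement to pass sorting keys: set_defaults now only fixes reverse, and an unspecified options.keys falls through to the per-file default handled in the next hunk. A minimal optparse sketch of that shape, with option names other than -r/--reverse invented for illustration, is:

# Sketch of the slimmed-down option handling; '-l/--label' and its behaviour
# here are illustrative, only '-r/--reverse' is taken from the diff above.
from optparse import OptionParser

parser = OptionParser(usage='%prog options [file[s]]')
parser.add_option('-l', '--label', dest='keys', action='append', metavar='string',
                  help='column label(s) to sort by (all columns if omitted)')
parser.add_option('-r', '--reverse', action='store_true',
                  help='sort in reverse')
parser.set_defaults(reverse=False)               # no default for keys: None now means "use every column"

(options, filenames) = parser.parse_args()
print('keys={} reverse={} files={}'.format(options.keys, options.reverse, filenames))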
@@ -61,15 +55,16 @@ for name in filenames:
 # ------------------------------------------ process data ---------------------------------------
 
   table.data_readArray()
 
+  keys = table.labels[::-1] if options.keys is None else options.keys[::-1]   # numpy sorts with most significant column as last
 
   cols = []
   remarks = []
-  for i,column in enumerate(table.label_index(options.keys)):
-    if column < 0:
-      remarks.append("label {0} not present.".format(options.keys[i]))
-    else:
-      cols += [table.data[:,column]]
+  for i,column in enumerate(table.label_index(keys)):
+    if column < 0: remarks.append('label "{}" not present...'.format(keys[i]))
+    else:          cols += [table.data[:,column]]
   if remarks != []: damask.util.croak(remarks)
 
   ind = np.lexsort(cols) if cols != [] else np.arange(table.data.shape[0])
   if options.reverse: ind = ind[::-1]
 
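The remaining logic builds the sort index with np.lexsort, which treats its last key as the most significant, hence the [::-1] on the key list; when no labels were requested, every column label is used, and --reverse simply flips the index. A small stand-alone demonstration with invented data and labels:

# Demonstration of the lexsort ordering used above; data and labels invented.
import numpy as np

labels = ['x', 'y', 'z']
data = np.array([[1., 0., 5.],
                 [0., 1., 4.],
                 [0., 0., 6.],
                 [1., 1., 3.]])

requested = None                                                    # as if no sorting labels were given
keys = labels[::-1] if requested is None else requested[::-1]       # most significant key must come last
cols = [data[:, labels.index(k)] for k in keys]

ind = np.lexsort(cols) if cols != [] else np.arange(data.shape[0])  # falls back to the original order
reverse = False
if reverse: ind = ind[::-1]

print(data[ind])                                                    # 'x' varies slowest, 'z' fastest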