next bunch of accepted scripts

This commit is contained in:
parent 3ed3073eff
commit f7fedc4744
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,math,types,time
+import os,sys,math,types,time
 import scipy.spatial, numpy as np
 from optparse import OptionParser
 import damask

@@ -94,18 +94,18 @@ parser.set_defaults(symmetry = [damask.Symmetry.lattices[-1]],

 (options,filenames) = parser.parse_args()

-input = [options.eulers != None,
-options.a != None and \
-options.b != None and \
-options.c != None,
-options.matrix != None,
-options.quaternion != None,
-options.microstructure != None,
+input = [options.eulers is not None,
+options.a is not None and \
+options.b is not None and \
+options.c is not None,
+options.matrix is not None,
+options.quaternion is not None,
+options.microstructure is not None,
 ]

 if np.sum(input) != 1:
 parser.error('need either microstructure label or exactly one orientation input format.')
-if options.axes != None and not set(options.axes).issubset(set(['x','+x','-x','y','+y','-y','z','+z','-z'])):
+if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x','y','+y','-y','z','+z','-z'])):
 parser.error('invalid axes {} {} {}.'.format(*options.axes))

 (label,dim,inputtype) = [(options.eulers,3,'eulers'),

@@ -157,7 +157,7 @@ for name in filenames:
 if coordDim == 2:
 table.data = np.insert(table.data,2,np.zeros(len(table.data)),axis=1) # add zero z coordinate for two-dimensional input
 if options.verbose: damask.util.croak('extending to 3D...')
-if options.phase == None:
+if options.phase is None:
 table.data = np.column_stack((table.data,np.ones(len(table.data)))) # add single phase if no phase column given
 if options.verbose: damask.util.croak('adding dummy phase info...')

@@ -168,7 +168,7 @@ for name in filenames:
 maxcorner = np.array(map(max,coords))
 grid = np.array(map(len,coords),'i')
 size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
-size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
+size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
 delta = size/np.maximum(np.ones(3,'d'), grid)
 origin = mincorner - 0.5*delta # shift from cell center to corner
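Note: the size/delta/origin block above recovers the grid geometry from cell-center coordinates: n cells whose centers span (max - min) have edge-to-edge size (max - min) * n/(n-1), and a degenerate dimension (grid == 1) borrows the smallest per-cell spacing of the remaining dimensions. A standalone NumPy check with made-up values (not part of the script):

    import numpy as np

    grid      = np.array([4, 4, 1])                # hypothetical grid, degenerate in z
    mincorner = np.zeros(3)
    maxcorner = np.array([3., 3., 0.])             # cell centers span 0..3 in x and y

    size   = grid/np.maximum(np.ones(3), grid-1.0) * (maxcorner-mincorner)  # 3 * 4/3 = 4
    size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))   # z gets 4/4 = 1
    delta  = size/np.maximum(np.ones(3), grid)     # per-cell spacing
    origin = mincorner - 0.5*delta                 # shift from cell center to corner

    assert np.allclose(size,  [4., 4., 1.])
    assert np.allclose(delta, [1., 1., 1.])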
@@ -188,7 +188,7 @@ for name in filenames:

 # ------------------------------------------ process data ------------------------------------------

-colOri = table.label_index(label)+(3-coordDim) # column(s) of orientation data (following 3 or 2 coordinates that were expanded to 3!)
+colOri = table.label_index(label)+(3-coordDim) # column(s) of orientation data followed by 3 coordinates

 if inputtype == 'microstructure':

@@ -207,9 +207,9 @@ for name in filenames:

 statistics = {'global': 0, 'local': 0}
 grain = -np.ones(N,dtype = 'int32') # initialize empty microstructure
-orientations = [] # empty list of orientations
-multiplicity = [] # empty list of orientation multiplicity (number of group members)
-phases = [] # empty list of phase info
+orientations = [] # orientations
+multiplicity = [] # orientation multiplicity (number of group members)
+phases = [] # phase info
 nGrains = 0 # counter for detected grains
 existingGrains = np.arange(nGrains)
 myPos = 0 # position (in list) of current grid point

@@ -227,7 +227,7 @@ for name in filenames:

 myData = table.data[index[myPos]] # read data for current grid point
 myPhase = int(myData[colPhase])
-mySym = options.symmetry[min(myPhase,len(options.symmetry))-1] # select symmetry from option (take last specified option for all with higher index)
+mySym = options.symmetry[min(myPhase,len(options.symmetry))-1] # take last specified option for all with higher index

 if inputtype == 'eulers':
 o = damask.Orientation(Eulers = myData[colOri:colOri+3]*toRadians,

@@ -250,26 +250,27 @@ for name in filenames:

 if options.tolerance > 0.0: # only try to compress orientations if asked to
 neighbors = np.array(KDTree.query_ball_point([x,y,z], 3)) # point indices within radius
+# filter neighbors: skip myself, anyone further ahead (cannot yet have a grain ID), and other phases
 neighbors = neighbors[(neighbors < myPos) & \
-(table.data[index[neighbors],colPhase] == myPhase)] # filter neighbors: skip myself, anyone further ahead (cannot yet have a grain ID), and other phases
+(table.data[index[neighbors],colPhase] == myPhase)]
 grains = np.unique(grain[neighbors]) # unique grain IDs among valid neighbors

 if len(grains) > 0: # check immediate neighborhood first
 cos_disorientations = np.array([o.disorientation(orientations[grainID],
 SST = False)[0].quaternion.w \
 for grainID in grains]) # store disorientation per grainID
-closest_grain = np.argmax(cos_disorientations) # find grain among grains that has closest orientation to myself
+closest_grain = np.argmax(cos_disorientations) # grain among grains with closest orientation to myself
 match = 'local'

 if cos_disorientations[closest_grain] < threshold: # orientation not close enough?
-grains = existingGrains[np.atleast_1d( ( np.array(phases) == myPhase ) & \
-( np.in1d(existingGrains,grains,invert=True) ) )] # check every other already identified grain (of my phase)
+grains = existingGrains[np.atleast_1d( (np.array(phases) == myPhase ) & \
+(np.in1d(existingGrains,grains,invert=True)))] # other already identified grains (of my phase)

 if len(grains) > 0:
 cos_disorientations = np.array([o.disorientation(orientations[grainID],
 SST = False)[0].quaternion.w \
 for grainID in grains]) # store disorientation per grainID
-closest_grain = np.argmax(cos_disorientations) # find grain among grains that has closest orientation to myself
+closest_grain = np.argmax(cos_disorientations) # grain among grains with closest orientation to myself
 match = 'global'

 if cos_disorientations[closest_grain] >= threshold: # orientation now close enough?

@@ -331,7 +332,7 @@ for name in filenames:
 config_header += ['<texture>']
 for i,orientation in enumerate(orientations):
 config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
-'axes\t%s %s %s'%tuple(options.axes) if options.axes != None else '',
+'axes\t%s %s %s'%tuple(options.axes) if options.axes is not None else '',
 '(gauss)\tphi1 %g\tPhi %g\tphi2 %g\tscatter 0.0\tfraction 1.0'%tuple(orientation.asEulers(degrees = True)),
 ]
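Note: besides the PEP 8 cleanup (`x is not None` instead of `x != None`, testing identity against the None singleton rather than invoking `__eq__`), this section is the core of the orientation-compression loop: each grid point looks up earlier, same-phase neighbors via a KD-tree and joins the grain whose orientation is closest. A minimal sketch of that search pattern with hypothetical data, omitting the disorientation test the script actually applies:

    import numpy as np
    from scipy import spatial

    coords = np.random.rand(1000, 3)             # hypothetical grid-point positions
    phase  = np.random.randint(1, 3, 1000)       # hypothetical phase per point
    tree   = spatial.cKDTree(coords)

    grain, nGrains = -np.ones(1000, dtype='int32'), 0
    for me in range(len(coords)):
        neigh = np.array(tree.query_ball_point(coords[me], 0.05), dtype=int)
        neigh = neigh[(neigh < me) & (phase[neigh] == phase[me])]  # earlier points, same phase
        candidates = np.unique(grain[neigh])
        if len(candidates) > 0:
            grain[me] = candidates[0]            # the script picks the closest orientation instead
        else:
            grain[me] = nGrains                  # no acceptable neighbor: start a new grain
            nGrains += 1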
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,math
+import os,sys,math
 import numpy as np
 from optparse import OptionParser
 from scipy import ndimage

@@ -73,15 +73,16 @@ for name in filenames:

 # --- read data ------------------------------------------------------------------------------------
 microstructure = np.tile(np.array(table.microstructure_read(info['grid']),'i').reshape(info['grid'],order='F'),
 np.where(info['grid'] == 1, 2,1)) # make one copy along dimensions with grid == 1
 grid = np.array(microstructure.shape)

 #--- initialize support data -----------------------------------------------------------------------

 periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]/2:-grid[0]/2,
 grid[1]/2:-grid[1]/2,
 grid[2]/2:-grid[2]/2] # periodically extend the microstructure
-microstructure_original = np.copy(microstructure) # store a copy the initial microstructure to find locations of immutable indices
+# store a copy the initial microstructure to find locations of immutable indices
+microstructure_original = np.copy(microstructure)

 X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]]
 gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d))/math.pow(2.0*np.pi*options.d*options.d,1.5)
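Note: `np.tile(m,(3,3,3))[grid[0]/2:-grid[0]/2, ...]` is the periodic-extension idiom used throughout this script: replicate the volume once in every direction, then keep a window reaching half a grid beyond each face. A sketch (Python 3 floor division; the script relies on Python 2 integer `/`):

    import numpy as np

    m    = np.arange(64).reshape(4, 4, 4)
    grid = np.array(m.shape)
    ext  = np.tile(m, (3, 3, 3))[grid[0]//2:-(grid[0]//2),
                                 grid[1]//2:-(grid[1]//2),
                                 grid[2]//2:-(grid[2]//2)]
    assert ext.shape == tuple(2*grid)   # original volume plus half a grid on every side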
@@ -99,44 +100,50 @@ for name in filenames:
 for i in (-1,0,1):
 for j in (-1,0,1):
 for k in (-1,0,1):
+# assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood)
 interfaceEnergy = np.maximum(boundary,
 interfacialEnergy(microstructure,np.roll(np.roll(np.roll(
-microstructure,i,axis=0), j,axis=1), k,axis=2))) # assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood)
+microstructure,i,axis=0), j,axis=1), k,axis=2)))
+# periodically extend interfacial energy array by half a grid size in positive and negative directions
 periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]/2:-grid[0]/2,
 grid[1]/2:-grid[1]/2,
-grid[2]/2:-grid[2]/2] # periodically extend interfacial energy array by half a grid size in positive and negative directions
-index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0., # transform bulk volume (i.e. where interfacial energy is zero)
+grid[2]/2:-grid[2]/2]
+# transform bulk volume (i.e. where interfacial energy is zero)
+index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0.,
 return_distances = False,
-return_indices = True) # want array index of nearest voxel on periodically extended boundary
-# boundaryExt = boundaryExt[index[0].flatten(),index[1].flatten(),index[2].flatten()].reshape(boundaryExt.shape) # fill bulk with energy of nearest interface | question PE: what "flatten" for?
+return_indices = True)
+# want array index of nearest voxel on periodically extended boundary
 periodic_bulkEnergy = periodic_interfaceEnergy[index[0],
 index[1],
 index[2]].reshape(2*grid) # fill bulk with energy of nearest interface
-diffusedEnergy = np.fft.irfftn(np.fft.rfftn(np.where(ndimage.morphology.binary_dilation(interfaceEnergy > 0.,
-structure = struc,
-iterations = options.d/2 + 1), # fat boundary | question PE: why 2d - 1? I would argue for d/2 + 1 !!
-periodic_bulkEnergy[grid[0]/2:-grid[0]/2, # retain filled energy on fat boundary...
-grid[1]/2:-grid[1]/2,
-grid[2]/2:-grid[2]/2], # ...and zero everywhere else
-0.)\
-)*gauss)
+diffusedEnergy = np.fft.irfftn(np.fft.rfftn(
+np.where(
+ndimage.morphology.binary_dilation(interfaceEnergy > 0.,
+structure = struc,
+iterations = options.d/2 + 1), # fat boundary | PE: why 2d-1? I would argue for d/2 + 1
+periodic_bulkEnergy[grid[0]/2:-grid[0]/2, # retain filled energy on fat boundary...
+grid[1]/2:-grid[1]/2,
+grid[2]/2:-grid[2]/2], # ...and zero everywhere else
+0.))*gauss)
 periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]/2:-grid[0]/2,
 grid[1]/2:-grid[1]/2,
 grid[2]/2:-grid[2]/2] # periodically extend the smoothed bulk energy
-index = ndimage.morphology.distance_transform_edt(periodic_diffusedEnergy >= 0.5, # transform voxels close to interface region | question PE: what motivates 1/2 (could be any small number, or)?
+# transform voxels close to interface region | question PE: what motivates 1/2 (could be any small number, or)?
+index = ndimage.morphology.distance_transform_edt(periodic_diffusedEnergy >= 0.5,
 return_distances = False,
 return_indices = True) # want index of closest bulk grain
 microstructure = periodic_microstructure[index[0],
 index[1],
 index[2]].reshape(2*grid)[grid[0]/2:-grid[0]/2,
 grid[1]/2:-grid[1]/2,
 grid[2]/2:-grid[2]/2] # extent grains into interface region

 immutable = np.zeros(microstructure.shape, dtype=bool)
+# find locations where immutable microstructures have been or are now
 for micro in options.immutable:
-immutable += np.logical_or(microstructure == micro, microstructure_original == micro) # find locations where immutable microstructures have been or are now
+immutable += np.logical_or(microstructure == micro, microstructure_original == micro)
-microstructure = np.where(immutable, microstructure_original,microstructure) # undo any changes involving immutable microstructures
+# undo any changes involving immutable microstructures
+microstructure = np.where(immutable, microstructure_original,microstructure)

 # --- renumber to sequence 1...Ngrains if requested ------------------------------------------------
 # http://stackoverflow.com/questions/10741346/np-frequency-counts-for-unique-values-in-an-array
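Note: two idioms in this hunk deserve unpacking. `distance_transform_edt(field == 0., return_distances=False, return_indices=True)` yields, for every voxel, the index of its nearest interface voxel, so indexing the energy array with that result floods the bulk with the nearest interface energy; the `rfftn`/`irfftn` pair then applies the precomputed Gaussian as a periodic convolution. A standalone 2-D sketch of both steps (illustrative kernel, not the script's normalized one):

    import numpy as np
    from scipy import ndimage

    energy = np.zeros((8, 8))
    energy[3, 4] = 1.0                                   # one "interface" voxel
    idx = ndimage.distance_transform_edt(energy == 0.,   # True marks bulk voxels
                                         return_distances=False,
                                         return_indices=True)
    filled = energy[idx[0], idx[1]]                      # nearest-interface energy everywhere
    assert filled.min() == 1.0

    X, Y = np.mgrid[0:8, 0:8]
    gauss = np.exp(-(X*X + Y*Y)/2.0)                     # unnormalized kernel at the origin
    smoothed = np.fft.irfftn(np.fft.rfftn(filled)*np.fft.rfftn(gauss), filled.shape)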
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,string,re
+import os,re
 from optparse import OptionParser
 import damask

@@ -142,7 +142,7 @@ if (options.dimension == 3):

 elif (options.dimension == 2):
 for i,l in enumerate(line):
 # for pts in line[int(abs(lines)-1)]:
 for pts in l:
 x_coord.append(point[int(pts)-1][0])
 y_coord.append(point[int(pts)-1][1])
@@ -3,7 +3,7 @@

 from optparse import OptionParser
 import damask
-import os,sys,math,re,random,string
+import os,sys,math,random
 import numpy as np

 scriptName = os.path.splitext(os.path.basename(__file__))[0]

@@ -19,7 +19,7 @@ def integerFactorization(i):
 return j

 def binAsBins(bin,intervals):
-""" explode compound bin into 3D bins list """
+"""explode compound bin into 3D bins list"""
 bins = [0]*3
 bins[0] = (bin//(intervals[1] * intervals[2])) % intervals[0]
 bins[1] = (bin//intervals[2]) % intervals[1]

@@ -27,17 +27,17 @@ def binAsBins(bin,intervals):
 return bins

 def binsAsBin(bins,intervals):
-""" implode 3D bins into compound bin """
+"""implode 3D bins into compound bin"""
 return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]

 def EulersAsBins(Eulers,intervals,deltas,center):
-""" return list of Eulers translated into 3D bins list """
+"""return list of Eulers translated into 3D bins list"""
 return [int((euler+(0.5-center)*delta)//delta)%interval \
 for euler,delta,interval in zip(Eulers,deltas,intervals) \
 ]

 def binAsEulers(bin,intervals,deltas,center):
-""" compound bin number translated into list of Eulers """
+"""compound bin number translated into list of Eulers"""
 Eulers = [0.0]*3
 Eulers[2] = (bin%intervals[2] + center)*deltas[2]
 Eulers[1] = (bin//intervals[2]%intervals[1] + center)*deltas[1]

@@ -45,7 +45,7 @@ def binAsEulers(bin,intervals,deltas,center):
 return Eulers

 def directInvRepetitions(probability,scale):
-""" calculate number of samples drawn by direct inversion """
+"""calculate number of samples drawn by direct inversion"""
 nDirectInv = 0
 for bin in range(len(probability)): # loop over bins
 nDirectInv += int(round(probability[bin]*scale)) # calc repetition

@@ -55,15 +55,12 @@ def directInvRepetitions(probability,scale):
 # ---------------------- sampling methods -----------------------------------------------------------------------

 # ----- efficient algorithm ---------

 def directInversion (ODF,nSamples):
-""" ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians) """
+"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""

 nOptSamples = max(ODF['nNonZero'],nSamples) # random subsampling if too little samples requested

 nInvSamples = 0
 repetition = [None]*ODF['nBins']
-probabilityScale = nOptSamples # guess

 scaleLower = 0.0
 nInvSamplesLower = 0

@@ -96,7 +93,7 @@ def directInversion (ODF,nSamples):
 for bin in range(ODF['nBins']): # loop over bins
 repetition[bin] = int(round(ODF['dV_V'][bin]*scale)) # calc repetition

 # build set
 set = [None]*nInvSamples
 i = 0
 for bin in range(ODF['nBins']):

@@ -110,7 +107,6 @@ def directInversion (ODF,nSamples):
 if (j == nInvSamples-1): ex = j
 else: ex = int(round(random.uniform(j+0.5,nInvSamples-0.5)))
 bin = set[ex]
-bins = binAsBins(bin,ODF['interval']) # PE: why are we doing this??
 Eulers = binAsEulers(bin,ODF['interval'],ODF['delta'],ODF['center'])
 orientations[j] = np.degrees(Eulers)
 reconstructedODF[bin] += unitInc
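Note: `directInversion` realizes the "efficient algorithm": every bin is repeated `int(round(dV_V[bin]*scale))` times (with `scale` presumably tuned between the `scaleLower` bounds kept in the lines elided here) and the resulting pool is then randomly subsampled down to nSamples; the removed `bins = binAsBins(...)` line was indeed dead code, as the PE comment suspected. A toy version of the repeat-then-subsample idea with made-up volume fractions:

    import random
    import numpy as np

    dV_V  = np.array([0.5, 0.3, 0.2])      # hypothetical bin volume fractions, sum 1
    scale = 10
    repetition = [int(round(v*scale)) for v in dV_V]              # [5, 3, 2]
    pool = [b for b, r in enumerate(repetition) for _ in range(r)]
    samples = random.sample(pool, 4)       # random subsampling down to nSamples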
@@ -122,8 +118,7 @@ def directInversion (ODF,nSamples):
 # ----- trial and error algorithms ---------

 def MonteCarloEulers (ODF,nSamples):
-""" ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians) """
+"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""

 countMC = 0
 maxdV_V = max(ODF['dV_V'])
 orientations = np.zeros((nSamples,3),'f')

@@ -146,8 +141,7 @@ def MonteCarloEulers (ODF,nSamples):


 def MonteCarloBins (ODF,nSamples):
-""" ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians) """
+"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""

 countMC = 0
 maxdV_V = max(ODF['dV_V'])
 orientations = np.zeros((nSamples,3),'f')

@@ -169,8 +163,7 @@ def MonteCarloBins (ODF,nSamples):


 def TothVanHoutteSTAT (ODF,nSamples):
-""" ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians) """
+"""ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""

 orientations = np.zeros((nSamples,3),'f')
 reconstructedODF = np.zeros(ODF['nBins'],'f')
 unitInc = 1.0/nSamples

@@ -199,7 +192,7 @@ def TothVanHoutteSTAT (ODF,nSamples):
 # --------------------------------------------------------------------
 # MAIN
 # --------------------------------------------------------------------
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description ="""
 Transform linear binned ODF data into given number of orientations.
 IA: integral approximation, STAT: Van Houtte, MC: Monte Carlo

@@ -251,7 +244,7 @@ for name in filenames:
 continue
 damask.util.report(scriptName,name)

-randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed == None else options.randomSeed # random seed per file for second phase
+randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed is None else options.randomSeed # random seed per file for second phase
 random.seed(randomSeed)

 # ------------------------------------------ read header and data ---------------------------------

@@ -308,13 +301,13 @@ for name in filenames:
 'Reference Integral: %12.11f\n'%(ODF['limit'][0]*ODF['limit'][2]*(1-math.cos(ODF['limit'][1]))),
 ])

 # call methods
 Functions = {'IA': 'directInversion', 'STAT': 'TothVanHoutteSTAT', 'MC': 'MonteCarloBins'}
 method = Functions[options.algorithm]

 Orientations, ReconstructedODF = (globals()[method])(ODF,nSamples)

 # calculate accuracy of sample
 squaredDiff = {'orig':0.0,method:0.0}
 squaredRelDiff = {'orig':0.0,method:0.0}
 mutualProd = {'orig':0.0,method:0.0}

@@ -375,7 +368,7 @@ for name in filenames:
 '(gauss) phi1 {} Phi {} phi2 {} scatter 0.0 fraction 1.0'.format(*eulers),
 ]

 #--- output finalization --------------------------------------------------------------------------

 with (open(os.path.splitext(name)[0]+'_'+method+'_'+str(nSamples)+'_material.config','w')) as outfile:
 outfile.write('\n'.join(materialConfig)+'\n')
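Note: the docstring edits follow PEP 257 (triple double quotes, no padding spaces). The compound-bin helpers above encode an (i,j,k) Euler-bin triple into one flat row-major index; a self-contained round-trip check, with the third component of binAsBins filled in by hand (it is elided in the diff, but binsAsBin forces it to be bin % intervals[2]):

    intervals = (4, 3, 5)                  # hypothetical bins per Euler angle

    def binAsBins(b, intervals):
        """explode compound bin into 3D bins list"""
        return [(b//(intervals[1]*intervals[2])) % intervals[0],
                (b//intervals[2]) % intervals[1],
                b % intervals[2]]          # assumed: component elided in the diff

    def binsAsBin(bins, intervals):
        """implode 3D bins into compound bin"""
        return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]

    assert binsAsBin(binAsBins(37, intervals), intervals) == 37   # round trip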
@@ -1,13 +1,14 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-'''
+"""
 Writes meaningful labels to the marc input file (*.dat)
-based on the files
+
+output is based on the files
 <modelname_jobname>.output<Homogenization/Crystallite/Constitutive>
 that are written during the first run of the model.
-'''
+"""
-import sys,os,re,string
+import sys,os,re
 from optparse import OptionParser
 import damask

@@ -16,7 +17,6 @@ scriptID = ' '.join([scriptName,damask.version])

 # -----------------------------
 def ParseOutputFormat(filename,what,me):
-# -----------------------------
 format = {'outputs':{},'specials':{'brothers':[]}}

 outputmetafile = filename+'.output'+what

@@ -121,7 +121,7 @@ for file in files:

 for what in me:
 outputFormat[what] = ParseOutputFormat(formatFile,what,me[what])
-if not '_id' in outputFormat[what]['specials']:
+if '_id' not in outputFormat[what]['specials']:
 print "'%s' not found in <%s>"%(me[what],what)
 print '\n'.join(map(lambda x:' '+x,outputFormat[what]['specials']['brothers']))
 sys.exit(1)
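Note: `'_id' not in d` and `not '_id' in d` compile to the same membership test (on a dict it checks the keys); PEP 8 prefers the former because it reads as the single operator it is. For instance:

    d = {'specials': {'brothers': []}}
    assert ('_id' not in d['specials']) == (not '_id' in d['specials'])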
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import sys,os,string
+import sys,os
 import numpy as np
 from optparse import OptionParser
 import damask

@@ -17,14 +17,13 @@ def outMentat(cmd,locals):
 exec(cmd[3:])
 elif cmd[0:3] == '(?)':
 cmd = eval(cmd[3:])
-py_send(cmd)
+py_mentat.py_send(cmd)
 else:
-py_send(cmd)
+py_mentat.py_send(cmd)
 return

 #-------------------------------------------------------------------------------------------------
 def outFile(cmd,locals,dest):
-#-------------------------------------------------------------------------------------------------
 if cmd[0:3] == '(!)':
 exec(cmd[3:])
 elif cmd[0:3] == '(?)':

@@ -37,7 +36,6 @@ def outFile(cmd,locals,dest):

 #-------------------------------------------------------------------------------------------------
 def output(cmds,locals,dest):
-#-------------------------------------------------------------------------------------------------
 for cmd in cmds:
 if isinstance(cmd,list):
 output(cmd,locals,dest)

@@ -58,12 +56,12 @@ def servoLink():
 'max': np.zeros(3,dtype='d'),
 'delta': np.zeros(3,dtype='d'),
 }
-Nnodes = py_get_int("nnodes()")
+Nnodes = py_mentat.py_get_int("nnodes()")
 NodeCoords = np.zeros((Nnodes,3),dtype='d')
 for node in xrange(Nnodes):
-NodeCoords[node,0] = py_get_float("node_x(%i)"%(node+1))
-NodeCoords[node,1] = py_get_float("node_y(%i)"%(node+1))
-NodeCoords[node,2] = py_get_float("node_z(%i)"%(node+1))
+NodeCoords[node,0] = py_mentat.py_get_float("node_x(%i)"%(node+1))
+NodeCoords[node,1] = py_mentat.py_get_float("node_y(%i)"%(node+1))
+NodeCoords[node,2] = py_mentat.py_get_float("node_z(%i)"%(node+1))
 box['min'] = NodeCoords.min(axis=0) # find the bounding box
 box['max'] = NodeCoords.max(axis=0)
 box['delta'] = box['max']-box['min']

@@ -79,7 +77,6 @@ def servoLink():
 #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 # loop over all nodes
 for node in xrange(Nnodes):
-pos = {}
 key = {}
 maxFlag = [False, False, False]
 Nmax = 0

@@ -97,7 +94,7 @@ def servoLink():
 maxFlag[coord] = True # remember face membership (for linked nodes)

 if Nmin > 0: # node is on a back face
 # prepare for any non-existing entries in the data structure
 if key['x'] not in baseNode.keys():
 baseNode[key['x']] = {}
 if key['y'] not in baseNode[key['x']].keys():

@@ -166,7 +163,7 @@ else:
 file={'croak':sys.stdout}

 try:
-from py_mentat import *
+import py_mentat
 except:
 file['croak'].write('error: no valid Mentat release found')
 sys.exit(-1)

@@ -176,8 +173,9 @@ outputLocals = {}
 file['croak'].write('\033[1m'+scriptName+'\033[0m\n\n')
 file['croak'].write( 'waiting to connect...\n')
 try:
-py_connect('',options.port)
-output(['*draw_manual'],outputLocals,'Mentat') # prevent redrawing in Mentat, should be much faster. Since py_connect has no return value, try this to determine if failed or not
+py_mentat.py_connect('',options.port)
+# prevent redrawing in Mentat, should be much faster. Since py_connect has no return value, try this to determine if failed or not
+output(['*draw_manual'],outputLocals,'Mentat')
 except:
 file['croak'].write('Could not connect. Set Tools/Python/"Run as Separate Process" & "Initiate"...\n')
 sys.exit()

@@ -191,7 +189,7 @@ output(['*remove_all_servos',

 cmds = servoLink()
 output(cmds,outputLocals,'Mentat')
-py_disconnect()
+py_mentat.py_disconnect()

 if options.verbose:
 output(cmds,outputLocals,sys.stdout)
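Note: replacing `from py_mentat import *` with `import py_mentat` is the substantive change here: the star import hid where `py_send`, `py_get_int`, and friends came from and could silently shadow or collide with local names. A hedged sketch of the resulting pattern (the bare `except` mirrors the script; catching `ImportError` would be tighter):

    try:
        import py_mentat                      # was: from py_mentat import *
    except:
        py_mentat = None                      # no valid Mentat release found

    def send(cmd):
        if py_mentat is None:
            raise RuntimeError('no valid Mentat release found')
        py_mentat.py_send(cmd)                # was the bare py_send(cmd)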
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os, sys, string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask

@@ -12,19 +12,17 @@ sys.path.append(damask.solver.Marc().libraryPath('../../'))

 #-------------------------------------------------------------------------------------------------
 def outMentat(cmd,locals):
-#-------------------------------------------------------------------------------------------------
 if cmd[0:3] == '(!)':
 exec(cmd[3:])
 elif cmd[0:3] == '(?)':
 cmd = eval(cmd[3:])
-py_send(cmd)
+py_mentat.py_send(cmd)
 else:
-py_send(cmd)
+py_mentat.py_send(cmd)
 return

 #-------------------------------------------------------------------------------------------------
 def outFile(cmd,locals,dest):
-#-------------------------------------------------------------------------------------------------
 if cmd[0:3] == '(!)':
 exec(cmd[3:])
 elif cmd[0:3] == '(?)':

@@ -36,7 +34,6 @@ def outFile(cmd,locals,dest):

 #-------------------------------------------------------------------------------------------------
 def output(cmds,locals,dest):
-#-------------------------------------------------------------------------------------------------
 for cmd in cmds:
 if isinstance(cmd,list):
 output(cmd,locals,dest)

@@ -51,26 +48,24 @@ def output(cmds,locals,dest):

 #-------------------------------------------------------------------------------------------------
 def init():
-#-------------------------------------------------------------------------------------------------
 return [
 "#"+' '.join([scriptID] + sys.argv[1:]),
 "*draw_manual", # prevent redrawing in Mentat, should be much faster
 "*new_model yes",
 "*reset",
 "*select_clear",
 "*set_element_class hex8",
 "*set_nodes off",
 "*elements_solid",
 "*show_view 4",
 "*reset_view",
 "*view_perspective",
 "*redraw",
 ]

 #-------------------------------------------------------------------------------------------------
 def mesh(r,d):
-#-------------------------------------------------------------------------------------------------
 return [
 "*add_nodes",
 "%f %f %f"%(0.0,0.0,0.0),

@@ -102,7 +97,6 @@ def mesh(r,d):

 #-------------------------------------------------------------------------------------------------
 def material():
-#-------------------------------------------------------------------------------------------------
 cmds = [\
 "*new_mater standard",
 "*mater_option general:state:solid",

@@ -112,7 +106,7 @@ def material():
 "*add_mater_elements",
 "all_existing",
 "*geometry_type mech_three_solid",
-# "*geometry_option red_integ_capacity:on", # see below: reduced integration with one IP gave trouble being always OUTDATED...
+# "*geometry_option red_integ_capacity:on", reduced integration with one IP gave trouble being always OUTDATED...
 "*add_geometry_elements",
 "all_existing",
 ]

@@ -122,13 +116,13 @@ def material():

 #-------------------------------------------------------------------------------------------------
 def geometry():
-#-------------------------------------------------------------------------------------------------
 cmds = [\
 "*geometry_type mech_three_solid",
 # "*geometry_option red_integ_capacity:on",
 "*add_geometry_elements",
 "all_existing",
-"*element_type 7", # we are NOT using reduced integration (type 117) but opt for /elementhomogeneous/ in the respective phase description (material.config)
+# we are NOT using reduced integration (type 117) but opt for /elementhomogeneous/ in the respective phase description (material.config)
+"*element_type 7",
 "all_existing",
 ]

@@ -137,7 +131,6 @@ def geometry():

 #-------------------------------------------------------------------------------------------------
 def initial_conditions(homogenization,microstructures):
-#-------------------------------------------------------------------------------------------------
 elements = []
 element = 0
 for id in microstructures:

@@ -204,7 +197,7 @@ parser.set_defaults(port = None,

 if options.port:
 try:
-from py_mentat import *
+import py_mentat
 except:
 parser.error('no valid Mentat release found.')

@@ -258,9 +251,9 @@ for name in filenames:

 outputLocals = {}
 if options.port:
-py_connect('',options.port)
+py_mentat.py_connect('',options.port)
 output(cmds,outputLocals,'Mentat')
-py_disconnect()
+py_mentat.py_disconnect()
 else:
 output(cmds,outputLocals,table.__IO__['out']) # bad hack into internals of table class...
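Note: this file gets the same `py_mentat` namespacing, plus removal of the decorative comment rules that sat between each `def` line and its body (where a docstring belongs). The `output()` helper these scripts share walks a possibly nested command list; a minimal stand-in showing the dispatch shape (list destination instead of py_send or a file):

    def output(cmds, dest):
        for cmd in cmds:
            if isinstance(cmd, list):
                output(cmd, dest)         # recurse into nested command blocks
            else:
                dest.append(cmd)          # stand-in for py_mentat.py_send / file.write

    log = []
    output(["*new_model yes", ["*reset", "*select_clear"]], log)
    assert log == ["*new_model yes", "*reset", "*select_clear"]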
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import sys,os,math,re,string
|
import sys,os,math,re
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
|
||||||
|
@ -18,7 +18,7 @@ except:
|
||||||
sys.path.append(damask.solver.Marc().libraryPath('../../'))
|
sys.path.append(damask.solver.Marc().libraryPath('../../'))
|
||||||
|
|
||||||
try: # check for MSC.Mentat Python interface
|
try: # check for MSC.Mentat Python interface
|
||||||
from py_mentat import *
|
import py_mentat
|
||||||
MentatCapability = True
|
MentatCapability = True
|
||||||
except:
|
except:
|
||||||
MentatCapability = False
|
MentatCapability = False
|
||||||
|
@ -29,10 +29,10 @@ def outMentat(cmd,locals):
|
||||||
exec(cmd[3:])
|
exec(cmd[3:])
|
||||||
elif cmd[0:3] == '(?)':
|
elif cmd[0:3] == '(?)':
|
||||||
cmd = eval(cmd[3:])
|
cmd = eval(cmd[3:])
|
||||||
py_send(cmd)
|
py_mentat.py_send(cmd)
|
||||||
if 'log' in locals: locals['log'].append(cmd)
|
if 'log' in locals: locals['log'].append(cmd)
|
||||||
else:
|
else:
|
||||||
py_send(cmd)
|
py_mentat.py_send(cmd)
|
||||||
if 'log' in locals: locals['log'].append(cmd)
|
if 'log' in locals: locals['log'].append(cmd)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -83,10 +83,9 @@ def rcbOrientationParser(content,idcolumn):
|
||||||
|
|
||||||
return grains
|
return grains
|
||||||
|
|
||||||
def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn): # parser for TSL-OIM reconstructed boundary files
|
def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
|
||||||
|
"""parser for TSL-OIM reconstructed boundary files"""
|
||||||
# find bounding box
|
# find bounding box
|
||||||
|
|
||||||
boxX = [1.*sys.maxint,-1.*sys.maxint]
|
boxX = [1.*sys.maxint,-1.*sys.maxint]
|
||||||
boxY = [1.*sys.maxint,-1.*sys.maxint]
|
boxY = [1.*sys.maxint,-1.*sys.maxint]
|
||||||
x = [0.,0.]
|
x = [0.,0.]
|
||||||
|
@ -145,8 +144,8 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn): # parser for T
|
||||||
match = True
|
match = True
|
||||||
break
|
break
|
||||||
break
|
break
|
||||||
|
# force to boundary if inside tolerance to it
|
||||||
if (not match):
|
if (not match):
|
||||||
# force to boundary if inside tolerance to it
|
|
||||||
if (abs(x[i])<dX*tolerance):
|
if (abs(x[i])<dX*tolerance):
|
||||||
x[i] = 0
|
x[i] = 0
|
||||||
if (abs(dX-x[i])<dX*tolerance):
|
if (abs(dX-x[i])<dX*tolerance):
|
||||||
|
@ -226,7 +225,7 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn): # parser for T
|
||||||
for keyY in allkeysY:
|
for keyY in allkeysY:
|
||||||
points.append({'coords': [float(keyX)*scalePatch,float(keyY)*scalePatch], 'segments': connectivityXY[keyX][keyY]})
|
points.append({'coords': [float(keyX)*scalePatch,float(keyY)*scalePatch], 'segments': connectivityXY[keyX][keyY]})
|
||||||
for segment in connectivityXY[keyX][keyY]:
|
for segment in connectivityXY[keyX][keyY]:
|
||||||
if (segments[segment] == None):
|
if (segments[segment] is None):
|
||||||
segments[segment] = pointId
|
segments[segment] = pointId
|
||||||
else:
|
else:
|
||||||
segments[segment].append(pointId)
|
segments[segment].append(pointId)
|
||||||
|
@ -259,7 +258,7 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn): # parser for T
|
||||||
points[myEnd]['coords'][1]-points[myStart]['coords'][1]]
|
points[myEnd]['coords'][1]-points[myStart]['coords'][1]]
|
||||||
myLen = math.sqrt(myV[0]**2+myV[1]**2)
|
myLen = math.sqrt(myV[0]**2+myV[1]**2)
|
||||||
best = {'product': -2.0, 'peek': -1, 'len': -1, 'point': -1}
|
best = {'product': -2.0, 'peek': -1, 'len': -1, 'point': -1}
|
||||||
for peek in points[myEnd]['segments']: # trying in turn all segments emanating from current endPoint
|
for peek in points[myEnd]['segments']: # trying in turn all segments emanating from current end
|
||||||
if peek == myWalk:
|
if peek == myWalk:
|
||||||
continue
|
continue
|
||||||
peekEnd = segments[peek][1] if segments[peek][0] == myEnd else segments[peek][0]
|
peekEnd = segments[peek][1] if segments[peek][0] == myEnd else segments[peek][0]
|
||||||
|
@ -652,7 +651,6 @@ def cleanUp(a):
|
||||||
|
|
||||||
# -------------------------
|
# -------------------------
|
||||||
def image(name,imgsize,marginX,marginY,rcData):
|
def image(name,imgsize,marginX,marginY,rcData):
|
||||||
# -------------------------
|
|
||||||
|
|
||||||
dX = max([coords[0] for coords in rcData['point']])
|
dX = max([coords[0] for coords in rcData['point']])
|
||||||
dY = max([coords[1] for coords in rcData['point']])
|
dY = max([coords[1] for coords in rcData['point']])
|
||||||
|
@ -697,87 +695,87 @@ def image(name,imgsize,marginX,marginY,rcData):
|
||||||
img.save(name+'.png',"PNG")
|
img.save(name+'.png',"PNG")
|
||||||
|
|
||||||
# -------------------------
|
# -------------------------
|
||||||
def inside(x,y,points): # tests whether point(x,y) is within polygon described by points
|
def inside(x,y,points):
|
||||||
# -------------------------
|
"""tests whether point(x,y) is within polygon described by points"""
|
||||||
inside = False
|
inside = False
|
||||||
npoints=len(points)
|
npoints=len(points)
|
||||||
(x1,y1) = points[npoints-1] # start with last point of points
|
(x1,y1) = points[npoints-1] # start with last point of points
|
||||||
startover = (y1 >= y) # am I above testpoint?
|
startover = (y1 >= y) # am I above testpoint?
|
||||||
for i in range(npoints): # loop through all points
|
for i in range(npoints): # loop through all points
|
||||||
(x2,y2) = points[i] # next point
|
(x2,y2) = points[i] # next point
|
||||||
endover = (y2 >= y) # am I above testpoint?
|
endover = (y2 >= y) # am I above testpoint?
|
||||||
if (startover != endover): # one above one below testpoint?
|
if (startover != endover): # one above one below testpoint?
|
||||||
if((y2 - y)*(x2 - x1) <= (y2 - y1)*(x2 - x)): # check for intersection
|
if((y2 - y)*(x2 - x1) <= (y2 - y1)*(x2 - x)): # check for intersection
|
||||||
if (endover):
|
if (endover):
|
||||||
inside = not inside # found intersection
|
inside = not inside # found intersection
|
||||||
else:
|
else:
|
||||||
if (not endover):
|
if (not endover):
|
||||||
inside = not inside # found intersection
|
inside = not inside # found intersection
|
||||||
startover = endover # make second point first point
|
startover = endover # make second point first point
|
||||||
(x1,y1) = (x2,y2)
|
(x1,y1) = (x2,y2)
|
||||||
|
|
||||||
return inside
|
return inside
|
||||||
|
|
||||||
# -------------------------
|
# -------------------------
|
||||||
def fftbuild(rcData,height,xframe,yframe,resolution,extrusion): # build array of grain numbers
|
def fftbuild(rcData,height,xframe,yframe,resolution,extrusion):
|
||||||
# -------------------------
|
"""build array of grain numbers"""
|
||||||
maxX = -1.*sys.maxint
|
maxX = -1.*sys.maxint
|
||||||
maxY = -1.*sys.maxint
|
maxY = -1.*sys.maxint
|
||||||
for line in rcData['point']: # find data range
|
for line in rcData['point']: # find data range
|
||||||
(x,y) = line
|
(x,y) = line
|
||||||
maxX = max(maxX, x)
|
maxX = max(maxX, x)
|
||||||
maxY = max(maxY, y)
|
maxY = max(maxY, y)
|
||||||
xsize = maxX+2*xframe # add framsize
|
xsize = maxX+2*xframe # add framsize
|
||||||
ysize = maxY+2*yframe
|
ysize = maxY+2*yframe
|
||||||
xres = int(round(resolution/2.0)*2) # use only even resolution
|
xres = int(round(resolution/2.0)*2) # use only even resolution
|
||||||
yres = int(round(xres/xsize*ysize/2.0)*2) # calculate other resolutions
|
yres = int(round(xres/xsize*ysize/2.0)*2) # calculate other resolutions
|
||||||
zres = extrusion
|
zres = extrusion
|
||||||
zsize = extrusion*min([xsize/xres,ysize/yres])
|
zsize = extrusion*min([xsize/xres,ysize/yres])
|
||||||
|
|
||||||
fftdata = {'fftpoints':[], \
|
fftdata = {'fftpoints':[], \
|
||||||
'resolution':(xres,yres,zres), \
|
'resolution':(xres,yres,zres), \
|
||||||
'dimension':(xsize,ysize,zsize)}
|
'dimension':(xsize,ysize,zsize)}
|
||||||
|
|
||||||
frameindex=len(rcData['grain'])+1 # calculate frame index as largest grain index plus one
|
frameindex=len(rcData['grain'])+1 # calculate frame index as largest grain index plus one
|
||||||
dx = xsize/(xres+1) # calculate step sizes
|
dx = xsize/(xres+1) # calculate step sizes
|
||||||
dy = ysize/(yres+1)
|
dy = ysize/(yres+1)
|
||||||
|
|
||||||
grainpoints = []
|
grainpoints = []
|
||||||
for segments in rcData['grain']: # get segments of each grain
|
for segments in rcData['grain']: # get segments of each grain
|
||||||
points = {}
|
points = {}
|
||||||
for i,segment in enumerate(segments[:-1]): # loop thru segments except last (s=[start,end])
|
for i,segment in enumerate(segments[:-1]): # loop thru segments except last (s=[start,end])
|
||||||
points[rcData['segment'][segment][0]] = i # assign segment index to start point
|
points[rcData['segment'][segment][0]] = i # assign segment index to start point
|
||||||
points[rcData['segment'][segment][1]] = i # assigne segment index to endpoint
|
points[rcData['segment'][segment][1]] = i # assigne segment index to endpoint
|
||||||
for i in range(2): # check points of last segment
|
for i in range(2): # check points of last segment
|
||||||
if points[rcData['segment'][segments[-1]][i]] != 0: # not on first segment
|
if points[rcData['segment'][segments[-1]][i]] != 0: # not on first segment
|
||||||
points[rcData['segment'][segments[-1]][i]] = len(segments)-1 # assign segment index to last point
|
points[rcData['segment'][segments[-1]][i]] = len(segments)-1 # assign segment index to last point
|
||||||
|
|
||||||
grainpoints.append([]) # start out blank for current grain
|
grainpoints.append([]) # start out blank for current grain
|
||||||
for p in sorted(points, key=points.get): # loop thru set of sorted points
|
for p in sorted(points, key=points.get): # loop thru set of sorted points
|
||||||
grainpoints[-1].append([rcData['point'][p][0],rcData['point'][p][1]]) # append x,y of point
|
grainpoints[-1].append([rcData['point'][p][0],rcData['point'][p][1]]) # append x,y of point
|
||||||
|
|
||||||
  bestGuess = 0                                               # assume grain 0 as best guess
  for i in range(int(xres*yres)):                             # walk through all points in xy plane
    xtest = -xframe+((i%xres)+0.5)*dx                         # calculate coordinates
    ytest = -yframe+(int(i/xres)+0.5)*dy
    if xtest < 0 or xtest > maxX:                             # check whether part of frame
      if ytest < 0 or ytest > maxY:                           # part of edges
        fftdata['fftpoints'].append(frameindex+2)             # append frame index to result array
      else:                                                   # part of xframe
        fftdata['fftpoints'].append(frameindex)
    elif ytest < 0 or ytest > maxY:                           # part of yframe
      fftdata['fftpoints'].append(frameindex+1)
    else:
      if inside(xtest,ytest,grainpoints[bestGuess]):          # check best guess first
        fftdata['fftpoints'].append(bestGuess+1)
      else:                                                   # no success
        for g in range(len(grainpoints)):                     # test all grains
          if inside(xtest,ytest,grainpoints[g]):
            fftdata['fftpoints'].append(g+1)
            bestGuess = g                                     # remember hit to try first next time
            break
  return fftdata

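The point-in-polygon helper inside(x,y,polygon) is called above but defined elsewhere in this script. A minimal sketch of the even-odd ray-casting test it corresponds to might look as follows; the name and argument order are taken from the call sites, the body is an assumption, not the script's actual code:

def inside(x, y, polygon):
  """Even-odd rule: count crossings of a horizontal ray from (x,y) to +infinity."""
  odd = False
  j = len(polygon)-1                                          # index of previous vertex
  for i in range(len(polygon)):
    (xi,yi),(xj,yj) = polygon[i],polygon[j]
    if (yi > y) != (yj > y):                                  # edge straddles the ray's height
      if x < xi + (y-yi)*(xj-xi)/(yj-yi):                     # crossing lies to the right of (x,y): ray hits it
        odd = not odd
    j = i
  return odd
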
# ----------------------- MAIN -------------------------------
@ -926,12 +924,12 @@ if 'mentat' in options.output:
         ]

  outputLocals = {'log':[]}
  if options.port is not None:
    py_mentat.py_connect('',options.port)
    try:
      output(cmds,outputLocals,'Mentat')
    finally:
      py_mentat.py_disconnect()
    if 'procedure' in options.output:
      output(outputLocals['log'],outputLocals,'Stdout')
  else:
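The try/finally above guarantees that the Mentat session is closed even when output() raises. As a standalone pattern (a sketch, assuming only that py_mentat.py_connect/py_disconnect behave as used above; with_mentat is a hypothetical helper, not part of the script):

import py_mentat

def with_mentat(port, body):
  """Run body() inside a connected Mentat session, always disconnecting afterwards."""
  py_mentat.py_connect('',port)                               # attach to the Mentat instance listening on port
  try:
    body()                                                    # commands issued here reach Mentat
  finally:
    py_mentat.py_disconnect()                                 # release the connection, even on error
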
@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,sys,math,random
import numpy as np
import damask
from optparse import OptionParser,OptionGroup
@ -14,9 +14,7 @@ scriptID = ' '.join([scriptName,damask.version])

# ------------------------------------------ aux functions ---------------------------------

def kdtree_search(cloud, queryPoints):
  """find distances to nearest neighbor among cloud (N,d) for each of the queryPoints (n,d)"""
  n = queryPoints.shape[0]
  distances = np.zeros(n,dtype=float)
  tree = spatial.cKDTree(cloud)
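The hunk ends before the actual query. Judging from the docstring, the remainder presumably reduces to a single nearest-neighbor lookup; a self-contained sketch of the complete function (assuming the script imports scipy's spatial module, which the call to spatial.cKDTree implies):

from scipy import spatial

def kdtree_search(cloud, queryPoints):
  """find distances to nearest neighbor among cloud (N,d) for each of the queryPoints (n,d)"""
  tree = spatial.cKDTree(cloud)                               # build k-d tree once over the existing cloud
  distances,indices = tree.query(queryPoints)                 # k=1 by default: nearest neighbor only
  return distances
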
@ -112,7 +110,7 @@ parser.set_defaults(randomSeed = None,

options.grid = np.array(options.grid)
gridSize = options.grid.prod()

if options.randomSeed is None: options.randomSeed = int(os.urandom(4).encode('hex'), 16)
np.random.seed(options.randomSeed)                            # init random generators
random.seed(options.randomSeed)
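str.encode('hex') is a Python 2 idiom and disappears in Python 3. Should the script ever be ported, the same four random bytes can be turned into a seed without the hex round-trip (a sketch, not part of this commit):

import os

randomSeed = int.from_bytes(os.urandom(4),byteorder='big')    # unsigned 32-bit value, same range as int(...,16) above
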
@ -133,10 +131,12 @@ for name in filenames:

  remarks = []
  errors = []
  if gridSize == 0:
    errors.append('zero grid dimension for %s.'%(', '.join([['a','b','c'][x] for x in np.where(options.grid == 0)[0]])))
  if options.N > gridSize/10.: errors.append('seed count exceeds 0.1 of grid points.')
  if options.selective and 4./3.*math.pi*(options.distance/2.)**3*options.N > 0.5:
    (remarks if options.force else errors).append('maximum recommended seed point count for given distance is {}.{}'.
                                                  format(int(3./8./math.pi/(options.distance/2.)**3),'..'*options.force))

  if remarks != []: damask.util.croak(remarks)
  if errors != []:
@ -153,7 +153,7 @@ for name in filenames:

  if not options.selective:

    seeds = np.zeros((3,options.N),dtype='d')                 # seed positions array
    gridpoints = random.sample(range(gridSize),options.N)     # choose first N from random permutation of grid positions

    seeds[0,:] = (np.mod(gridpoints,options.grid[0])\
                 +np.random.random(options.N)) /options.grid[0]
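The hunk stops after the x component. By the same unravel-and-jitter logic, the y and z components presumably follow from integer division by the faster-running grid dimensions; a sketch of that continuation (idx is an introduced helper, not the script's variable):

    idx = np.asarray(gridpoints)                              # linear grid indices as array for vectorized math
    seeds[1,:] = (np.mod(idx//options.grid[0],options.grid[1])\
                 +np.random.random(options.N)) /options.grid[1]   # strip x index, wrap at grid[1]
    seeds[2,:] = (np.mod(idx//(options.grid[0]*options.grid[1]),options.grid[2])\
                 +np.random.random(options.N)) /options.grid[2]   # strip x and y indices, wrap at grid[2]
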
@ -174,7 +174,7 @@ for name in filenames:

      distances = kdtree_search(seeds[:i],candidates)
      best = distances.argmax()
      if distances[best] > options.distance:                  # require minimum separation
        seeds[i] = candidates[best]                           # maximum separation to existing point cloud
        i += 1
        if i%(options.N/100.) < 1: damask.util.croak('.',False)
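Only the acceptance step of the selective mode is visible here. The surrounding loop is a best-candidate scheme: draw a batch of random positions, keep the one farthest from all seeds placed so far, and accept it only if it clears the minimum distance. A self-contained sketch of that idea (function name and batch size are assumptions, not the script's):

import numpy as np
from scipy import spatial

def selective_seeds(N, distance, batch=10):
  """Best-candidate sampling in the unit cube: greedily maximize nearest-neighbor distance."""
  seeds = np.empty((N,3))
  seeds[0] = np.random.random(3)                              # first seed is unconstrained
  i = 1
  while i < N:
    candidates = np.random.random((batch,3))                  # trial positions
    dist,index = spatial.cKDTree(seeds[:i]).query(candidates) # distance to nearest existing seed
    best = dist.argmax()
    if dist[best] > distance:                                 # require minimum separation
      seeds[i] = candidates[best]
      i += 1                                                  # otherwise discard batch and redraw
  return seeds

As in the script itself, an infeasible distance makes this loop spin forever, which is what the seed-count sanity check further up guards against.
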
@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,itertools
import numpy as np
from optparse import OptionParser
import damask
@ -50,8 +50,8 @@ parser.set_defaults(pos = 'pos',

(options,filenames) = parser.parse_args()

if options.whitelist is not None: options.whitelist = map(int,options.whitelist)
if options.blacklist is not None: options.blacklist = map(int,options.blacklist)

# --- loop over input files -------------------------------------------------------------------------

@ -101,13 +101,11 @@ for name in filenames:

# --- filtering of grain voxels --------------------------------------------------------------------
  mask = np.logical_and(
                        np.ones_like(table.data[:,3],bool) if options.whitelist is None \
                        else np.in1d(table.data[:,3].ravel(), options.whitelist).reshape(table.data[:,3].shape),
                        np.ones_like(table.data[:,3],bool) if options.blacklist is None \
                        else np.invert(np.in1d(table.data[:,3].ravel(), options.blacklist).reshape(table.data[:,3].shape))
                       )
  table.data = table.data[mask]
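Each ternary above degrades to an all-True mask when the corresponding list is absent, so np.logical_and only restricts where a list was actually given. A toy demonstration of the same np.in1d logic (grain IDs invented for illustration):

import numpy as np

grainIDs  = np.array([1,2,3,2,5,1])                           # stand-in for table.data[:,3]
whitelist = [1,2]                                             # keep only these grains
blacklist = [2]                                               # ...but drop these

mask = np.logical_and(np.in1d(grainIDs,whitelist),
                      np.invert(np.in1d(grainIDs,blacklist)))
print(grainIDs[mask])                                         # -> [1 1]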