next bunch of accepted scripts

parent 3ed3073eff
commit f7fedc4744
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
 
-import os,sys,string,math,types,time
+import os,sys,math,types,time
 import scipy.spatial, numpy as np
 from optparse import OptionParser
 import damask
@@ -94,18 +94,18 @@ parser.set_defaults(symmetry = [damask.Symmetry.lattices[-1]],
 
 (options,filenames) = parser.parse_args()
 
-input = [options.eulers     != None,
-         options.a          != None and \
-         options.b          != None and \
-         options.c          != None,
-         options.matrix     != None,
-         options.quaternion != None,
-         options.microstructure != None,
+input = [options.eulers     is not None,
+         options.a          is not None and \
+         options.b          is not None and \
+         options.c          is not None,
+         options.matrix     is not None,
+         options.quaternion is not None,
+         options.microstructure is not None,
         ]
 
 if np.sum(input) != 1:
   parser.error('need either microstructure label or exactly one orientation input format.')
-if options.axes != None and not set(options.axes).issubset(set(['x','+x','-x','y','+y','-y','z','+z','-z'])):
+if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x','y','+y','-y','z','+z','-z'])):
   parser.error('invalid axes {} {} {}.'.format(*options.axes))
 
 (label,dim,inputtype) = [(options.eulers,3,'eulers'),
@@ -157,7 +157,7 @@ for name in filenames:
   if coordDim == 2:
     table.data = np.insert(table.data,2,np.zeros(len(table.data)),axis=1)  # add zero z coordinate for two-dimensional input
     if options.verbose: damask.util.croak('extending to 3D...')
-  if options.phase == None:
+  if options.phase is None:
     table.data = np.column_stack((table.data,np.ones(len(table.data))))  # add single phase if no phase column given
     if options.verbose: damask.util.croak('adding dummy phase info...')
 
@@ -168,7 +168,7 @@ for name in filenames:
   maxcorner = np.array(map(max,coords))
   grid      = np.array(map(len,coords),'i')
   size      = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)  # size from edge to edge = dim * n/(n-1)
-  size      = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))       # spacing for grid==1 equal to smallest among other spacings
+  size      = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))       # spacing for grid==1 set to smallest among other spacings
   delta     = size/np.maximum(np.ones(3,'d'), grid)
   origin    = mincorner - 0.5*delta                                              # shift from cell center to corner
 
@@ -188,7 +188,7 @@ for name in filenames:
 
 # ------------------------------------------ process data ------------------------------------------
 
-  colOri = table.label_index(label)+(3-coordDim)  # column(s) of orientation data (following 3 or 2 coordinates that were expanded to 3!)
+  colOri = table.label_index(label)+(3-coordDim)  # column(s) of orientation data followed by 3 coordinates
 
   if inputtype == 'microstructure':
 
@@ -207,9 +207,9 @@ for name in filenames:
 
     statistics = {'global': 0, 'local': 0}
     grain = -np.ones(N,dtype = 'int32')  # initialize empty microstructure
-    orientations = []                    # empty list of orientations
-    multiplicity = []                    # empty list of orientation multiplicity (number of group members)
-    phases       = []                    # empty list of phase info
+    orientations = []                    # orientations
+    multiplicity = []                    # orientation multiplicity (number of group members)
+    phases       = []                    # phase info
     nGrains = 0                          # counter for detected grains
     existingGrains = np.arange(nGrains)
     myPos = 0                            # position (in list) of current grid point
@@ -227,7 +227,7 @@ for name in filenames:
 
       myData = table.data[index[myPos]]                               # read data for current grid point
       myPhase = int(myData[colPhase])
-      mySym = options.symmetry[min(myPhase,len(options.symmetry))-1]  # select symmetry from option (take last specified option for all with higher index)
+      mySym = options.symmetry[min(myPhase,len(options.symmetry))-1]  # take last specified option for all with higher index
 
       if inputtype == 'eulers':
         o = damask.Orientation(Eulers = myData[colOri:colOri+3]*toRadians,
@@ -250,26 +250,27 @@ for name in filenames:
 
       if options.tolerance > 0.0:                                    # only try to compress orientations if asked to
         neighbors = np.array(KDTree.query_ball_point([x,y,z], 3))    # point indices within radius
+        # filter neighbors: skip myself, anyone further ahead (cannot yet have a grain ID), and other phases
         neighbors = neighbors[(neighbors < myPos) & \
-                              (table.data[index[neighbors],colPhase] == myPhase)]  # filter neighbors: skip myself, anyone further ahead (cannot yet have a grain ID), and other phases
+                              (table.data[index[neighbors],colPhase] == myPhase)]
         grains = np.unique(grain[neighbors])                         # unique grain IDs among valid neighbors
 
         if len(grains) > 0:                                          # check immediate neighborhood first
           cos_disorientations = np.array([o.disorientation(orientations[grainID],
                                                            SST = False)[0].quaternion.w \
                                           for grainID in grains])    # store disorientation per grainID
-          closest_grain = np.argmax(cos_disorientations)             # find grain among grains that has closest orientation to myself
+          closest_grain = np.argmax(cos_disorientations)             # grain among grains with closest orientation to myself
           match = 'local'
 
           if cos_disorientations[closest_grain] < threshold:         # orientation not close enough?
-            grains = existingGrains[np.atleast_1d( ( np.array(phases) == myPhase ) & \
-                                                   ( np.in1d(existingGrains,grains,invert=True) ) )]  # check every other already identified grain (of my phase)
+            grains = existingGrains[np.atleast_1d( (np.array(phases) == myPhase ) & \
+                                                   (np.in1d(existingGrains,grains,invert=True)))]     # other already identified grains (of my phase)
 
             if len(grains) > 0:
               cos_disorientations = np.array([o.disorientation(orientations[grainID],
                                                                SST = False)[0].quaternion.w \
                                               for grainID in grains])  # store disorientation per grainID
-              closest_grain = np.argmax(cos_disorientations)           # find grain among grains that has closest orientation to myself
+              closest_grain = np.argmax(cos_disorientations)           # grain among grains with closest orientation to myself
               match = 'global'
 
               if cos_disorientations[closest_grain] >= threshold:      # orientation now close enough?
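The compression loop above leans on `scipy.spatial.cKDTree.query_ball_point` to restrict the expensive disorientation test to spatial neighbors that already carry a grain ID. A self-contained sketch of that pattern, with a plain scalar "similarity" standing in for the DAMASK disorientation call (toy data, hypothetical radius and threshold):

import numpy as np
from scipy import spatial

points = np.random.rand(200, 3)               # cell-center positions
values = np.random.rand(200)                  # stand-in for per-point orientation
tree = spatial.cKDTree(points)

threshold = 0.1                               # hypothetical similarity cutoff
group = -np.ones(len(points), dtype=int)      # -1 = not yet assigned
nGroups = 0
for me in range(len(points)):
    neighbors = np.array(tree.query_ball_point(points[me], 0.2))
    neighbors = neighbors[neighbors < me]     # only points that already have a group ID
    for other in neighbors:
        if abs(values[me] - values[other]) < threshold:
            group[me] = group[other]          # join first sufficiently similar neighbor
            break
    if group[me] < 0:                         # no match: open a new group
        group[me] = nGroups
        nGroups += 1
print(nGroups, 'groups')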
@@ -331,7 +332,7 @@ for name in filenames:
     config_header += ['<texture>']
     for i,orientation in enumerate(orientations):
       config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
-                        'axes\t%s %s %s'%tuple(options.axes) if options.axes != None else '',
+                        'axes\t%s %s %s'%tuple(options.axes) if options.axes is not None else '',
                         '(gauss)\tphi1 %g\tPhi %g\tphi2 %g\tscatter 0.0\tfraction 1.0'%tuple(orientation.asEulers(degrees = True)),
                        ]
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
 
-import os,sys,string,math
+import os,sys,math
 import numpy as np
 from optparse import OptionParser
 from scipy import ndimage
@@ -81,7 +81,8 @@ for name in filenames:
     periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]/2:-grid[0]/2,
                                                               grid[1]/2:-grid[1]/2,
                                                               grid[2]/2:-grid[2]/2]  # periodically extend the microstructure
-    microstructure_original = np.copy(microstructure)  # store a copy of the initial microstructure to find locations of immutable indices
+    # store a copy of the initial microstructure to find locations of immutable indices
+    microstructure_original = np.copy(microstructure)
 
     X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]]
     gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d))/math.pow(2.0*np.pi*options.d*options.d,1.5)
@@ -99,31 +100,36 @@ for name in filenames:
     for i in (-1,0,1):
       for j in (-1,0,1):
         for k in (-1,0,1):
+          # assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood)
           interfaceEnergy = np.maximum(boundary,
                                        interfacialEnergy(microstructure,np.roll(np.roll(np.roll(
-                                       microstructure,i,axis=0), j,axis=1), k,axis=2)))  # assign interfacial energy to all voxels that have a differing neighbor (in Moore neighborhood)
+                                       microstructure,i,axis=0), j,axis=1), k,axis=2)))
+    # periodically extend interfacial energy array by half a grid size in positive and negative directions
     periodic_interfaceEnergy = np.tile(interfaceEnergy,(3,3,3))[grid[0]/2:-grid[0]/2,
                                                                 grid[1]/2:-grid[1]/2,
-                                                                grid[2]/2:-grid[2]/2]  # periodically extend interfacial energy array by half a grid size in positive and negative directions
-    index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0.,  # transform bulk volume (i.e. where interfacial energy is zero)
+                                                                grid[2]/2:-grid[2]/2]
+    # transform bulk volume (i.e. where interfacial energy is zero)
+    index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0.,
                                                       return_distances = False,
-                                                      return_indices = True)  # want array index of nearest voxel on periodically extended boundary
-#    boundaryExt = boundaryExt[index[0].flatten(),index[1].flatten(),index[2].flatten()].reshape(boundaryExt.shape)  # fill bulk with energy of nearest interface | question PE: what "flatten" for?
+                                                      return_indices = True)
+    # want array index of nearest voxel on periodically extended boundary
     periodic_bulkEnergy = periodic_interfaceEnergy[index[0],
                                                    index[1],
                                                    index[2]].reshape(2*grid)  # fill bulk with energy of nearest interface
-    diffusedEnergy = np.fft.irfftn(np.fft.rfftn(np.where(ndimage.morphology.binary_dilation(interfaceEnergy > 0.,
-                                                                                            structure = struc,
-                                                                                            iterations = options.d/2 + 1),  # fat boundary | question PE: why 2d - 1? I would argue for d/2 + 1 !!
-                                                         periodic_bulkEnergy[grid[0]/2:-grid[0]/2,  # retain filled energy on fat boundary...
-                                                                             grid[1]/2:-grid[1]/2,
-                                                                             grid[2]/2:-grid[2]/2],  # ...and zero everywhere else
-                                                         0.)\
-                                   )*gauss)
+    diffusedEnergy = np.fft.irfftn(np.fft.rfftn(
+                                   np.where(
+                                            ndimage.morphology.binary_dilation(interfaceEnergy > 0.,
+                                                                               structure = struc,
+                                                                               iterations = options.d/2 + 1),  # fat boundary | PE: why 2d-1? I would argue for d/2 + 1
+                                            periodic_bulkEnergy[grid[0]/2:-grid[0]/2,  # retain filled energy on fat boundary...
+                                                                grid[1]/2:-grid[1]/2,
+                                                                grid[2]/2:-grid[2]/2],  # ...and zero everywhere else
+                                            0.))*gauss)
     periodic_diffusedEnergy = np.tile(diffusedEnergy,(3,3,3))[grid[0]/2:-grid[0]/2,
                                                               grid[1]/2:-grid[1]/2,
                                                               grid[2]/2:-grid[2]/2]  # periodically extend the smoothed bulk energy
-    index = ndimage.morphology.distance_transform_edt(periodic_diffusedEnergy >= 0.5,  # transform voxels close to interface region | question PE: what motivates 1/2 (could be any small number, or)?
+    # transform voxels close to interface region | question PE: what motivates 1/2 (could be any small number, or)?
+    index = ndimage.morphology.distance_transform_edt(periodic_diffusedEnergy >= 0.5,
                                                       return_distances = False,
                                                       return_indices = True)  # want index of closest bulk grain
     microstructure = periodic_microstructure[index[0],
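The core trick in this hunk is `distance_transform_edt(..., return_indices=True)`: instead of distances, SciPy can return, for every voxel, the index of the nearest voxel where the input is zero (here: on the interface), which then floods the bulk with the energy of its closest interface. A reduced 2-D sketch of that fill (toy array):

import numpy as np
from scipy import ndimage

energy = np.zeros((5, 5))
energy[2, 2] = 1.0                                     # single 'interface' voxel

# indices of the nearest voxel where 'energy == 0.0' is False, i.e. on the interface
index = ndimage.distance_transform_edt(energy == 0.0,
                                       return_distances=False,
                                       return_indices=True)
filled = energy[index[0], index[1]]                    # each voxel inherits nearest interface energy
print(filled)                                          # all ones: bulk flooded from the interface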
@@ -133,10 +139,11 @@ for name in filenames:
                                              grid[2]/2:-grid[2]/2]  # extend grains into interface region
 
     immutable = np.zeros(microstructure.shape, dtype=bool)
+    # find locations where immutable microstructures have been or are now
     for micro in options.immutable:
-      immutable += np.logical_or(microstructure == micro, microstructure_original == micro)  # find locations where immutable microstructures have been or are now
+      immutable += np.logical_or(microstructure == micro, microstructure_original == micro)
 
-    microstructure = np.where(immutable, microstructure_original,microstructure)  # undo any changes involving immutable microstructures
+    # undo any changes involving immutable microstructures
+    microstructure = np.where(immutable, microstructure_original,microstructure)
 
 # --- renumber to sequence 1...Ngrains if requested ------------------------------------------------
 # http://stackoverflow.com/questions/10741346/np-frequency-counts-for-unique-values-in-an-array
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
 
-import os,string,re
+import os,re
 from optparse import OptionParser
 import damask
 
@@ -3,7 +3,7 @@
 
 from optparse import OptionParser
 import damask
-import os,sys,math,re,random,string
+import os,sys,math,random
 import numpy as np
 
 scriptName = os.path.splitext(os.path.basename(__file__))[0]
@@ -19,7 +19,7 @@ def integerFactorization(i):
   return j
 
 def binAsBins(bin,intervals):
-  """ explode compound bin into 3D bins list """
+  """explode compound bin into 3D bins list"""
   bins = [0]*3
   bins[0] = (bin//(intervals[1] * intervals[2])) % intervals[0]
   bins[1] = (bin//intervals[2]) % intervals[1]
@@ -27,17 +27,17 @@ def binAsBins(bin,intervals):
   return bins
 
 def binsAsBin(bins,intervals):
-  """ implode 3D bins into compound bin """
+  """implode 3D bins into compound bin"""
   return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]
 
 def EulersAsBins(Eulers,intervals,deltas,center):
-  """ return list of Eulers translated into 3D bins list """
+  """return list of Eulers translated into 3D bins list"""
   return [int((euler+(0.5-center)*delta)//delta)%interval \
           for euler,delta,interval in zip(Eulers,deltas,intervals) \
          ]
 
 def binAsEulers(bin,intervals,deltas,center):
-  """ compound bin number translated into list of Eulers """
+  """compound bin number translated into list of Eulers"""
   Eulers = [0.0]*3
   Eulers[2] = (bin%intervals[2] + center)*deltas[2]
   Eulers[1] = (bin//intervals[2]%intervals[1] + center)*deltas[1]
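`binAsBins`/`binsAsBin` are plain mixed-radix conversions: a compound bin index encodes three Euler-angle bin indices the same way digits encode a number. A quick round-trip check, reproducing the arithmetic above (the third component of `binAsBins` is inferred from the truncated hunk):

def binAsBins(bin, intervals):
    """explode compound bin into 3D bins list"""
    return [(bin//(intervals[1]*intervals[2])) % intervals[0],
            (bin//intervals[2]) % intervals[1],
            bin % intervals[2]]

def binsAsBin(bins, intervals):
    """implode 3D bins into compound bin"""
    return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]

intervals = [36, 18, 36]                      # e.g. 10-degree bins in phi1, Phi, phi2
for b in (0, 1, 1000, 36*18*36 - 1):
    assert binsAsBin(binAsBins(b, intervals), intervals) == b
print('round trip ok')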
@@ -45,7 +45,7 @@ def binAsEulers(bin,intervals,deltas,center):
   return Eulers
 
 def directInvRepetitions(probability,scale):
-  """ calculate number of samples drawn by direct inversion """
+  """calculate number of samples drawn by direct inversion"""
   nDirectInv = 0
   for bin in range(len(probability)):                    # loop over bins
     nDirectInv += int(round(probability[bin]*scale))     # calc repetition
@@ -55,15 +55,12 @@ def directInvRepetitions(probability,scale):
 # ---------------------- sampling methods -----------------------------------------------------------------------
 
 # ----- efficient algorithm ---------
-
 def directInversion (ODF,nSamples):
-  """ ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians) """
-
+  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
   nOptSamples = max(ODF['nNonZero'],nSamples)  # random subsampling if too little samples requested
-
   nInvSamples = 0
   repetition = [None]*ODF['nBins']
   probabilityScale = nOptSamples  # guess
 
   scaleLower = 0.0
   nInvSamplesLower = 0
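For orientation: `directInversion` draws each bin `int(round(dV_V[bin]*scale))` times and searches on `scale` (between `scaleLower` and the current guess) until the repetition counts sum to roughly the requested sample size; because rounding makes the count a step function of `scale`, an exact hit may still need the random pruning step that follows in the script. A compact sketch of the inversion idea under those assumptions (toy ODF):

import random

def directInvRepetitions(probability, scale):
    """number of samples drawn by direct inversion at a given scale"""
    return sum(int(round(p*scale)) for p in probability)

probability = [0.5, 0.3, 0.2]                  # toy ODF, normalized to 1
nSamples = 10
scale = nSamples                               # initial guess, as in the script
repetition = [int(round(p*scale)) for p in probability]
samples = [bin for bin, n in enumerate(repetition) for _ in range(n)]
random.shuffle(samples)                        # randomize draw order
print(samples[:nSamples])                      # bin indices, frequency ~ probability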
@@ -96,7 +93,7 @@ def directInversion (ODF,nSamples):
   for bin in range(ODF['nBins']):                          # loop over bins
     repetition[bin] = int(round(ODF['dV_V'][bin]*scale))   # calc repetition
 
-# build set
+  # build set
   set = [None]*nInvSamples
   i = 0
   for bin in range(ODF['nBins']):
@@ -110,7 +107,6 @@ def directInversion (ODF,nSamples):
     if (j == nInvSamples-1): ex = j
     else: ex = int(round(random.uniform(j+0.5,nInvSamples-0.5)))
     bin = set[ex]
-    bins = binAsBins(bin,ODF['interval'])  # PE: why are we doing this??
     Eulers = binAsEulers(bin,ODF['interval'],ODF['delta'],ODF['center'])
     orientations[j] = np.degrees(Eulers)
     reconstructedODF[bin] += unitInc
@@ -122,8 +118,7 @@ def directInversion (ODF,nSamples):
 # ----- trial and error algorithms ---------
 
 def MonteCarloEulers (ODF,nSamples):
-  """ ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians) """
-
+  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
   countMC = 0
   maxdV_V = max(ODF['dV_V'])
   orientations = np.zeros((nSamples,3),'f')
@@ -146,8 +141,7 @@ def MonteCarloEulers (ODF,nSamples):
 
 
 def MonteCarloBins (ODF,nSamples):
-  """ ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians) """
-
+  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
   countMC = 0
   maxdV_V = max(ODF['dV_V'])
   orientations = np.zeros((nSamples,3),'f')
@@ -169,8 +163,7 @@ def MonteCarloBins (ODF,nSamples):
 
 
 def TothVanHoutteSTAT (ODF,nSamples):
-  """ ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians) """
-
+  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
   orientations = np.zeros((nSamples,3),'f')
   reconstructedODF = np.zeros(ODF['nBins'],'f')
   unitInc = 1.0/nSamples
@@ -199,7 +192,7 @@ def TothVanHoutteSTAT (ODF,nSamples):
 # --------------------------------------------------------------------
 #                                MAIN
 # --------------------------------------------------------------------
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description ="""
 Transform linear binned ODF data into given number of orientations.
 IA: integral approximation, STAT: Van Houtte, MC: Monte Carlo
 
@@ -251,7 +244,7 @@ for name in filenames:
     continue
   damask.util.report(scriptName,name)
 
-  randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed == None else options.randomSeed  # random seed per file for second phase
+  randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed is None else options.randomSeed  # random seed per file for second phase
   random.seed(randomSeed)
 
 # ------------------------------------------ read header and data ---------------------------------
@@ -308,13 +301,13 @@ for name in filenames:
                       'Reference Integral: %12.11f\n'%(ODF['limit'][0]*ODF['limit'][2]*(1-math.cos(ODF['limit'][1]))),
                      ])
 
-# call methods
+  # call methods
   Functions = {'IA': 'directInversion', 'STAT': 'TothVanHoutteSTAT', 'MC': 'MonteCarloBins'}
   method = Functions[options.algorithm]
 
   Orientations, ReconstructedODF = (globals()[method])(ODF,nSamples)
 
-# calculate accuracy of sample
+  # calculate accuracy of sample
   squaredDiff    = {'orig':0.0,method:0.0}
   squaredRelDiff = {'orig':0.0,method:0.0}
   mutualProd     = {'orig':0.0,method:0.0}
@@ -375,7 +368,7 @@ for name in filenames:
                        '(gauss) phi1 {} Phi {} phi2 {} scatter 0.0 fraction 1.0'.format(*eulers),
                       ]
 
-#--- output finalization --------------------------------------------------------------------------
+  #--- output finalization --------------------------------------------------------------------------
 
   with (open(os.path.splitext(name)[0]+'_'+method+'_'+str(nSamples)+'_material.config','w')) as outfile:
     outfile.write('\n'.join(materialConfig)+'\n')
@@ -1,13 +1,14 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
 
-'''
+"""
 Writes meaningful labels to the marc input file (*.dat)
-based on the files
+
+output is based on the files
 <modelname_jobname>.output<Homogenization/Crystallite/Constitutive>
 that are written during the first run of the model.
-'''
-import sys,os,re,string
+"""
+import sys,os,re
 from optparse import OptionParser
 import damask
 
|
@ -16,7 +17,6 @@ scriptID = ' '.join([scriptName,damask.version])
|
|||
|
||||
# -----------------------------
|
||||
def ParseOutputFormat(filename,what,me):
|
||||
# -----------------------------
|
||||
format = {'outputs':{},'specials':{'brothers':[]}}
|
||||
|
||||
outputmetafile = filename+'.output'+what
|
||||
|
@@ -121,7 +121,7 @@ for file in files:
 
   for what in me:
     outputFormat[what] = ParseOutputFormat(formatFile,what,me[what])
-    if not '_id' in outputFormat[what]['specials']:
+    if '_id' not in outputFormat[what]['specials']:
       print "'%s' not found in <%s>"%(me[what],what)
       print '\n'.join(map(lambda x:'  '+x,outputFormat[what]['specials']['brothers']))
       sys.exit(1)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
 
-import sys,os,string
+import sys,os
 import numpy as np
 from optparse import OptionParser
 import damask
|
@ -17,14 +17,13 @@ def outMentat(cmd,locals):
|
|||
exec(cmd[3:])
|
||||
elif cmd[0:3] == '(?)':
|
||||
cmd = eval(cmd[3:])
|
||||
py_send(cmd)
|
||||
py_mentat.py_send(cmd)
|
||||
else:
|
||||
py_send(cmd)
|
||||
py_mentat.py_send(cmd)
|
||||
return
|
||||
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
def outFile(cmd,locals,dest):
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
if cmd[0:3] == '(!)':
|
||||
exec(cmd[3:])
|
||||
elif cmd[0:3] == '(?)':
|
||||
|
@@ -37,7 +36,6 @@ def outFile(cmd,locals,dest):
 
 #-------------------------------------------------------------------------------------------------
 def output(cmds,locals,dest):
-#-------------------------------------------------------------------------------------------------
   for cmd in cmds:
     if isinstance(cmd,list):
       output(cmd,locals,dest)
@@ -58,12 +56,12 @@ def servoLink():
          'max': np.zeros(3,dtype='d'),
          'delta': np.zeros(3,dtype='d'),
         }
-  Nnodes = py_get_int("nnodes()")
+  Nnodes = py_mentat.py_get_int("nnodes()")
   NodeCoords = np.zeros((Nnodes,3),dtype='d')
   for node in xrange(Nnodes):
-    NodeCoords[node,0] = py_get_float("node_x(%i)"%(node+1))
-    NodeCoords[node,1] = py_get_float("node_y(%i)"%(node+1))
-    NodeCoords[node,2] = py_get_float("node_z(%i)"%(node+1))
+    NodeCoords[node,0] = py_mentat.py_get_float("node_x(%i)"%(node+1))
+    NodeCoords[node,1] = py_mentat.py_get_float("node_y(%i)"%(node+1))
+    NodeCoords[node,2] = py_mentat.py_get_float("node_z(%i)"%(node+1))
   box['min'] = NodeCoords.min(axis=0)  # find the bounding box
   box['max'] = NodeCoords.max(axis=0)
   box['delta'] = box['max']-box['min']
@@ -79,7 +77,6 @@ def servoLink():
 #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-# loop over all nodes
   for node in xrange(Nnodes):
     pos = {}
     key = {}
     maxFlag = [False, False, False]
     Nmax = 0
@@ -166,7 +163,7 @@ else:
   file={'croak':sys.stdout}
 
 try:
-  from py_mentat import *
+  import py_mentat
 except:
   file['croak'].write('error: no valid Mentat release found')
   sys.exit(-1)
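Replacing `from py_mentat import *` with `import py_mentat` (and qualifying every call as `py_mentat.py_send(...)` etc.) is the recurring change across the Mentat scripts in this commit: the star import hides where `py_send` and friends come from, pollutes the module namespace, and defeats static checkers. The same pattern with a standard-library module, purely for illustration:

# discouraged: names appear from nowhere and may shadow local ones
# from math import *
# print(sqrt(2.0))

# preferred: the origin of every name stays visible at the call site
import math
print(math.sqrt(2.0))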
@@ -176,8 +173,9 @@ outputLocals = {}
 file['croak'].write('\033[1m'+scriptName+'\033[0m\n\n')
 file['croak'].write( 'waiting to connect...\n')
 try:
-  py_connect('',options.port)
-  output(['*draw_manual'],outputLocals,'Mentat')  # prevent redrawing in Mentat, should be much faster. Since py_connect has no return value, try this to determine if failed or not
+  py_mentat.py_connect('',options.port)
+  # prevent redrawing in Mentat, should be much faster. Since py_connect has no return value, try this to determine if failed or not
+  output(['*draw_manual'],outputLocals,'Mentat')
 except:
   file['croak'].write('Could not connect. Set Tools/Python/"Run as Separate Process" & "Initiate"...\n')
   sys.exit()
@@ -191,7 +189,7 @@ output(['*remove_all_servos',
 
 cmds = servoLink()
 output(cmds,outputLocals,'Mentat')
-py_disconnect()
+py_mentat.py_disconnect()
 
 if options.verbose:
   output(cmds,outputLocals,sys.stdout)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
 
-import os, sys, string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask
|
@ -12,19 +12,17 @@ sys.path.append(damask.solver.Marc().libraryPath('../../'))
|
|||
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
def outMentat(cmd,locals):
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
if cmd[0:3] == '(!)':
|
||||
exec(cmd[3:])
|
||||
elif cmd[0:3] == '(?)':
|
||||
cmd = eval(cmd[3:])
|
||||
py_send(cmd)
|
||||
py_mentat.py_send(cmd)
|
||||
else:
|
||||
py_send(cmd)
|
||||
py_mentat.py_send(cmd)
|
||||
return
|
||||
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
def outFile(cmd,locals,dest):
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
if cmd[0:3] == '(!)':
|
||||
exec(cmd[3:])
|
||||
elif cmd[0:3] == '(?)':
|
||||
|
@ -36,7 +34,6 @@ def outFile(cmd,locals,dest):
|
|||
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
def output(cmds,locals,dest):
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
for cmd in cmds:
|
||||
if isinstance(cmd,list):
|
||||
output(cmd,locals,dest)
|
||||
|
@@ -51,7 +48,6 @@ def output(cmds,locals,dest):
 
 #-------------------------------------------------------------------------------------------------
 def init():
-#-------------------------------------------------------------------------------------------------
   return [
   "#"+' '.join([scriptID] + sys.argv[1:]),
   "*draw_manual",  # prevent redrawing in Mentat, should be much faster
@@ -70,7 +66,6 @@ def init():
 
 #-------------------------------------------------------------------------------------------------
 def mesh(r,d):
-#-------------------------------------------------------------------------------------------------
   return [
   "*add_nodes",
   "%f %f %f"%(0.0,0.0,0.0),
@@ -102,7 +97,6 @@ def mesh(r,d):
 
 #-------------------------------------------------------------------------------------------------
 def material():
-#-------------------------------------------------------------------------------------------------
   cmds = [\
   "*new_mater standard",
   "*mater_option general:state:solid",
@@ -112,7 +106,7 @@ def material():
   "*add_mater_elements",
   "all_existing",
   "*geometry_type mech_three_solid",
-# "*geometry_option red_integ_capacity:on",  # see below: reduced integration with one IP gave trouble being always OUTDATED...
+# "*geometry_option red_integ_capacity:on",  # reduced integration with one IP gave trouble being always OUTDATED...
   "*add_geometry_elements",
   "all_existing",
   ]
@@ -122,13 +116,13 @@ def material():
 
 #-------------------------------------------------------------------------------------------------
 def geometry():
-#-------------------------------------------------------------------------------------------------
   cmds = [\
   "*geometry_type mech_three_solid",
 # "*geometry_option red_integ_capacity:on",
   "*add_geometry_elements",
   "all_existing",
-  "*element_type 7",  # we are NOT using reduced integration (type 117) but opt for /elementhomogeneous/ in the respective phase description (material.config)
+  # we are NOT using reduced integration (type 117) but opt for /elementhomogeneous/ in the respective phase description (material.config)
+  "*element_type 7",
   "all_existing",
   ]
 
|
@ -137,7 +131,6 @@ def geometry():
|
|||
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
def initial_conditions(homogenization,microstructures):
|
||||
#-------------------------------------------------------------------------------------------------
|
||||
elements = []
|
||||
element = 0
|
||||
for id in microstructures:
|
||||
|
@@ -204,7 +197,7 @@ parser.set_defaults(port = None,
 
 if options.port:
   try:
-    from py_mentat import *
+    import py_mentat
   except:
     parser.error('no valid Mentat release found.')
 
|
@ -258,9 +251,9 @@ for name in filenames:
|
|||
|
||||
outputLocals = {}
|
||||
if options.port:
|
||||
py_connect('',options.port)
|
||||
py_mentat.py_connect('',options.port)
|
||||
output(cmds,outputLocals,'Mentat')
|
||||
py_disconnect()
|
||||
py_mentat.py_disconnect()
|
||||
else:
|
||||
output(cmds,outputLocals,table.__IO__['out']) # bad hack into internals of table class...
|
||||
|
||||
|
|
|
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
 
-import sys,os,math,re,string
+import sys,os,math,re
 from optparse import OptionParser
 import damask
 
@@ -18,7 +18,7 @@ except:
   sys.path.append(damask.solver.Marc().libraryPath('../../'))
 
 try:  # check for MSC.Mentat Python interface
-  from py_mentat import *
+  import py_mentat
   MentatCapability = True
 except:
   MentatCapability = False
@@ -29,10 +29,10 @@ def outMentat(cmd,locals):
     exec(cmd[3:])
   elif cmd[0:3] == '(?)':
     cmd = eval(cmd[3:])
-    py_send(cmd)
+    py_mentat.py_send(cmd)
     if 'log' in locals: locals['log'].append(cmd)
   else:
-    py_send(cmd)
+    py_mentat.py_send(cmd)
     if 'log' in locals: locals['log'].append(cmd)
   return
 
@@ -83,10 +83,9 @@ def rcbOrientationParser(content,idcolumn):
 
   return grains
 
-def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):  # parser for TSL-OIM reconstructed boundary files
-
-# find bounding box
+def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
+  """parser for TSL-OIM reconstructed boundary files"""
+  # find bounding box
   boxX = [1.*sys.maxint,-1.*sys.maxint]
   boxY = [1.*sys.maxint,-1.*sys.maxint]
   x = [0.,0.]
@@ -145,8 +144,8 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
             match = True
             break
         break
-# force to boundary if inside tolerance to it
       if (not match):
+        # force to boundary if inside tolerance to it
         if (abs(x[i])<dX*tolerance):
           x[i] = 0
         if (abs(dX-x[i])<dX*tolerance):
@@ -226,7 +225,7 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
     for keyY in allkeysY:
       points.append({'coords': [float(keyX)*scalePatch,float(keyY)*scalePatch], 'segments': connectivityXY[keyX][keyY]})
       for segment in connectivityXY[keyX][keyY]:
-        if (segments[segment] == None):
+        if (segments[segment] is None):
           segments[segment] = pointId
         else:
           segments[segment].append(pointId)
@@ -259,7 +258,7 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
                points[myEnd]['coords'][1]-points[myStart]['coords'][1]]
         myLen = math.sqrt(myV[0]**2+myV[1]**2)
         best = {'product': -2.0, 'peek': -1, 'len': -1, 'point': -1}
-        for peek in points[myEnd]['segments']:  # trying in turn all segments emanating from current endPoint
+        for peek in points[myEnd]['segments']:  # trying in turn all segments emanating from current end
           if peek == myWalk:
             continue
           peekEnd = segments[peek][1] if segments[peek][0] == myEnd else segments[peek][0]
@@ -652,7 +651,6 @@ def cleanUp(a):
 
 # -------------------------
 def image(name,imgsize,marginX,marginY,rcData):
-# -------------------------
 
   dX = max([coords[0] for coords in rcData['point']])
   dY = max([coords[1] for coords in rcData['point']])
@@ -697,8 +695,8 @@ def image(name,imgsize,marginX,marginY,rcData):
   img.save(name+'.png',"PNG")
 
 # -------------------------
-def inside(x,y,points):  # tests whether point(x,y) is within polygon described by points
-# -------------------------
+def inside(x,y,points):
+  """tests whether point(x,y) is within polygon described by points"""
   inside = False
   npoints=len(points)
   (x1,y1) = points[npoints-1]  # start with last point of points
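`inside()` is a standard ray-casting test: walk the polygon edges and toggle a flag each time a horizontal ray from (x,y) crosses an edge. A self-contained version of that algorithm (not the script's exact code, whose body is truncated in this hunk):

def inside(x, y, points):
    """ray-casting point-in-polygon test"""
    result = False
    x1, y1 = points[-1]                      # start with last point, closing the loop
    for x2, y2 in points:
        if (y1 < y) != (y2 < y):             # edge straddles the ray's height
            if x < x1 + (y - y1)*(x2 - x1)/(y2 - y1):
                result = not result          # ray crosses this edge
        x1, y1 = x2, y2
    return result

square = [(0, 0), (2, 0), (2, 2), (0, 2)]
print(inside(1, 1, square), inside(3, 1, square))   # True False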
@@ -719,8 +717,8 @@ def inside(x,y,points):
   return inside
 
 # -------------------------
-def fftbuild(rcData,height,xframe,yframe,resolution,extrusion):  # build array of grain numbers
-# -------------------------
+def fftbuild(rcData,height,xframe,yframe,resolution,extrusion):
+  """build array of grain numbers"""
   maxX = -1.*sys.maxint
   maxY = -1.*sys.maxint
   for line in rcData['point']:  # find data range
@@ -926,12 +924,12 @@ if 'mentat' in options.output:
         ]
 
   outputLocals = {'log':[]}
-  if (options.port != None):
-    py_connect('',options.port)
+  if (options.port is not None):
+    py_mentat.py_connect('',options.port)
     try:
       output(cmds,outputLocals,'Mentat')
     finally:
-      py_disconnect()
+      py_mentat.py_disconnect()
     if 'procedure' in options.output:
       output(outputLocals['log'],outputLocals,'Stdout')
   else:
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
 
-import os,sys,string,math,random
+import os,sys,math,random
 import numpy as np
 import damask
 from optparse import OptionParser,OptionGroup
@@ -14,9 +14,7 @@ scriptID = ' '.join([scriptName,damask.version])
 # ------------------------------------------ aux functions ---------------------------------
 
 def kdtree_search(cloud, queryPoints):
-  '''
-  find distances to nearest neighbor among cloud (N,d) for each of the queryPoints (n,d)
-  '''
+  """find distances to nearest neighbor among cloud (N,d) for each of the queryPoints (n,d)"""
   n = queryPoints.shape[0]
   distances = np.zeros(n,dtype=float)
   tree = spatial.cKDTree(cloud)
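`kdtree_search` wraps `scipy.spatial.cKDTree.query`, which returns, for each query point, the distance to (and index of) its nearest neighbor in the cloud; the selective seeder later keeps the candidate whose nearest existing seed is farthest away. A minimal sketch of that pattern (toy data):

import numpy as np
from scipy import spatial

def kdtree_search(cloud, queryPoints):
    """distance to nearest neighbor in cloud for each query point"""
    tree = spatial.cKDTree(cloud)
    distances, indices = tree.query(queryPoints)
    return distances

cloud = np.random.rand(100, 3)                # existing seed positions
candidates = np.random.rand(10, 3)            # proposed new seeds
d = kdtree_search(cloud, candidates)
print(candidates[d.argmax()])                 # candidate farthest from all existing seeds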
@@ -112,7 +110,7 @@ parser.set_defaults(randomSeed = None,
 options.grid = np.array(options.grid)
 gridSize = options.grid.prod()
 
-if options.randomSeed == None: options.randomSeed = int(os.urandom(4).encode('hex'), 16)
+if options.randomSeed is None: options.randomSeed = int(os.urandom(4).encode('hex'), 16)
 np.random.seed(options.randomSeed)  # init random generators
 random.seed(options.randomSeed)
 
|
@ -133,10 +131,12 @@ for name in filenames:
|
|||
|
||||
remarks = []
|
||||
errors = []
|
||||
if gridSize == 0: errors.append('zero grid dimension for %s.'%(', '.join([['a','b','c'][x] for x in np.where(options.grid == 0)[0]])))
|
||||
if gridSize == 0:
|
||||
errors.append('zero grid dimension for %s.'%(', '.join([['a','b','c'][x] for x in np.where(options.grid == 0)[0]])))
|
||||
if options.N > gridSize/10.: errors.append('seed count exceeds 0.1 of grid points.')
|
||||
if options.selective and 4./3.*math.pi*(options.distance/2.)**3*options.N > 0.5:
|
||||
(remarks if options.force else errors).append('maximum recommended seed point count for given distance is {}.{}'.format(int(3./8./math.pi/(options.distance/2.)**3),'..'*options.force))
|
||||
(remarks if options.force else errors).append('maximum recommended seed point count for given distance is {}.{}'.
|
||||
format(int(3./8./math.pi/(options.distance/2.)**3),'..'*options.force))
|
||||
|
||||
if remarks != []: damask.util.croak(remarks)
|
||||
if errors != []:
|
||||
|
@@ -153,7 +153,7 @@ for name in filenames:
   if not options.selective:
 
     seeds = np.zeros((3,options.N),dtype='d')              # seed positions array
-    gridpoints = random.sample(range(gridSize),options.N)  # create random permutation of all grid positions and choose first N
+    gridpoints = random.sample(range(gridSize),options.N)  # choose first N from random permutation of grid positions
 
     seeds[0,:] = (np.mod(gridpoints                   ,options.grid[0])\
                  +np.random.random(options.N)) /options.grid[0]
@@ -174,7 +174,7 @@ for name in filenames:
       distances = kdtree_search(seeds[:i],candidates)
       best = distances.argmax()
       if distances[best] > options.distance:    # require minimum separation
-        seeds[i] = candidates[best]             # take candidate with maximum separation to existing point cloud
+        seeds[i] = candidates[best]             # maximum separation to existing point cloud
         i += 1
         if i%(options.N/100.) < 1: damask.util.croak('.',False)
 
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-
 
-import os,string,itertools
+import os,itertools
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -50,8 +50,8 @@ parser.set_defaults(pos = 'pos',
 
 (options,filenames) = parser.parse_args()
 
-if options.whitelist != None: options.whitelist = map(int,options.whitelist)
-if options.blacklist != None: options.blacklist = map(int,options.blacklist)
+if options.whitelist is not None: options.whitelist = map(int,options.whitelist)
+if options.blacklist is not None: options.blacklist = map(int,options.blacklist)
 
 # --- loop over input files -------------------------------------------------------------------------
 
@@ -101,12 +101,10 @@ for name in filenames:
 
 # --- filtering of grain voxels --------------------------------------------------------------------
 
-  mask = np.logical_and(\
-                        np.ones_like(table.data[:,3],bool) \
-                        if options.whitelist == None \
+  mask = np.logical_and(
+                        np.ones_like(table.data[:,3],bool) if options.whitelist is None \
                         else np.in1d(table.data[:,3].ravel(), options.whitelist).reshape(table.data[:,3].shape),
-                        np.ones_like(table.data[:,3],bool) \
-                        if options.blacklist == None \
+                        np.ones_like(table.data[:,3],bool) if options.blacklist is None \
                         else np.invert(np.in1d(table.data[:,3].ravel(), options.blacklist).reshape(table.data[:,3].shape))
                        )
   table.data = table.data[mask]
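The mask combines two membership tests with `np.logical_and`: pass everything when a list is absent, otherwise keep (whitelist) or drop (blacklist) rows whose grain ID matches. A standalone sketch of the same filtering (toy IDs, hypothetical lists):

import numpy as np

grainIDs = np.array([1, 2, 3, 2, 4, 1])       # stand-in for column 3 of the table
whitelist = [1, 2]                            # keep only these (None = keep all)
blacklist = [2]                               # drop these (None = drop none)

mask = np.logical_and(
    np.ones_like(grainIDs, bool) if whitelist is None
    else np.in1d(grainIDs, whitelist),
    np.ones_like(grainIDs, bool) if blacklist is None
    else np.invert(np.in1d(grainIDs, blacklist)))
print(grainIDs[mask])                          # [1 1]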