diff --git a/processing/pre/hybridIA_linODFsampling.py b/processing/pre/hybridIA_linODFsampling.py
index caa747337..80d82a458 100755
--- a/processing/pre/hybridIA_linODFsampling.py
+++ b/processing/pre/hybridIA_linODFsampling.py
@@ -19,7 +19,7 @@ def integerFactorization(i):
   return j
 
 def binAsBins(bin,intervals):
-  """Explode compound bin into 3D bins list"""
+  """Explode compound bin into 3D bins list."""
   bins = [0]*3
   bins[0] = (bin//(intervals[1] * intervals[2])) % intervals[0]
   bins[1] = (bin//intervals[2]) % intervals[1]
@@ -27,17 +27,17 @@ def binAsBins(bin,intervals):
   return bins
 
 def binsAsBin(bins,intervals):
-  """Implode 3D bins into compound bin"""
+  """Implode 3D bins into compound bin."""
   return (bins[0]*intervals[1] + bins[1])*intervals[2] + bins[2]
 
 def EulersAsBins(Eulers,intervals,deltas,center):
-  """Return list of Eulers translated into 3D bins list"""
+  """Return list of Eulers translated into 3D bins list."""
   return [int((euler+(0.5-center)*delta)//delta)%interval \
           for euler,delta,interval in zip(Eulers,deltas,intervals) \
          ]
 
 def binAsEulers(bin,intervals,deltas,center):
-  """Compound bin number translated into list of Eulers"""
+  """Compound bin number translated into list of Eulers."""
   Eulers = [0.0]*3
   Eulers[2] = (bin%intervals[2] + center)*deltas[2]
   Eulers[1] = (bin//intervals[2]%intervals[1] + center)*deltas[1]
@@ -45,7 +45,7 @@ def binAsEulers(bin,intervals,deltas,center):
   return Eulers
 
 def directInvRepetitions(probability,scale):
-  """Calculate number of samples drawn by direct inversion"""
+  """Calculate number of samples drawn by direct inversion."""
   nDirectInv = 0
   for bin in range(len(probability)):                              # loop over bins
     nDirectInv += int(round(probability[bin]*scale))               # calc repetition
@@ -56,7 +56,7 @@ def directInvRepetitions(probability,scale):
 # ----- efficient algorithm ---------
 
 def directInversion (ODF,nSamples):
-  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
+  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)."""
   nOptSamples = max(ODF['nNonZero'],nSamples)                      # random subsampling if too little samples requested
 
   nInvSamples = 0
@@ -118,7 +118,7 @@ def directInversion (ODF,nSamples):
 # ----- trial and error algorithms ---------
 
 def MonteCarloEulers (ODF,nSamples):
-  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
+  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)."""
   countMC = 0
   maxdV_V = max(ODF['dV_V'])
   orientations = np.zeros((nSamples,3),'f')
@@ -141,7 +141,7 @@ def MonteCarloEulers (ODF,nSamples):
 
 
 def MonteCarloBins (ODF,nSamples):
-  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
+  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)."""
   countMC = 0
   maxdV_V = max(ODF['dV_V'])
   orientations = np.zeros((nSamples,3),'f')
@@ -163,7 +163,7 @@ def MonteCarloBins (ODF,nSamples):
 
 
 def TothVanHoutteSTAT (ODF,nSamples):
-  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)"""
+  """ODF contains 'dV_V' (normalized to 1), 'center', 'intervals', 'limits' (in radians)."""
   orientations = np.zeros((nSamples,3),'f')
   reconstructedODF = np.zeros(ODF['nBins'],'f')
   unitInc = 1.0/nSamples
@@ -235,7 +235,7 @@ if filenames == []: filenames = [None]
 
 for name in filenames:
   try:    table = damask.ASCIItable(name = name, buffered = False, readonly=True)
-  except:
+  except IOError:
     continue
   damask.util.report(scriptName,name)
diff --git a/processing/pre/patchFromReconstructedBoundaries.py b/processing/pre/patchFromReconstructedBoundaries.py
index e9196916e..b710fb2cb 100755
--- a/processing/pre/patchFromReconstructedBoundaries.py
+++ b/processing/pre/patchFromReconstructedBoundaries.py
@@ -78,13 +78,11 @@ def rcbOrientationParser(content,idcolumn):
         damask.util.croak('You might not have chosen the correct column for the grain IDs! '+
                           'Please check the "--id" option.')
         raise
-      except:
-        raise
 
   return grains
 
 def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
-  """Parser for TSL-OIM reconstructed boundary files"""
+  """Parser for TSL-OIM reconstructed boundary files."""
 # find bounding box
   boxX = [1.*sys.maxint,-1.*sys.maxint]
   boxY = [1.*sys.maxint,-1.*sys.maxint]
@@ -99,8 +97,6 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
         damask.util.croak('You might not have chosen the correct column for the segment end points! '+
                           'Please check the "--segment" option.')
         raise
-      except:
-        raise
       (x[0],y[0]) = (M[0]*x[0]+M[1]*y[0],M[2]*x[0]+M[3]*y[0])      # apply transformation to coordinates
       (x[1],y[1]) = (M[0]*x[1]+M[1]*y[1],M[2]*x[1]+M[3]*y[1])      # to get rcb --> Euler system
       boxX[0] = min(boxX[0],x[0],x[1])
@@ -728,7 +724,7 @@ def image(name,imgsize,marginX,marginY,rcData):
 
 # -------------------------
 def inside(x,y,points):
-  """Tests whether point(x,y) is within polygon described by points"""
+  """Tests whether point(x,y) is within polygon described by points."""
   inside = False
   npoints=len(points)
   (x1,y1) = points[npoints-1]                                      # start with last point of points
@@ -750,7 +746,7 @@ def inside(x,y,points):
 
 # -------------------------
 def fftbuild(rcData,height,xframe,yframe,grid,extrusion):
-  """Build array of grain numbers"""
+  """Build array of grain numbers."""
   maxX = -1.*sys.maxint
   maxY = -1.*sys.maxint
   for line in rcData['point']:                                     # find data range
@@ -883,7 +879,7 @@
 try:
   boundaryFile = open(args[0])
   boundarySegments = boundaryFile.readlines()
   boundaryFile.close()
-except:
+except IOError:
   damask.util.croak('unable to read boundary file "{}".'.format(args[0]))
   raise
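
The change common to both files is narrowing bare `except:` clauses to `except IOError:` (or dropping redundant `except: raise` blocks), so that only the anticipated I/O failure is handled and any other error surfaces unchanged. The following standalone sketch illustrates that pattern; it is not part of the patch, and the function name `read_segments` is a hypothetical example, not code from these scripts.

    # Illustrative sketch of narrow exception handling, mirroring the patched scripts.
    import sys

    def read_segments(path):
        """Read a boundary file, letting unexpected errors propagate unchanged."""
        try:
            handle = open(path)        # may raise IOError if the file is missing or unreadable
            segments = handle.readlines()
            handle.close()
        except IOError:                # only the anticipated failure mode is caught
            print('unable to read boundary file "{}".'.format(path))
            raise                      # re-raise so the caller still sees the failure
        return segments

    if __name__ == '__main__':
        print('{} segments read'.format(len(read_segments(sys.argv[1]))))

A bare `except:` at that point would also swallow unrelated errors such as KeyboardInterrupt or a typo-induced NameError, hiding real bugs behind the `continue` and `raise` paths that the scripts use.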