From fb23c49b41b35bd6b069766a1895166d50b4ec5b Mon Sep 17 00:00:00 2001
From: Martin Diehl
Date: Sat, 24 Sep 2016 15:58:13 +0200
Subject: [PATCH 1/6] should be executable

---
 processing/misc/DREAM3D_toTable.py | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 mode change 100644 => 100755 processing/misc/DREAM3D_toTable.py

diff --git a/processing/misc/DREAM3D_toTable.py b/processing/misc/DREAM3D_toTable.py
old mode 100644
new mode 100755


From a03c28f308d2988e66e3df85797c71ead411bae9 Mon Sep 17 00:00:00 2001
From: Test User
Date: Sun, 25 Sep 2016 16:41:00 +0200
Subject: [PATCH 2/6] updated version information after successful test of
 v2.0.1-148-gfb23c49

---
 VERSION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/VERSION b/VERSION
index d625e2d19..85c030ae0 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-v2.0.1-146-ge540476
+v2.0.1-148-gfb23c49

From 5345b42d71f8c9a232c5465adf9f555b47d4805e Mon Sep 17 00:00:00 2001
From: Philip Eisenlohr
Date: Sun, 25 Sep 2016 21:13:02 -0400
Subject: [PATCH 3/6] improved robustness against faulty RCB data

deals gracefully with duplicate segments, new option to export cleaned up RCB
---
 .../pre/patchFromReconstructedBoundaries.py   | 167 ++++++++++++------
 1 file changed, 114 insertions(+), 53 deletions(-)

diff --git a/processing/pre/patchFromReconstructedBoundaries.py b/processing/pre/patchFromReconstructedBoundaries.py
index ada92b09d..e4ec9ef42 100755
--- a/processing/pre/patchFromReconstructedBoundaries.py
+++ b/processing/pre/patchFromReconstructedBoundaries.py
@@ -2,6 +2,7 @@
 # -*- coding: UTF-8 no BOM -*-

 import sys,os,math,re
+import numpy as np
 from optparse import OptionParser
 import damask

@@ -63,9 +64,8 @@ def rcbOrientationParser(content,idcolumn):

   grains = []
   myOrientation = [0.0,0.0,0.0]
-  for line in content:
-    m = re.match(r'\s*(#|$)',line)
-    if m: continue                                          # skip comments and blank lines
+  for j,line in enumerate(content):
+    if re.match(r'^\s*(#|$)',line): continue                # skip comments and blank lines
     for grain in range(2):
       myID = int(line.split()[idcolumn+grain])              # get grain id
       myOrientation = map(float,line.split())[3*grain:3+3*grain]  # get orientation
@@ -75,8 +75,8 @@ def rcbOrientationParser(content,idcolumn):
       try:
         grains[myID-1] = myOrientation                      # store Euler angles
       except IndexError:
-        message = 'You might not have chosen the correct column for the grain IDs! Please check the "--id" option.'
-        print '\033[1;31m'+message+'\033[0m\n'
+        damask.util.croak('You might not have chosen the correct column for the grain IDs! '+
+                          'Please check the "--id" option.')
         raise
       except:
         raise
@@ -91,13 +91,13 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
   x = [0.,0.]
   y = [0.,0.]
   for line in content:
-    m = re.match(r'\s*(#|$)',line)
+    m = re.match(r'^\s*(#|$)',line)
     if m: continue                                          # skip comments and blank lines
     try:
       (x[0],y[0],x[1],y[1]) = map(float,line.split())[segmentcolumn:segmentcolumn+4]  # get start and end coordinates of each segment.
     except IndexError:
-      message = 'You might not have chosen the correct column for the segment end points! Please check the "--segment" option.'
-      print '\033[1;31m'+message+'\033[0m\n'
+      damask.util.croak('You might not have chosen the correct column for the segment end points! '+
+                        'Please check the "--segment" option.')
       raise
     except:
       raise
@@ -110,6 +110,9 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
   dX = boxX[1]-boxX[0]
   dY = boxY[1]-boxY[0]

+  damask.util.croak(' bounding box {},{} -- {},{}'.format(boxX[0],boxY[0],boxX[1],boxY[1]))
+  damask.util.croak(' dimension {} x {}'.format(dX,dY))
+
   if size > 0.0: scalePatch = size/dX
   else:          scalePatch = 1.0

@@ -122,8 +125,7 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
   grainNeighbors = []

   for line in content:
-    m = re.match(r'\s*(#|$)',line)
-    if m: continue                                          # skip comments and blank lines
+    if re.match(r'^\s*(#|$)',line): continue                # skip comments and blank lines
     (x[0],y[0],x[1],y[1]) = map(float,line.split())[segmentcolumn:segmentcolumn+4]    # get start and end coordinates of each segment.
     (x[0],y[0]) = (M[0]*x[0]+M[1]*y[0],M[2]*x[0]+M[3]*y[0]) # apply transformation to coordinates
     (x[1],y[1]) = (M[0]*x[1]+M[1]*y[1],M[2]*x[1]+M[3]*y[1]) # to get rcb --> Euler system
@@ -133,8 +135,8 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
     y[0] -= boxY[0]
     y[1] -= boxY[0]
     grainNeighbors.append(map(int,line.split()[idcolumn:idcolumn+2]))  # remember right and left grain per segment
-    for i in range(2):                                      # store segment to both points
-      match = False                                         # check whether point is already known (within a small range)
+    for i in range(2):                                      # store segment to both points
+      match = False                                         # check whether point is already known (within a small range)
      for posX in connectivityXY.keys():
        if (abs(float(posX)-x[i]) 0:
+    damask.util.croak(' culling {} duplicate segments...'.format(len(dupSegments)))
+    for rm in dupSegments:
+      segments[rm] = None
+
   crappyData = False
   for pointId,point in enumerate(points):
     if len(point['segments']) < 2:                          # point marks a dead end!
-      print "Dead end at segment %i (%f,%f)"\
-            %(1+point['segments'][0],boxX[0]+point['coords'][0]/scalePatch,boxY[0]+point['coords'][1]/scalePatch,)
+      damask.util.croak('dead end at segment {} for point {} ({},{}).'
+                        .format(point['segments'][0],
+                                pointId,
+                                boxX[0]+point['coords'][0]/scalePatch,boxY[0]+point['coords'][1]/scalePatch,))
       crappyData = True

  grains = {'draw': [], 'legs': []}
@@ -249,39 +269,42 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
       innerAngleSum = 0.0
       myWalk = point['segments'].pop()
       grainLegs = [myWalk]
-      if segments[myWalk][0] == myStart:
-        myEnd = segments[myWalk][1]
-      else:
-        myEnd = segments[myWalk][0]
+      myEnd = segments[myWalk][1 if segments[myWalk][0] == myStart else 0]

       while (myEnd != pointId):
-        myV = [points[myEnd]['coords'][0]-points[myStart]['coords'][0],\
-               points[myEnd]['coords'][1]-points[myStart]['coords'][1]]
+        myV = [points[myEnd]['coords'][0]-points[myStart]['coords'][0],
+               points[myEnd]['coords'][1]-points[myStart]['coords'][1]]
         myLen = math.sqrt(myV[0]**2+myV[1]**2)
+        if myLen == 0.0: damask.util.croak('mylen is zero: point {} --> {}'.format(myStart,myEnd))
         best = {'product': -2.0, 'peek': -1, 'len': -1, 'point': -1}
         for peek in points[myEnd]['segments']:              # trying in turn all segments emanating from current end
           if peek == myWalk:
-            continue
-          peekEnd = segments[peek][1] if segments[peek][0] == myEnd else segments[peek][0]
+            continue                                        # do not go back same path
+          peekEnd = segments[peek][1 if segments[peek][0] == myEnd else 0]
           peekV = [points[myEnd]['coords'][0]-points[peekEnd]['coords'][0],
                    points[myEnd]['coords'][1]-points[peekEnd]['coords'][1]]
           peekLen = math.sqrt(peekV[0]**2+peekV[1]**2)
-          crossproduct = (myV[0]*peekV[1]-myV[1]*peekV[0])/myLen/peekLen
-          dotproduct = (myV[0]*peekV[0]+myV[1]*peekV[1])/myLen/peekLen
-          if crossproduct*(dotproduct+1.0) >= best['product']:
-            best['product'] = crossproduct*(dotproduct+1.0)
+          if peekLen == 0.0: damask.util.croak('peeklen is zero: peek point {}'.format(peek))
+          crossproduct = (myV[0]*peekV[1] - myV[1]*peekV[0])/myLen/peekLen
+          dotproduct   = (myV[0]*peekV[0] + myV[1]*peekV[1])/myLen/peekLen
+          innerAngle = crossproduct*(dotproduct+1.0)
+          if innerAngle >= best['product']:                 # takes sharpest left turn
+            best['product'] = innerAngle
             best['peek'] = peek
             best['point'] = peekEnd
+
         innerAngleSum += best['product']
         myWalk = best['peek']
         myStart = myEnd
         myEnd = best['point']
+
         if myWalk in points[myStart]['segments']:
           points[myStart]['segments'].remove(myWalk)
         else:
-          sys.stderr.write(str(myWalk)+' not in segments of point '+str(myStart)+'\n')
+          damask.utilcroak('{} not in segments of point {}'.format(myWalk,myStart))
         grainDraw.append(points[myStart]['coords'])
         grainLegs.append(myWalk)
+
       if innerAngleSum > 0.0:
         grains['draw'].append(grainDraw)
         grains['legs'].append(grainLegs)
@@ -291,16 +314,26 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):

 # build overall data structure

-  rcData = {'dimension':[dX,dY], 'point': [],'segment': [], 'grain': [], 'grainMapping': []}
-  print " dimension %g x %g"%(dX,dY)
+  rcData = {'dimension':[dX,dY],
+            'bounds': [[boxX[0],boxY[0]],[boxX[1],boxY[1]]],
+            'scale': scalePatch,
+            'point': [],
+            'segment': [],
+            'neighbors': [],
+            'grain': [],
+            'grainMapping': [],
+           }

   for point in points:
     rcData['point'].append(point['coords'])
-  print " found %i points"%(len(rcData['point']))
+  damask.util.croak(' found {} points'.format(len(rcData['point'])))

-  for segment in segments:
+  for segment in segments:
     rcData['segment'].append(segment)
-  print " built %i segments"%(len(rcData['segment']))
+  damask.util.croak(' built {} segments'.format(len(rcData['segment'])))
+
+  for neighbors in grainNeighbors:
+    rcData['neighbors'].append(neighbors)

   for legs in grains['legs']:                               # loop over grains
     rcData['grain'].append(legs)                            # store list of boundary segments
@@ -314,12 +347,11 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
         myNeighbors[grainNeighbors[leg][side]] = 1
     if myNeighbors:                                         # do I have any neighbors (i.e., non-bounding box segment)
       candidateGrains = sorted(myNeighbors.iteritems(), key=lambda (k,v): (v,k), reverse=True)  # sort grain counting
-      if candidateGrains[0][0] not in rcData['grainMapping']:  # most frequent one not yet seen?
-        rcData['grainMapping'].append(candidateGrains[0][0])  # must be me then
-      else:
-        rcData['grainMapping'].append(candidateGrains[1][0])  # special case of bi-crystal situation...
+      # most frequent one not yet seen?
+      rcData['grainMapping'].append(candidateGrains[0 if candidateGrains[0][0] not in rcData['grainMapping'] else 1][0])  # must be me then
+      # special case of bi-crystal situation...

-  print " found %i grains\n"%(len(rcData['grain']))
+  damask.util.croak(' found {} grains'.format(len(rcData['grain'])))

   rcData['box'] = grains['box'] if 'box' in grains else []

@@ -670,9 +702,10 @@ def image(name,imgsize,marginX,marginY,rcData):
     draw.text([offsetX+point[0]*scaleImg,sizeY-(offsetY+point[1]*scaleImg)],"%i"%id,fill=(0,0,0))

   for id,vertex in enumerate(rcData['segment']):
+    if vertex:
       start = rcData['point'][vertex[0]]
       end   = rcData['point'][vertex[1]]
-      draw.text([offsetX+(start[0]+end[0])/2.0*scaleImg,sizeY-(offsetY+(start[1]+end[1])/2.0*scaleImg)],"%i"%id,fill=(0,0,128))
+      draw.text([offsetX+(start[0]+end[0])/2.0*scaleImg,sizeY-(offsetY+(start[1]+end[1])/2.0*scaleImg)],"%i"%id,fill=(255,0,128))
       draw.line([offsetX+start[0]*scaleImg,sizeY-(offsetY+start[1]*scaleImg),
                  offsetX+ end[0]*scaleImg,sizeY-(offsetY+ end[1]*scaleImg)],width=1,fill=(128,128,128))

@@ -789,7 +822,7 @@ reconstructed boundary file

 meshes=['dt_planar_trimesh','af_planar_trimesh','af_planar_quadmesh']
 parser.add_option('-o', '--output', action='extend', dest='output', metavar = '',
-                  help='types of output {image, mentat, procedure, spectral}')
+                  help='types of output {rcb, image, mentat, procedure, spectral}')
 parser.add_option('-p', '--port', type='int', metavar = 'int', dest='port',
                   help='Mentat connection port [%default]')
 parser.add_option('-2', '--twodimensional', action='store_true',
@@ -847,16 +880,15 @@ parser.set_defaults(output = [],

 (options, args) = parser.parse_args()

-print '\033[1m'+scriptName+'\033[0m\n'
 if not len(args):
-  parser.error('no boundary file specified')
+  parser.error('no boundary file specified.')

 try:
   boundaryFile = open(args[0])
   boundarySegments = boundaryFile.readlines()
   boundaryFile.close()
 except:
-  print 'unable to read boundary file "%s"'%args[0]
+  damask.util.croak('unable to read boundary file "{}".'.format(args[0]))
   raise

 options.output = [s.lower() for s in options.output]        # lower case
@@ -864,18 +896,46 @@ options.idcolumn -= 1 # py
 options.segmentcolumn -= 1                                  # python indexing starts with 0

 myName = os.path.splitext(args[0])[0]
-print "%s\n"%myName
+damask.util.report(scriptName,myName)

 orientationData = rcbOrientationParser(boundarySegments,options.idcolumn)
 rcData = rcbParser(boundarySegments,options.M,options.size,options.tolerance,options.idcolumn,options.segmentcolumn)

+# ----- write corrected RCB -----
+
+Minv = np.linalg.inv(np.array(options.M).reshape(2,2))
+
+if 'rcb' in options.output:
+  print """# Header:
+#
+# Column 1-3:   right hand average orientation (phi1, PHI, phi2 in radians)
+# Column 4-6:   left hand average orientation (phi1, PHI, phi2 in radians)
+# Column 7:     length (in microns)
+# Column 8:     trace angle (in degrees)
+# Column 9-12:  x,y coordinates of endpoints (in microns)
+# Column 13-14: IDs of right hand and left hand grains"""
+  for i,(left,right) in enumerate(rcData['neighbors']):
+    if rcData['segment'][i]:
+      first  = np.dot(Minv,np.array([rcData['bounds'][0][0]+rcData['point'][rcData['segment'][i][0]][0]/rcData['scale'],
+                                     rcData['bounds'][0][1]+rcData['point'][rcData['segment'][i][0]][1]/rcData['scale'],
+                                    ]))
+      second = np.dot(Minv,np.array([rcData['bounds'][0][0]+rcData['point'][rcData['segment'][i][1]][0]/rcData['scale'],
+                                     rcData['bounds'][0][1]+rcData['point'][rcData['segment'][i][1]][1]/rcData['scale'],
+                                    ]))
+      print ' '.join(map(str,orientationData[left-1]+orientationData[right-1])),
+      print np.linalg.norm(first-second),
+      print '0',
+      print ' '.join(map(str,first)),
+      print ' '.join(map(str,second)),
+      print ' '.join(map(str,[left,right]))
+
 # ----- write image -----

 if 'image' in options.output and options.imgsize > 0:
   if ImageCapability:
     image(myName,options.imgsize,options.xmargin,options.ymargin,rcData)
   else:
-    print '...no image drawing possible (PIL missing)...'
+    damask.util.croak('...no image drawing possible (PIL missing)...')

 # ----- write spectral geom -----

@@ -893,8 +953,9 @@ if 'spectral' in options.output:
                                       (y+1)*fftdata['resolution'][0]]))+'\n')  # grain indexes, x-row per line
     geomFile.close()                                        # close geom file

-    print('assigned %i out of %i (2D) Fourier points.'\
-         %(len(fftdata['fftpoints']), int(fftdata['resolution'][0])*int(fftdata['resolution'][1])))
+    damask.util.croak('assigned {} out of {} (2D) Fourier points...'
+                      .format(len(fftdata['fftpoints']),
+                              int(fftdata['resolution'][0])*int(fftdata['resolution'][1])))


 # ----- write Mentat procedure -----

@@ -936,7 +997,7 @@ if 'mentat' in options.output:
     if 'procedure' in options.output:
       output(outputLocals['log'],outputLocals,'Stdout')
   else:
-    print '...no interaction with Mentat possible...'
+    damask.util.croak('...no interaction with Mentat possible...')


 # ----- write config data to file -----

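A standalone sketch (not part of the patch) of the coordinate round trip behind the new 'rcb' export above: the writer divides each stored point by rcData['scale'], adds back the stored bounds origin, and applies the inverse of M, which undoes the transform/shift/scale done in rcbParser. All numbers below are made up for illustration; only numpy is assumed.

import numpy as np

M      = np.array([[1.0,  0.0],
                   [0.0, -1.0]])      # illustrative rcb --> Euler transformation
origin = np.array([12.5, 3.0])        # illustrative bounding-box corner (boxX[0], boxY[0])
scale  = 2.0                          # illustrative scalePatch

def to_internal(p):
    # forward mapping as inferred from rcbParser: transform, shift to box origin, scale
    return (M.dot(p) - origin)*scale

def to_rcb(q):
    # inverse mapping as in the 'rcb' writer: unscale, shift back, apply Minv
    return np.linalg.inv(M).dot(q/scale + origin)

p = np.array([7.25, -4.0])            # a segment end point in the original RCB frame
assert np.allclose(to_rcb(to_internal(p)), p)   # the export recovers the input coordinates
print(to_rcb(to_internal(p)))
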
From 9f08258e05d2424fd33989bad2575dc7f7f946d0 Mon Sep 17 00:00:00 2001
From: Test User
Date: Mon, 26 Sep 2016 16:34:07 +0200
Subject: [PATCH 4/6] updated version information after successful test of
 v2.0.1-150-g5345b42

---
 VERSION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/VERSION b/VERSION
index 85c030ae0..37c33ab3c 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-v2.0.1-148-gfb23c49
+v2.0.1-150-g5345b42


From 1d06e82700fef86b8f8c2c63b6c3ec1fba28a989 Mon Sep 17 00:00:00 2001
From: Aritra Chakraborty
Date: Mon, 26 Sep 2016 16:30:50 -0400
Subject: [PATCH 5/6] grain circumference identification

fixed right most path selection criteria for grain circumference
---
 .../pre/patchFromReconstructedBoundaries.py   | 23 ++++++++-----------
 1 file changed, 10 insertions(+), 13 deletions(-)

diff --git a/processing/pre/patchFromReconstructedBoundaries.py b/processing/pre/patchFromReconstructedBoundaries.py
index e4ec9ef42..572c929fa 100755
--- a/processing/pre/patchFromReconstructedBoundaries.py
+++ b/processing/pre/patchFromReconstructedBoundaries.py
@@ -270,7 +269,6 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
       myWalk = point['segments'].pop()
       grainLegs = [myWalk]
       myEnd = segments[myWalk][1 if segments[myWalk][0] == myStart else 0]
-
       while (myEnd != pointId):
         myV = [points[myEnd]['coords'][0]-points[myStart]['coords'][0],
                points[myEnd]['coords'][1]-points[myStart]['coords'][1]]
@@ -281,18 +280,18 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
           if peek == myWalk:
             continue                                        # do not go back same path
           peekEnd = segments[peek][1 if segments[peek][0] == myEnd else 0]
-          peekV = [points[myEnd]['coords'][0]-points[peekEnd]['coords'][0],
-                   points[myEnd]['coords'][1]-points[peekEnd]['coords'][1]]
+          peekV = [points[peekEnd]['coords'][0]-points[myEnd]['coords'][0],
+                   points[peekEnd]['coords'][1]-points[myEnd]['coords'][1]]
           peekLen = math.sqrt(peekV[0]**2+peekV[1]**2)
           if peekLen == 0.0: damask.util.croak('peeklen is zero: peek point {}'.format(peek))
           crossproduct = (myV[0]*peekV[1] - myV[1]*peekV[0])/myLen/peekLen
           dotproduct   = (myV[0]*peekV[0] + myV[1]*peekV[1])/myLen/peekLen
-          innerAngle = crossproduct*(dotproduct+1.0)
+          innerAngle = math.copysign(1.0,crossproduct)*(dotproduct-1.0)
           if innerAngle >= best['product']:                 # takes sharpest left turn
             best['product'] = innerAngle
             best['peek'] = peek
             best['point'] = peekEnd
-
+
         innerAngleSum += best['product']
         myWalk = best['peek']
         myStart = myEnd
         myEnd = best['point']
@@ -301,7 +300,7 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
         if myWalk in points[myStart]['segments']:
           points[myStart]['segments'].remove(myWalk)
         else:
-          damask.utilcroak('{} not in segments of point {}'.format(myWalk,myStart))
+          damask.util.croak('{} not in segments of point {}'.format(myWalk,myStart))
         grainDraw.append(points[myStart]['coords'])
         grainLegs.append(myWalk)

@@ -311,7 +310,6 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
       else:
         grains['box'] = grainLegs

-
 # build overall data structure

   rcData = {'dimension':[dX,dY],
@@ -772,8 +770,8 @@ def fftbuild(rcData,height,xframe,yframe,resolution,extrusion):
              'dimension':(xsize,ysize,zsize)}

   frameindex=len(rcData['grain'])+1                         # calculate frame index as largest grain index plus one
-  dx = xsize/(xres+1)                                       # calculate step sizes
-  dy = ysize/(yres+1)
+  dx = xsize/(xres)                                         # calculate step sizes
+  dy = ysize/(yres)

   grainpoints = []
   for segments in rcData['grain']:                          # get segments of each grain
@@ -788,11 +786,10 @@ def fftbuild(rcData,height,xframe,yframe,resolution,extrusion):
     grainpoints.append([])                                  # start out blank for current grain
     for p in sorted(points, key=points.get):                # loop thru set of sorted points
       grainpoints[-1].append([rcData['point'][p][0],rcData['point'][p][1]])  # append x,y of point
-  bestGuess = 0                                             # assume grain 0 as best guess
   for i in range(int(xres*yres)):                           # walk through all points in xy plane
     xtest = -xframe+((i%xres)+0.5)*dx                       # calculate coordinates
-    ytest = -yframe+(int(i/xres)+0.5)*dy
+    ytest = -yframe+((i//xres)+0.5)*dy
     if(xtest < 0 or xtest > maxX):                          # check wether part of frame
       if( ytest < 0 or ytest > maxY):                       # part of edges
         fftdata['fftpoints'].append(frameindex+2)           # append frameindex to result array
@@ -944,8 +941,8 @@ if 'spectral' in options.output:

     geomFile = open(myName+'_'+str(int(fftdata['resolution'][0]))+'.geom','w')  # open geom file for writing
     geomFile.write('3\theader\n')                           # write header info
-    geomFile.write('resolution a %i b %i c %i\n'%(fftdata['resolution']))  # resolution
-    geomFile.write('dimension x %f y %f z %f\n'%(fftdata['dimension']))    # size
+    geomFile.write('grid a %i b %i c %i\n'%(fftdata['resolution']))  # grid resolution
+    geomFile.write('size x %f y %f z %f\n'%(fftdata['dimension']))   # size
     geomFile.write('homogenization 1\n')                    # homogenization
     for z in xrange(fftdata['resolution'][2]):              # z repetions
       for y in xrange(fftdata['resolution'][1]):            # each x-row separately

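The decisive change in PATCH 5/6 is the new score math.copysign(1.0,crossproduct)*(dotproduct-1.0): per the commit message it makes the walk follow the right-most available branch around a grain circumference. A minimal standalone sketch of that ranking (the function and test vectors below are illustrative only, not part of the patch):

import math

def turn_score(v_in, v_out):
    # same criterion as the patched innerAngle: the sign of the cross product picks the
    # side (negative = right turn), and (dot - 1) grades how sharp the turn is
    len_in  = math.sqrt(v_in[0]**2 + v_in[1]**2)
    len_out = math.sqrt(v_out[0]**2 + v_out[1]**2)
    cross = (v_in[0]*v_out[1] - v_in[1]*v_out[0])/len_in/len_out
    dot   = (v_in[0]*v_out[0] + v_in[1]*v_out[1])/len_in/len_out
    return math.copysign(1.0, cross)*(dot - 1.0)

incoming   = (1.0, 0.0)                              # currently walking in +x direction
candidates = {'mild left'  : ( 0.5,  1.0),
              'mild right' : ( 0.5, -1.0),
              'sharp right': (-0.5, -1.0)}
best = max(candidates, key=lambda k: turn_score(incoming, candidates[k]))
print(best)                                          # 'sharp right': the right-most branch wins
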
From 0ffd74d53680ab15d04ae1ad02b63d7514fc4ea8 Mon Sep 17 00:00:00 2001
From: Aritra Chakraborty
Date: Mon, 26 Sep 2016 16:32:39 -0400
Subject: [PATCH 6/6] correct flag "data" for updated vtk_addRectilinearGridData

---
 processing/pre/geom_check.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/processing/pre/geom_check.sh b/processing/pre/geom_check.sh
index 1eb85913c..5a39d4fc7 100755
--- a/processing/pre/geom_check.sh
+++ b/processing/pre/geom_check.sh
@@ -11,7 +11,7 @@ do
   < $geom \
   | \
   vtk_addRectilinearGridData \
-  --scalar microstructure \
+  --data microstructure \
   --inplace \
   --vtk ${geom%.*}.vtk
   rm ${geom%.*}.vtk
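For reference on the robustness changes of PATCH 3/6: duplicate boundary segments are neutralized by overwriting their entries with None rather than deleting them, so stored segment indices stay valid and later loops simply test the entry (if vertex: / if rcData['segment'][i]:). The hunk only shows the culling step, so the duplicate detection in this standalone sketch is an illustrative reconstruction on made-up data:

# mark repeated (undirected) point-index pairs as None, keeping list indices stable --
# the same strategy the patch applies to rcData['segment']
segments = [(0, 1), (1, 2), (2, 0), (2, 1), (0, 1)]   # last two repeat earlier segments

seen = set()
dupSegments = []
for i, seg in enumerate(segments):
    key = frozenset(seg)                              # undirected: (1,2) equals (2,1)
    if key in seen:
        dupSegments.append(i)
    else:
        seen.add(key)

if len(dupSegments) > 0:
    print(' culling {} duplicate segments...'.format(len(dupSegments)))
for rm in dupSegments:
    segments[rm] = None

print(segments)                                       # [(0, 1), (1, 2), (2, 0), None, None]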