pre processing finished

Martin Diehl 2016-03-03 14:44:51 +01:00
parent 93d7643875
commit 3ff3bb1a5b
4 changed files with 28 additions and 25 deletions

View File

@@ -129,7 +129,8 @@ def servoLink():
])
for i in range(nLinks):
cmds.append([
"*link_class servo *servo_ret_node %i %i"%(i+1,baseNode["%.8e"%linkCoord[i][0]]["%.8e"%linkCoord[i][1]]["%.8e"%linkCoord[i][2]]),
"*link_class servo *servo_ret_node %i %i"\
%(i+1,baseNode["%.8e"%linkCoord[i][0]]["%.8e"%linkCoord[i][1]]["%.8e"%linkCoord[i][2]]),
"*link_class servo *servo_ret_dof %i %i"%(i+1,dof),
"*link_class servo *servo_ret_coef %i 1"%(i+1),
])
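
A trailing backslash lets Python continue a statement on the next source line, so the long format string and its % arguments can be split without changing the generated command. A minimal sketch of the pattern, with made-up values standing in for the loop index and the looked-up node number:

i, node = 0, 42                                  # hypothetical loop index and base node id
cmd = "*link_class servo *servo_ret_node %i %i"\
      %(i+1, node)                               # continuation: string and arguments on separate lines
print(cmd)                                       # -> *link_class servo *servo_ret_node 1 42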

View File

@@ -234,7 +234,8 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
crappyData = False
for pointId,point in enumerate(points):
if len(point['segments']) < 2: # point marks a dead end!
print "Dead end at segment %i (%f,%f)"%(1+point['segments'][0],boxX[0]+point['coords'][0]/scalePatch,boxY[0]+point['coords'][1]/scalePatch,)
print "Dead end at segment %i (%f,%f)"\
%(1+point['segments'][0],boxX[0]+point['coords'][0]/scalePatch,boxY[0]+point['coords'][1]/scalePatch,)
crappyData = True
grains = {'draw': [], 'legs': []}
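
Every boundary point is expected to be shared by at least two segments; a point that belongs to only one segment means a grain-boundary trace stops dead, so the input is flagged as corrupt. A self-contained sketch of that check, assuming a simplified points structure (coordinate offsets and scaling omitted):

points = [{'segments': [0, 1], 'coords': (0.0, 0.0)},      # regular point, two segments meet here
          {'segments': [2],    'coords': (1.0, 0.5)}]      # only one segment -> dead end

crappyData = False
for pointId, point in enumerate(points):
    if len(point['segments']) < 2:                         # point marks a dead end
        print("Dead end at segment %i (%f,%f)"
              % (1+point['segments'][0], point['coords'][0], point['coords'][1]))
        crappyData = True
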
@@ -367,7 +368,8 @@ def sample(size,aspect,n,xmargin,ymargin):
def patch(a,n,mesh,rcData):
cmds = []
for l in range(len(rcData['point'])): # generate all points
cmds.append("*add_points %f %f %f"%(rcData['point'][l][0]-a/2.0,rcData['point'][l][1]-a/rcData['dimension'][0]*rcData['dimension'][1]/2.0,0))
cmds.append("*add_points %f %f %f"\
%(rcData['point'][l][0]-a/2.0,rcData['point'][l][1]-a/rcData['dimension'][0]*rcData['dimension'][1]/2.0,0))
cmds.append(["*set_curve_type line",
"*set_curve_div_type_fix_ndiv",
@@ -891,7 +893,8 @@ if 'spectral' in options.output:
(y+1)*fftdata['resolution'][0]]))+'\n') # grain indexes, x-row per line
geomFile.close() # close geom file
print('assigned %i out of %i (2D) Fourier points.'%(len(fftdata['fftpoints']), int(fftdata['resolution'][0])*int(fftdata['resolution'][1])))
print('assigned %i out of %i (2D) Fourier points.'\
%(len(fftdata['fftpoints']), int(fftdata['resolution'][0])*int(fftdata['resolution'][1])))
# ----- write Mentat procedure -----
@@ -974,7 +977,8 @@ if 'mentat' in options.output or 'spectral' in options.output:
for grain in rcData['grainMapping']:
output += '\n[grain %i]\n'%grain + \
'(gauss)\tphi1\t%f\tphi\t%f\tphi2\t%f\tscatter\t%f\tfraction\t1.0\n'\
%(math.degrees(orientationData[grain-1][0]),math.degrees(orientationData[grain-1][1]),math.degrees(orientationData[grain-1][2]),options.scatter)
%(math.degrees(orientationData[grain-1][0]),math.degrees(orientationData[grain-1][1]),\
math.degrees(orientationData[grain-1][2]),options.scatter)
if (options.xmargin > 0.0 or options.ymargin > 0.0):
output += '\n[margin]\n' + \
'(random)\t\tscatter\t0.0\tfraction\t1.0\n'
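
The [grain ...] blocks written here use one (gauss) texture component per grain, with the Bunge Euler angles converted from radians to degrees. A minimal sketch of producing a single block, assuming orientationData holds angles in radians and a hypothetical scatter of 2 degrees:

import math

orientationData = [(0.1, 0.5, 1.2)]                # hypothetical Euler angles (radians) of grain 1
scatter = 2.0                                      # hypothetical scatter in degrees

grain = 1
block  = '\n[grain %i]\n'%grain + \
         '(gauss)\tphi1\t%f\tphi\t%f\tphi2\t%f\tscatter\t%f\tfraction\t1.0\n'\
         %(math.degrees(orientationData[grain-1][0]),
           math.degrees(orientationData[grain-1][1]),
           math.degrees(orientationData[grain-1][2]),
           scatter)
print(block)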

View File

@@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: UTF-8 no BOM -*-
import threading,time,os,subprocess,shlex,string,sys,random
import threading,time,os,sys,random
import numpy as np
from optparse import OptionParser
from operator import mul
@@ -16,10 +16,8 @@ currentSeedsName = None
#---------------------------------------------------------------------------------------------------
class myThread (threading.Thread):
#---------------------------------------------------------------------------------------------------
'''
perturbes seed in seed file, performes Voronoi tessellation, evaluates, and updates best match
'''
"""perturbes seed in seed file, performes Voronoi tessellation, evaluates, and updates best match"""
def __init__(self, threadID):
threading.Thread.__init__(self)
self.threadID = threadID
@@ -40,17 +38,17 @@ class myThread (threading.Thread):
s.release()
random.seed(options.randomSeed+self.threadID) # initializes to given seeds
knownSeedsUpdate = bestSeedsUpdate -1.0 # trigger update of local best seeds (time when the best seed file was found known to thread)
knownSeedsUpdate = bestSeedsUpdate -1.0 # trigger update of local best seeds
randReset = True # aquire new direction
myBestSeedsVFile = StringIO() # in-memory file to store local copy of best seeds file
perturbedSeedsVFile = StringIO() # in-memory file for perturbed best seeds file
perturbedGeomVFile = StringIO() # in-memory file for tessellated geom file
myBestSeedsVFile = StringIO() # store local copy of best seeds file
perturbedSeedsVFile = StringIO() # perturbed best seeds file
perturbedGeomVFile = StringIO() # tessellated geom file
#--- still not matching desired bin class ----------------------------------------------------------
while bestMatch < options.threshold:
s.acquire() # accessing global data, ensure only one thread does it per time
if bestSeedsUpdate > knownSeedsUpdate: # if a newer best seed file exist, read it into a virtual file
s.acquire() # ensure only one thread acces global data
if bestSeedsUpdate > knownSeedsUpdate: # write best fit to virtual file
knownSeedsUpdate = bestSeedsUpdate
bestSeedsVFile.reset()
myBestSeedsVFile.close()
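
The thread keeps its working copies of the seed and geom data in StringIO objects, i.e. file-like buffers held entirely in memory, so the current best seed file can be re-read and rewritten many times per iteration without disk I/O. A minimal sketch of the idea (Python 3 spelling; the Python 2 scripts use the StringIO/cStringIO module and its reset() instead of seek(0)):

from io import StringIO

bestSeedsVFile = StringIO()                           # shared in-memory "file" with the best seeds so far
bestSeedsVFile.write('1\t0.1\t0.2\t0.3\n')

bestSeedsVFile.seek(0)                                # rewind before re-reading
myBestSeedsVFile = StringIO(bestSeedsVFile.read())    # thread-local copy to perturb
print(myBestSeedsVFile.getvalue())
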
@@ -78,7 +76,7 @@ class myThread (threading.Thread):
perturbedSeedsVFile = StringIO()
myBestSeedsVFile.reset()
perturbedSeedsTable = damask.ASCIItable(myBestSeedsVFile,perturbedSeedsVFile,labeled=True) # read current best fitting seed file and to perturbed seed file
perturbedSeedsTable = damask.ASCIItable(myBestSeedsVFile,perturbedSeedsVFile,labeled=True) # write best fit to perturbed seed file
perturbedSeedsTable.head_read()
perturbedSeedsTable.head_write()
outputAlive=True
@@ -87,7 +85,7 @@ class myThread (threading.Thread):
while outputAlive and perturbedSeedsTable.data_read(): # perturbe selected microstructure
if ms in selectedMs:
newCoords=np.array(tuple(map(float,perturbedSeedsTable.data[0:3]))+direction[i])
newCoords=np.where(newCoords>=1.0,newCoords-1.0,newCoords) # ensure that the seeds remain in the box (move one side out, other side in)
newCoords=np.where(newCoords>=1.0,newCoords-1.0,newCoords) # ensure that the seeds remain in the box
newCoords=np.where(newCoords <0.0,newCoords+1.0,newCoords)
perturbedSeedsTable.data[0:3]=[format(f, '8.6f') for f in newCoords]
direction[i]*=2.
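
Because the tessellation is periodic, a perturbed seed that leaves the unit box is wrapped back in from the opposite side; the two np.where calls fold coordinates >= 1.0 and < 0.0 back into [0,1). A minimal sketch with made-up numbers:

import numpy as np

newCoords = np.array([0.97, 0.02, 0.5]) + np.array([0.05, -0.04, 0.0])   # perturbed seed position
newCoords = np.where(newCoords >= 1.0, newCoords-1.0, newCoords)          # fold the right face back in
newCoords = np.where(newCoords <  0.0, newCoords+1.0, newCoords)          # fold the left face back in
print(newCoords)                                                          # -> [0.02 0.98 0.5]
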
@@ -115,8 +113,9 @@ class myThread (threading.Thread):
for i in xrange(nMicrostructures): # calculate the deviation in all bins per histogram
currentHist.append(np.histogram(currentData,bins=target[i]['bins'])[0])
currentError.append(np.sqrt(np.square(np.array(target[i]['histogram']-currentHist[i])).sum()))
if currentError[0]>0.0: # as long as not all grains are within the range of the target, use the deviation to left and right as error
# as long as not all grains are within the range of the target, use the deviation to left and right as error
if currentError[0]>0.0:
currentError[0] *=((target[0]['bins'][0]-np.min(currentData))**2.0+
(target[0]['bins'][1]-np.max(currentData))**2.0)**0.5 # norm of deviations by number of usual bin deviation
s.acquire() # do the evaluation serially
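
Each bin class is scored by histogramming the current grain data with the target bin edges and taking the Euclidean norm of the per-bin deviation from the target histogram. A small sketch of that deviation with made-up target data:

import numpy as np

target = {'bins': np.array([0.0, 0.5, 1.0]),       # hypothetical bin edges of one class
          'histogram': np.array([3, 2])}           # desired number of grains per bin

currentData = np.array([0.1, 0.2, 0.6, 0.7, 0.8])  # current grain measures
currentHist = np.histogram(currentData, bins=target['bins'])[0]              # -> [2 3]
currentError = np.sqrt(np.square(target['histogram'] - currentHist).sum())   # -> 1.414...
print(currentError)
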
@@ -154,7 +153,7 @@ class myThread (threading.Thread):
match=myMatch
sys.stdout.flush()
break
if i == min(nMicrostructures,myMatch+options.bins)-1: # same quality as before (for the considered bins): take it to keep on moving
if i == min(nMicrostructures,myMatch+options.bins)-1: # same quality as before: take it to keep on moving
bestSeedsUpdate = time.time()
perturbedSeedsVFile.reset()
bestSeedsVFile.close()
@@ -215,7 +214,7 @@ options = parser.parse_args()[0]
damask.util.report(scriptName,options.seedFile)
if options.randomSeed == None:
if options.randomSeed is None:
options.randomSeed = int(os.urandom(4).encode('hex'), 16)
damask.util.croak(options.randomSeed)
delta = (options.scale/options.grid[0],options.scale/options.grid[1],options.scale/options.grid[2])
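
Identity comparison (is None) is the idiomatic test for an unset option, and the fallback seed is built from four bytes of OS entropy; the .encode('hex') spelling only exists on Python 2 byte strings. A rough equivalent that also runs on Python 3, with a hypothetical option value:

import os, random

randomSeed = None                                     # hypothetical options.randomSeed
if randomSeed is None:                                # identity test instead of '== None'
    randomSeed = int.from_bytes(os.urandom(4), 'big') # 4 bytes of OS entropy -> 32-bit integer
random.seed(randomSeed)
print(randomSeed)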

View File

@@ -122,10 +122,9 @@ for name in filenames:
newInfo['microstructures'] = len(np.unique(seeds[:,3]))
# --- report ---------------------------------------------------------------------------------------
if (newInfo['microstructures'] != info['microstructures']):
damask.util.croak(remarks.append('--> microstructures: %i'%newInfo['microstructures']))
remarks = []
if ( newInfo['microstructures'] != info['microstructures']): remarks.append('--> microstructures: %i'%newInfo['microstructures'])
if remarks != []: damask.util.croak(remarks)
# ------------------------------------------ assemble header ---------------------------------------
table.info_clear()
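
The report section collects its remarks in a list and emits them in a single call once all checks have run. A sketch of the pattern, using print as a stand-in for damask.util.croak:

info    = {'microstructures': 20}     # hypothetical count taken from the file header
newInfo = {'microstructures': 18}     # hypothetical count of unique indices in the seed data

remarks = []
if newInfo['microstructures'] != info['microstructures']:
    remarks.append('--> microstructures: %i'%newInfo['microstructures'])
if remarks != []:
    print('\n'.join(remarks))         # the script hands the list to damask.util.croak instead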