pre processing finished

parent 93d7643875
commit 3ff3bb1a5b
@@ -129,7 +129,8 @@ def servoLink():
       ])
   for i in range(nLinks):
     cmds.append([
-      "*link_class servo *servo_ret_node %i %i"%(i+1,baseNode["%.8e"%linkCoord[i][0]]["%.8e"%linkCoord[i][1]]["%.8e"%linkCoord[i][2]]),
+      "*link_class servo *servo_ret_node %i %i"\
+        %(i+1,baseNode["%.8e"%linkCoord[i][0]]["%.8e"%linkCoord[i][1]]["%.8e"%linkCoord[i][2]]),
       "*link_class servo *servo_ret_dof %i %i"%(i+1,dof),
       "*link_class servo *servo_ret_coef %i 1"%(i+1),
       ])
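The split above changes only the source layout, not the Mentat command that is emitted. A minimal sketch with a hypothetical node id, showing that backslash continuation produces the identical %-formatted string:

node = 42                                                      # hypothetical servo node id
one_liner = "*link_class servo *servo_ret_node %i %i"%(1,node)
split     = "*link_class servo *servo_ret_node %i %i"\
            %(1,node)                                          # backslash continuation, as in the diff
wrapped   = ("*link_class servo *servo_ret_node %i %i"
             %(1,node))                                        # parentheses would work equally well
assert one_liner == split == wrapped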
@@ -234,7 +234,8 @@ def rcbParser(content,M,size,tolerance,idcolumn,segmentcolumn):
   crappyData = False
   for pointId,point in enumerate(points):
     if len(point['segments']) < 2:            # point marks a dead end!
-      print "Dead end at segment %i (%f,%f)"%(1+point['segments'][0],boxX[0]+point['coords'][0]/scalePatch,boxY[0]+point['coords'][1]/scalePatch,)
+      print "Dead end at segment %i (%f,%f)"\
+            %(1+point['segments'][0],boxX[0]+point['coords'][0]/scalePatch,boxY[0]+point['coords'][1]/scalePatch,)
       crappyData = True
 
   grains = {'draw': [], 'legs': []}
@@ -367,7 +368,8 @@ def sample(size,aspect,n,xmargin,ymargin):
 def patch(a,n,mesh,rcData):
   cmds = []
   for l in range(len(rcData['point'])):       # generate all points
-    cmds.append("*add_points %f %f %f"%(rcData['point'][l][0]-a/2.0,rcData['point'][l][1]-a/rcData['dimension'][0]*rcData['dimension'][1]/2.0,0))
+    cmds.append("*add_points %f %f %f"\
+                %(rcData['point'][l][0]-a/2.0,rcData['point'][l][1]-a/rcData['dimension'][0]*rcData['dimension'][1]/2.0,0))
 
   cmds.append(["*set_curve_type line",
                "*set_curve_div_type_fix_ndiv",
@@ -891,7 +893,8 @@ if 'spectral' in options.output:
                                  (y+1)*fftdata['resolution'][0]]))+'\n')   # grain indexes, x-row per line
   geomFile.close()                                                         # close geom file
 
-  print('assigned %i out of %i (2D) Fourier points.'%(len(fftdata['fftpoints']), int(fftdata['resolution'][0])*int(fftdata['resolution'][1])))
+  print('assigned %i out of %i (2D) Fourier points.'\
+        %(len(fftdata['fftpoints']), int(fftdata['resolution'][0])*int(fftdata['resolution'][1])))
 
 
 # ----- write Mentat procedure -----
@@ -974,7 +977,8 @@ if 'mentat' in options.output or 'spectral' in options.output:
   for grain in rcData['grainMapping']:
     output += '\n[grain %i]\n'%grain + \
               '(gauss)\tphi1\t%f\tphi\t%f\tphi2\t%f\tscatter\t%f\tfraction\t1.0\n'\
-              %(math.degrees(orientationData[grain-1][0]),math.degrees(orientationData[grain-1][1]),math.degrees(orientationData[grain-1][2]),options.scatter)
+              %(math.degrees(orientationData[grain-1][0]),math.degrees(orientationData[grain-1][1]),\
+                math.degrees(orientationData[grain-1][2]),options.scatter)
   if (options.xmargin > 0.0 or options.ymargin > 0.0):
     output += '\n[margin]\n' + \
               '(random)\t\tscatter\t0.0\tfraction\t1.0\n'
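The reflowed argument list feeds the same '(gauss)' texture entry; the values in orientationData are radians and are converted with math.degrees. A small sketch with made-up Euler angles (0.5, 1.0, 1.5 rad) and zero scatter:

import math

phi1, Phi, phi2 = 0.5, 1.0, 1.5                                # hypothetical Bunge-Euler angles in radians
scatter = 0.0
line = '(gauss)\tphi1\t%f\tphi\t%f\tphi2\t%f\tscatter\t%f\tfraction\t1.0\n'\
       %(math.degrees(phi1),math.degrees(Phi),math.degrees(phi2),scatter)
print(line)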
@@ -1,7 +1,7 @@
 #!/usr/bin/python
 # -*- coding: UTF-8 no BOM -*-
 
-import threading,time,os,subprocess,shlex,string,sys,random
+import threading,time,os,sys,random
 import numpy as np
 from optparse import OptionParser
 from operator import mul
@@ -16,10 +16,8 @@ currentSeedsName = None
 
 #---------------------------------------------------------------------------------------------------
 class myThread (threading.Thread):
-#---------------------------------------------------------------------------------------------------
-  '''
-  perturbes seed in seed file, performes Voronoi tessellation, evaluates, and updates best match
-  '''
+  """perturbes seed in seed file, performes Voronoi tessellation, evaluates, and updates best match"""
   def __init__(self, threadID):
     threading.Thread.__init__(self)
     self.threadID = threadID
@@ -40,17 +38,17 @@ class myThread (threading.Thread):
     s.release()
 
     random.seed(options.randomSeed+self.threadID)            # initializes to given seeds
-    knownSeedsUpdate = bestSeedsUpdate -1.0                  # trigger update of local best seeds (time when the best seed file was found known to thread)
+    knownSeedsUpdate = bestSeedsUpdate -1.0                  # trigger update of local best seeds
     randReset = True                                         # aquire new direction
 
-    myBestSeedsVFile = StringIO()                            # in-memory file to store local copy of best seeds file
-    perturbedSeedsVFile = StringIO()                         # in-memory file for perturbed best seeds file
-    perturbedGeomVFile = StringIO()                          # in-memory file for tessellated geom file
+    myBestSeedsVFile = StringIO()                            # store local copy of best seeds file
+    perturbedSeedsVFile = StringIO()                         # perturbed best seeds file
+    perturbedGeomVFile = StringIO()                          # tessellated geom file
 
 #--- still not matching desired bin class ----------------------------------------------------------
     while bestMatch < options.threshold:
-      s.acquire()                                            # accessing global data, ensure only one thread does it per time
-      if bestSeedsUpdate > knownSeedsUpdate:                 # if a newer best seed file exist, read it into a virtual file
+      s.acquire()                                            # ensure only one thread acces global data
+      if bestSeedsUpdate > knownSeedsUpdate:                 # write best fit to virtual file
         knownSeedsUpdate = bestSeedsUpdate
         bestSeedsVFile.reset()
         myBestSeedsVFile.close()
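The trimmed comments still describe the same virtual-file pattern: StringIO objects stand in for files on disk so each thread can read and rewrite seed data without touching the filesystem. A tiny sketch of that pattern using io.StringIO (the .reset() calls in the script suggest Python 2's cStringIO, where .reset() rewinds like .seek(0) does here):

from io import StringIO

vfile = StringIO()                                             # in-memory stand-in for a seeds file
vfile.write("x y z microstructure\n0.1 0.2 0.3 1\n")           # hypothetical seed record
vfile.seek(0)                                                  # rewind before re-reading, analogous to .reset()
print(vfile.read())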
@@ -78,7 +76,7 @@ class myThread (threading.Thread):
         perturbedSeedsVFile = StringIO()
         myBestSeedsVFile.reset()
 
-        perturbedSeedsTable = damask.ASCIItable(myBestSeedsVFile,perturbedSeedsVFile,labeled=True)   # read current best fitting seed file and to perturbed seed file
+        perturbedSeedsTable = damask.ASCIItable(myBestSeedsVFile,perturbedSeedsVFile,labeled=True)   # write best fit to perturbed seed file
         perturbedSeedsTable.head_read()
         perturbedSeedsTable.head_write()
         outputAlive=True
@@ -87,7 +85,7 @@ class myThread (threading.Thread):
         while outputAlive and perturbedSeedsTable.data_read():          # perturbe selected microstructure
           if ms in selectedMs:
             newCoords=np.array(tuple(map(float,perturbedSeedsTable.data[0:3]))+direction[i])
-            newCoords=np.where(newCoords>=1.0,newCoords-1.0,newCoords)  # ensure that the seeds remain in the box (move one side out, other side in)
+            newCoords=np.where(newCoords>=1.0,newCoords-1.0,newCoords)  # ensure that the seeds remain in the box
             newCoords=np.where(newCoords <0.0,newCoords+1.0,newCoords)
             perturbedSeedsTable.data[0:3]=[format(f, '8.6f') for f in newCoords]
             direction[i]*=2.
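The shortened comment still means a periodic wrap: a coordinate pushed beyond 1.0 re-enters at the opposite face, and one pushed below 0.0 comes back in at the top. A minimal sketch with a made-up seed and perturbation:

import numpy as np

coords = np.array([0.95, 0.10, 0.50]) + np.array([0.10, -0.20, 0.00])   # hypothetical perturbed seed
coords = np.where(coords >= 1.0, coords - 1.0, coords)                   # fold overshoot above 1.0 back in
coords = np.where(coords <  0.0, coords + 1.0, coords)                   # fold undershoot below 0.0 back in
print(coords)                                                            # approximately [0.05 0.9 0.5]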
@@ -116,7 +114,8 @@ class myThread (threading.Thread):
           currentHist.append(np.histogram(currentData,bins=target[i]['bins'])[0])
           currentError.append(np.sqrt(np.square(np.array(target[i]['histogram']-currentHist[i])).sum()))
 
-        if currentError[0]>0.0:          # as long as not all grains are within the range of the target, use the deviation to left and right as error
+# as long as not all grains are within the range of the target, use the deviation to left and right as error
+        if currentError[0]>0.0:
           currentError[0] *=((target[0]['bins'][0]-np.min(currentData))**2.0+
                              (target[0]['bins'][1]-np.max(currentData))**2.0)**0.5   # norm of deviations by number of usual bin deviation
         s.acquire()                      # do the evaluation serially
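Moving the long comment onto its own line leaves the error measure untouched: the current grain data are binned with the target's bin edges and the Euclidean distance between the two histograms is the mismatch. A short sketch with invented target and current data:

import numpy as np

target_bins = np.array([0.0, 1.0, 2.0, 3.0])                             # hypothetical bin edges
target_hist = np.array([4, 3, 1])                                        # hypothetical target counts
currentData = np.array([0.2, 0.4, 0.8, 1.1, 1.5, 1.7, 2.2, 2.9])
currentHist  = np.histogram(currentData, bins=target_bins)[0]            # counts per target bin
currentError = np.sqrt(np.square(target_hist - currentHist).sum())       # Euclidean distance between histograms
print(currentHist, currentError)                                         # [3 3 2] ~1.414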
@@ -154,7 +153,7 @@ class myThread (threading.Thread):
               match=myMatch
               sys.stdout.flush()
               break
-            if i == min(nMicrostructures,myMatch+options.bins)-1:       # same quality as before (for the considered bins): take it to keep on moving
+            if i == min(nMicrostructures,myMatch+options.bins)-1:       # same quality as before: take it to keep on moving
               bestSeedsUpdate = time.time()
               perturbedSeedsVFile.reset()
               bestSeedsVFile.close()
@@ -215,7 +214,7 @@ options = parser.parse_args()[0]
 
 damask.util.report(scriptName,options.seedFile)
 
-if options.randomSeed == None:
+if options.randomSeed is None:
   options.randomSeed = int(os.urandom(4).encode('hex'), 16)
 damask.util.croak(options.randomSeed)
 delta = (options.scale/options.grid[0],options.scale/options.grid[1],options.scale/options.grid[2])
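The is None form is the idiomatic identity test for the unset default. The seed on the following line is built from four random bytes; bytes.encode('hex') exists only in Python 2, so a hedged sketch of an equivalent 32-bit seed using binascii.hexlify, which is portable and should give the same value:

import os, binascii

randomSeed = int(binascii.hexlify(os.urandom(4)), 16)          # four random bytes as a 32-bit integer
assert 0 <= randomSeed < 2**32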
@@ -122,10 +122,9 @@ for name in filenames:
   newInfo['microstructures'] = len(np.unique(seeds[:,3]))
 
 # --- report ---------------------------------------------------------------------------------------
+  if (newInfo['microstructures'] != info['microstructures']):
+    damask.util.croak(remarks.append('--> microstructures: %i'%newInfo['microstructures']))
 
-  remarks = []
-  if ( newInfo['microstructures'] != info['microstructures']): remarks.append('--> microstructures: %i'%newInfo['microstructures'])
-  if remarks != []: damask.util.croak(remarks)
 
 # ------------------------------------------ assemble header ---------------------------------------
   table.info_clear()