made hybridIA stuff work again
parent 50ea293b66
commit 8fac635c15
code/IO.f90 (17 changed lines)
@@ -588,7 +588,7 @@ function IO_hybridIA(Nast,ODFfileName)
 integer(pInt), parameter :: FILEUNIT = 999_pInt
 
 IO_hybridIA = 0.0_pReal ! initialize return value for case of error
-write(6,'(/,a,/)',advance='no') ' Using linear ODF file:'//trim(ODFfileName)
+write(6,'(/,a,/)',advance='no') ' Using linear ODF file: '//trim(ODFfileName)
 
 !--------------------------------------------------------------------------------------------------
 ! parse header of ODF file
@@ -627,8 +627,8 @@ function IO_hybridIA(Nast,ODFfileName)
 
 !--------------------------------------------------------------------------------------------------
 ! determine limits, number of steps and step size
-limits(1,1:3) = 720.0_pReal
-limits(2,1:3) = 0.0_pReal
+limits(1,1:3) = 721.0_pReal
+limits(2,1:3) = -1.0_pReal
 steps = 0_pInt
 
 line=IO_read(FILEUNIT)
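Note on the hunk above: the new initial values act as out-of-range sentinels. Euler angles in the linear ODF file lie between 0° and 720°, so starting the lower limits at 721° and the upper limits at -1° presumably guarantees that the first data line overwrites both once running minima and maxima are taken. A minimal Python sketch of that idea (the angle data here is made up for illustration):

    # Sketch: accumulate angular limits using out-of-range sentinels.
    euler_lines = [(0.0, 0.0, 0.0), (5.0, 0.0, 0.0), (355.0, 90.0, 355.0)]  # hypothetical (phi1, Phi, phi2) in degrees

    lower = [721.0] * 3   # larger than any admissible angle, so the first line always wins
    upper = [-1.0] * 3    # smaller than any admissible angle, so the first line always wins

    for eulers in euler_lines:
        lower = [min(l, e) for l, e in zip(lower, eulers)]
        upper = [max(u, e) for u, e in zip(upper, eulers)]

    print(lower, upper)   # [0.0, 0.0, 0.0] [355.0, 90.0, 355.0]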
@@ -649,9 +649,6 @@ function IO_hybridIA(Nast,ODFfileName)
 write(6,'(/,a,/,3(2x,f12.4,1x))',advance='no') ' Ending angles / ° = ',limits(2,1:3)
 write(6,'(/,a,/,3(2x,f12.4,1x))',advance='no') ' Angular steps / ° = ',deltas
 
-limits = limits*INRAD
-deltas = deltas*INRAD
-
 if (all(abs(limits(1,1:3)) < tol_math_check)) then
 write(6,'(/,a,/)',advance='no') ' assuming vertex centered data'
 center = 0.0_pReal ! no need to shift
@@ -662,6 +659,8 @@ function IO_hybridIA(Nast,ODFfileName)
 center = 0.5_pReal ! shift data by half of a bin
 endif
 
+limits = limits*INRAD
+deltas = deltas*INRAD
 
 !--------------------------------------------------------------------------------------------------
 ! read in data
@@ -678,10 +677,10 @@ function IO_hybridIA(Nast,ODFfileName)
 do phi1=1_pInt,steps(1); do Phi=1_pInt,steps(2); do phi2=1_pInt,steps(3)
 line=IO_read(FILEUNIT)
 chunkPos = IO_stringPos(line)
 eulers=[IO_floatValue(line,chunkPos,columns(1)),& ! read in again for consistency check only
 IO_floatValue(line,chunkPos,columns(2)),&
 IO_floatValue(line,chunkPos,columns(3))]*INRAD
-if (any(abs((real([phi1,phi,phi2],pReal)-1.0_pReal + center)*deltas-eulers)>tol_math_check)) & ! check if data is in expected order (phi2 fast)
+if (any(abs((real([phi1,phi,phi2],pReal) -1.0_pReal + center)*deltas-eulers)>tol_math_check)) & ! check if data is in expected order (phi2 fast) and correct for Fortran starting at 1
 call IO_error(error_ID = 156_pInt, ext_msg='linear ODF data not in expected order')
 
 prob = IO_floatValue(line,chunkPos,columns(4))
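Note on the consistency check above: with 1-based Fortran loop indices, the bin (phi1, Phi, phi2) is expected at the angles (index - 1 + center)*delta, where center is 0.0 for vertex-centered and 0.5 for cell-centered data, and phi2 has to be the fastest-running column. A small Python sketch of the same check (bin size, tolerance and values are illustrative only):

    import math

    def expected_angles(indices, deltas, center):
        """Angles of a bin from 1-based loop indices (center = 0.0 vertex, 0.5 cell centered)."""
        return [(i - 1.0 + center) * d for i, d in zip(indices, deltas)]

    deltas = [math.radians(5.0)] * 3    # hypothetical 5 degree bins
    center = 0.5                        # cell-centered data
    tol    = 1.0e-8

    # phi2 runs fastest: bin (1,1,2) should sit at (0.5, 0.5, 1.5) times delta for cell-centered data
    eulers_from_file = [0.5 * deltas[0], 0.5 * deltas[1], 1.5 * deltas[2]]
    ok = all(abs(e - x) < tol
             for e, x in zip(eulers_from_file, expected_angles([1, 1, 2], deltas, center)))
    print(ok)                           # True, i.e. the data is in the expected (phi2 fast) order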
@@ -756,7 +755,7 @@ function IO_hybridIA(Nast,ODFfileName)
 integer(pInt) pure function hybridIA_reps(dV_V,steps,C)
 
 implicit none
-integer(pInt), intent(in), dimension(3) :: steps !< needs description
+integer(pInt), intent(in), dimension(3) :: steps !< number of bins in Euler space
 real(pReal), intent(in), dimension(steps(3),steps(2),steps(1)) :: dV_V !< needs description
 real(pReal), intent(in) :: C !< needs description
 
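Note on hybridIA_reps: only the steps argument gains a description here; dV_V and C remain undocumented. From its role in the hybrid IA scheme it reads as the total number of repetitions produced by a scaling constant C, roughly as in the following Python sketch; this is an interpretation, not something stated in the commit:

    # Hedged sketch: each bin contributes int(C * dV_V) copies, the function returns the total.
    def hybrid_ia_reps(dV_V, C):
        return sum(int(C * v) for v in dV_V)

    dV_V = [0.4, 0.35, 0.25]            # hypothetical normalized bin volumes
    print(hybrid_ia_reps(dV_V, 10.0))   # 4 + 3 + 2 = 9 repetitions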
Second changed file (name not shown in this view; a Python ODF sampling script whose diff was collapsed as too large):
@@ -1,4 +1,5 @@
-#!/usr/bin/env python
+#!/usr/bin/python
+# -*- coding: UTF-8 no BOM -*-
 
 from optparse import OptionParser
 import damask
@@ -6,7 +7,8 @@ import os,sys,math,re,random,string
 import numpy as np
 
 scriptID = string.replace('$Id$','\n','\\n')
-scriptName = scriptID.split()[1]
+scriptName = os.path.splitext(scriptID.split()[1])[0]
 
+
 # --- helper functions ---
 def integerFactorization(i):
@@ -16,44 +18,6 @@ def integerFactorization(i):
 j -= 1
 return j
 
-
-def TSLheader(sizeX,sizeY,step):
-
-return [
-'# TEM_PIXperUM 1.000000',
-'# x-star 0.509548',
-'# y-star 0.795272',
-'# z-star 0.611799',
-'# WorkingDistance 18.000000',
-'#',
-'# Phase 1',
-'# MaterialName Al',
-'# Formula Fe',
-'# Info',
-'# Symmetry 43',
-'# LatticeConstants 2.870 2.870 2.870 90.000 90.000 90.000',
-'# NumberFamilies 4',
-'# hklFamilies 1 1 0 1 0.000000 1',
-'# hklFamilies 2 0 0 1 0.000000 1',
-'# hklFamilies 2 1 1 1 0.000000 1',
-'# hklFamilies 3 1 0 1 0.000000 1',
-'# Categories 0 0 0 0 0 ',
-'#',
-'# GRID: SquareGrid',
-'# XSTEP: ' + str(step),
-'# YSTEP: ' + str(step),
-'# NCOLS_ODD: ' + str(sizeX),
-'# NCOLS_EVEN: ' + str(sizeX),
-'# NROWS: ' + str(sizeY),
-'#',
-'# OPERATOR: ODFsammpling',
-'#',
-'# SAMPLEID: ',
-'#',
-'# SCANID: ',
-'#',
-]
-
 def binAsBins(bin,intervals):
 """ explode compound bin into 3D bins list """
 bins = [0]*3
@@ -120,11 +84,11 @@ def directInversion (ODF,nSamples):
 incFactor = 1.0
 nInvSamplesUpper = directInvRepetitions(ODF['dV_V'],scaleUpper)
 nIter += 1
-table.croak('%i:(%12.11f,%12.11f) %i <= %i <= %i'%(nIter,scaleLower,scaleUpper,
+damask.util.croak('%i:(%12.11f,%12.11f) %i <= %i <= %i'%(nIter,scaleLower,scaleUpper,
 nInvSamplesLower,nOptSamples,nInvSamplesUpper))
 nInvSamples = nInvSamplesUpper
 scale = scaleUpper
-table.croak('created set of %i samples (%12.11f) with scaling %12.11f delivering %i'%(nInvSamples,
+damask.util.croak('created set of %i samples (%12.11f) with scaling %12.11f delivering %i'%(nInvSamples,
 float(nInvSamples)/nOptSamples-1.0,
 scale,nSamples))
 repetition = [None]*ODF['nBins'] # preallocate and clear
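Note on the directInversion hunk above: the printed triple nInvSamplesLower <= nOptSamples <= nInvSamplesUpper suggests a bracketing search on the scale factor, which works because the integer repetition count grows monotonically with the scale. A hedged Python sketch of such a bracketing step (directInvRepetitions itself is not shown in the diff, so a stand-in is used):

    # Stand-in for directInvRepetitions: total integer repetitions for a given scale.
    def repetitions(dV_V, scale):
        return sum(int(scale * v) for v in dV_V)

    def bracket_scale(dV_V, nOptSamples, scaleLower=1.0, growth=2.0, maxIter=50):
        """Grow the upper scale until the delivered sample count brackets the requested one."""
        scaleUpper = scaleLower
        for _ in range(maxIter):
            if repetitions(dV_V, scaleUpper) >= nOptSamples:
                return scaleLower, scaleUpper
            scaleLower, scaleUpper = scaleUpper, scaleUpper * growth
        raise RuntimeError('no bracket found')

    dV_V = [0.5, 0.3, 0.2]              # hypothetical normalized bin volumes
    print(bracket_scale(dV_V, 100))     # (64.0, 128.0): 100 samples lie between 63 and 127 repetitions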
@@ -227,7 +191,7 @@ def TothVanHoutteSTAT (ODF,nSamples):
 countSamples += 1
 indexSelector += 1
 
-table.croak('created set of %i when asked to deliver %i'%(countSamples,nSamples))
+damask.util.croak('created set of %i when asked to deliver %i'%(countSamples,nSamples))
 
 return orientations, reconstructedODF
 
@@ -260,10 +224,6 @@ parser.add_option('-r', '--rnd',
 dest = 'randomSeed',
 type = 'int', metavar = 'int', \
 help = 'seed of random number generator [%default]')
-parser.add_option('--ang',
-dest = 'ang',
-action = 'store_true',
-help = 'write TSL/EDAX .ang file [%default]')
 parser.set_defaults(randomSeed = None,
 number = 500,
 algorithm = 'IA',
@@ -284,37 +244,34 @@ if filenames == []: filenames = [None]
 
 for name in filenames:
 try:
-table = damask.ASCIItable(name = name,
-buffered = False, readonly = True)
-except: continue
-table.croak('\033[1m'+scriptName+'\033[0m'+(': '+name if name else ''))
+table = damask.ASCIItable(name = name, buffered = False, readonly=True)
+except:
+continue
+damask.util.report(scriptName,name)
 
 randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed == None else options.randomSeed # random seed per file for second phase
 random.seed(randomSeed)
 
-# ------------------------------------------ read header ------------------------------------------
+# ------------------------------------------ read header and data ---------------------------------
 
 table.head_read()
 
 errors = []
 labels = ['phi1','Phi','phi2','intensity']
 for i,index in enumerate(table.label_index(labels)):
-if index < 0: errors.append('label {} not present.'.format(labels[i])
+if index < 0: errors.append('label {} not present.'.format(labels[i]))
 
 if errors != []:
-table.croak(errors)
+damask.util.croak(errors)
 table.close(dismiss = True)
 continue
 
-# ------------------------------------------ read data --------------------------------------------
-
-binnedODF = table.data_readArray(labels)
-
+table.data_readArray(labels)
 # --------------- figure out limits (left/right), delta, and interval -----------------------------
 
 ODF = {}
-limits = np.array([np.min(table.data,axis=0),
-np.max(table.data,axis=0)])
+limits = np.array([np.min(table.data[:,0:3],axis=0),
+np.max(table.data[:,0:3],axis=0)])
 ODF['limit'] = np.radians(limits[1,:])
 ODF['center'] = 0.0 if all(limits[0,:]<1e-8) else 0.5 # vertex or cell centered
 
@@ -322,8 +279,8 @@ for name in filenames:
 ODF['nBins'] = ODF['interval'].prod()
 ODF['delta'] = np.radians(np.array(limits[1,0:3]-limits[0,0:3])/(ODF['interval']-1))
 
-if binnedODF[0] != ODF['nBins']:
-table.croak('expecting %i values but got %i'%(ODF['nBins'],len(linesBinnedODF)))
+if table.data.shape[0] != ODF['nBins']:
+damask.util.croak('expecting %i values but got %i'%(ODF['nBins'],table.data.shape[0]))
 continue
 
 # build binnedODF array
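Note on the delta/nBins bookkeeping above: ODF['delta'] is the spacing between neighbouring data points, (max - min)/(interval - 1) per Euler angle, and the table has to contain exactly interval[0]*interval[1]*interval[2] rows. A worked Python example assuming a 5° cell-centered grid (the interval counts are computed in a part of the script not shown here):

    import numpy as np

    limits   = np.array([[  2.5,  2.5,   2.5],    # per-column minima in degrees (assumed)
                         [357.5, 87.5, 357.5]])   # per-column maxima in degrees (assumed)
    interval = np.array([72, 18, 72])             # bins per Euler angle (assumed)

    delta = np.radians((limits[1] - limits[0]) / (interval - 1))
    nBins = interval.prod()

    print(np.degrees(delta))   # [5. 5. 5.], i.e. a consistent 5 degree spacing
    print(nBins)               # 93312 rows expected in the table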
@@ -333,15 +290,16 @@ for name in filenames:
 dg = ODF['delta'][0]*2.0*math.sin(ODF['delta'][1]/2.0)*ODF['delta'][2]
 for b in range(ODF['nBins']):
 ODF['dV_V'][b] = \
-max(0.0,table.data[b,column['intensity']]) * dg * \
+max(0.0,table.data[b,table.label_index('intensity')]) * dg * \
 math.sin(((b//ODF['interval'][2])%ODF['interval'][1]+ODF['center'])*ODF['delta'][1])
 if ODF['dV_V'][b] > 0.0:
 sumdV_V += ODF['dV_V'][b]
 ODF['nNonZero'] += 1
 
-for b in range(ODF['nBins']): ODF['dV_V'][b] /= sumdV_V # normalize dV/V
+for b in range(ODF['nBins']):
+ODF['dV_V'][b] /= sumdV_V # normalize dV/V
 
-table.croak(['non-zero fraction: %12.11f (%i/%i)'%(float(ODF['nNonZero'])/ODF['nBins'],
+damask.util.croak(['non-zero fraction: %12.11f (%i/%i)'%(float(ODF['nNonZero'])/ODF['nBins'],
 ODF['nNonZero'],
 ODF['nBins']),
 'Volume integral of ODF: %12.11f\n'%sumdV_V,
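Note on the binning loop above: each intensity is weighted by the Euler-space volume element of its bin, essentially sin(Phi)*dphi1*dPhi*dphi2, with the Phi bin decoded from the flat index (phi2 runs fastest), and the result is normalized so that the dV_V values sum to one. A self-contained Python sketch with illustrative numbers:

    import math

    delta    = [math.radians(5.0)] * 3             # bin sizes (phi1, Phi, phi2), assumed
    interval = [72, 18, 72]                        # bins per angle, phi2 running fastest, assumed
    center   = 0.5                                 # cell-centered grid
    intensity = [1.0] * (interval[0] * interval[1] * interval[2])   # flat hypothetical ODF

    dg = delta[0] * 2.0 * math.sin(delta[1] / 2.0) * delta[2]       # in-bin part of the volume element
    dV_V = []
    for b in range(len(intensity)):
        PhiIndex = (b // interval[2]) % interval[1]                 # decode the Phi bin from the flat index
        dV_V.append(max(0.0, intensity[b]) * dg * math.sin((PhiIndex + center) * delta[1]))

    total = sum(dV_V)
    dV_V = [v / total for v in dV_V]                                # normalize dV/V
    print(round(sum(dV_V), 12))                                     # 1.0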
@@ -371,7 +329,7 @@ for name in filenames:
 indivSum['orig'] += ODF['dV_V'][bin]
 indivSquaredSum['orig'] += ODF['dV_V'][bin]**2
 
-table.croak(['sqrt(N*)RMSD of ODFs:\t %12.11f'% math.sqrt(nSamples*squaredDiff[method]),
+damask.util.croak(['sqrt(N*)RMSD of ODFs:\t %12.11f'% math.sqrt(nSamples*squaredDiff[method]),
 'RMSrD of ODFs:\t %12.11f'%math.sqrt(squaredRelDiff[method]),
 'rMSD of ODFs:\t %12.11f'%(squaredDiff[method]/indivSquaredSum['orig']),
 'nNonZero correlation slope:\t %12.11f'\
@@ -390,7 +348,7 @@ for name in filenames:
 
 materialConfig = [
 '#' + scriptID + ' ' + ' '.join(sys.argv[1:]),
-'# random seed %i'%randomSeed
+'# random seed %i'%randomSeed,
 '#-------------------#',
 '<microstructure>',
 '#-------------------#',
@@ -412,31 +370,12 @@ for name in filenames:
 eulers = Orientations[ID]
 
 materialConfig += ['[Grain%s]'%(str(ID+1).zfill(formatwidth)),
-'(gauss) phi1 %10.5f Phi %10.5f phi2 %10.6f scatter 0.0 fraction 1.0'%(*eulers),
+'(gauss) phi1 {} Phi {} phi2 {} scatter 0.0 fraction 1.0'.format(*eulers),
 ]
 
 #--- output finalization --------------------------------------------------------------------------
 
-with (open(os.path.splitext(name)[0]+'_'+method+'_'+str(nSamples)+'_material.config','w') as outfile:
+with (open(os.path.splitext(name)[0]+'_'+method+'_'+str(nSamples)+'_material.config','w')) as outfile:
 outfile.write('\n'.join(materialConfig)+'\n')
 
-# write ang file
-if options.ang:
-with open(os.path.splitext(name)[0]+'_'+method+'_'+str(nSamples)+'.ang','w') as outfile:
-sizeY = integerFactorization(nSamples)
-sizeX = nSamples / sizeY
-table.croak('Writing .ang file: %i * %i = %i (== %i)'%(sizeX,sizeY,sizeX*sizeY,nSamples))
-# write header
-outfile.write('\n'.join(TSLheader(sizeX,sizeY,1.0))+'\n')
-
-# write data
-counter = 0
-for eulers in Orientations:
-outfile.write('%10.5f %10.5f %10.5f '%(*np.radians(eulers)) +
-'%10.5f %10.5f '%(counter%sizeX,counter//sizeX) +
-'100.0 1.0 0 1 1.0\n')
-counter += 1
-
-#--- output finalization --------------------------------------------------------------------------
-
 table.close()