Merge branch 'development' of magit1.mpie.de:damask/DAMASK into miscImprovements
commit e20623845a
@@ -995,11 +995,17 @@ class Orientation:
                       relationModel,
                       direction,
                       targetSymmetry = None):
+    """
+    orientation relationship
+
+    positive number: fcc --> bcc
+    negative number: bcc --> fcc
+    """
     if relationModel not in ['KS','GT','GTdash','NW','Pitsch','Bain']: return None
     if int(direction) == 0: return None
 
-    # KS from S. Morito et al./Journal of Alloys and Compounds 5775 (2013) S587-S592 DOES THIS PAPER EXISTS?
+    # KS from S. Morito et al./Journal of Alloys and Compounds 5775 (2013) S587-S592
+    # for KS rotation matrices also check K. Kitahara et al./Acta Materialia 54 (2006) 1279-1288
     # GT from Y. He et al./Journal of Applied Crystallography (2006). 39, 72-81
     # GT' from Y. He et al./Journal of Applied Crystallography (2006). 39, 72-81
     # NW from H. Kitahara et al./Materials Characterization 54 (2005) 378-386
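The new docstring pins down the meaning of the sign of direction: positive goes fcc --> bcc, negative goes bcc --> fcc. A minimal sketch, not part of this commit, of how that sign could select which column of the variant tables is the starting ('me') lattice; the assumption that column 0 holds the fcc data and column 1 the bcc data is purely illustrative and not shown in this hunk.

    # Illustrative only: column 0 = fcc, column 1 = bcc is an assumption, not taken from the diff.
    def pick_lattices(direction):
        """Return (me, other) table columns from the sign of `direction`."""
        if int(direction) == 0:
            raise ValueError('direction must be nonzero')
        return (0, 1) if int(direction) > 0 else (1, 0)

    me, other = pick_lattices(+1)    # positive: start ('me') in fcc, target ('other') in bcc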
@@ -1226,14 +1232,14 @@ class Orientation:
     myPlane    /= np.linalg.norm(myPlane)
     myNormal    = [float(i) for i in normals[relationModel][variant,me]]      # map(float, planes[...]) does not work in python 3
     myNormal   /= np.linalg.norm(myNormal)
-    myMatrix    = np.array([myPlane,myNormal,np.cross(myPlane,myNormal)])
+    myMatrix    = np.array([myNormal,np.cross(myPlane,myNormal),myPlane]).T
 
     otherPlane  = [float(i) for i in planes[relationModel][variant,other]]    # map(float, planes[...]) does not work in python 3
     otherPlane /= np.linalg.norm(otherPlane)
     otherNormal = [float(i) for i in normals[relationModel][variant,other]]   # map(float, planes[...]) does not work in python 3
     otherNormal /= np.linalg.norm(otherNormal)
-    otherMatrix = np.array([otherPlane,otherNormal,np.cross(otherPlane,otherNormal)])
+    otherMatrix = np.array([otherNormal,np.cross(otherPlane,otherNormal),otherPlane]).T
 
-    rot=np.dot(otherMatrix.T,myMatrix)
+    rot=np.dot(otherMatrix,myMatrix.T)
 
     return Orientation(matrix=np.dot(rot,self.asMatrix()))                    # no symmetry information ??
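Reading myPlane as the unit plane normal and myNormal as the unit direction lying in that plane, the reworked lines stack direction, plane x direction, and plane normal as the columns of a proper rotation for each lattice, so np.dot(otherMatrix, myMatrix.T) carries the parent triad onto the child triad. A minimal numpy sketch, not from the commit, with an illustrative Kurdjumov-Sachs-like (plane, direction) pairing instead of the commit's variant tables:

    import numpy as np

    def triad(direction, plane_normal):
        """Right-handed orthonormal triad: columns are direction, plane_normal x direction, plane_normal."""
        d = np.asarray(direction, dtype=float)
        d /= np.linalg.norm(d)
        p = np.asarray(plane_normal, dtype=float)
        p /= np.linalg.norm(p)
        return np.array([d, np.cross(p, d), p]).T

    # Illustrative vectors only, not read from the planes/normals tables of the diff.
    myMatrix    = triad([-1, 0, 1], [1, 1, 1])       # fcc: direction <-101> in plane {111}
    otherMatrix = triad([-1,-1, 1], [0, 1, 1])       # bcc: direction <-1-11> in plane {011}

    rot = np.dot(otherMatrix, myMatrix.T)            # carries the 'my' triad onto the 'other' triad

    assert np.allclose(np.dot(rot, rot.T), np.eye(3))                         # orthonormal rotation
    assert np.allclose(np.dot(rot, np.array([-1., 0., 1.])/np.sqrt(2.)),
                       np.array([-1., -1., 1.])/np.sqrt(3.))                  # fcc direction -> bcc direction

The two asserts check that the result is a rotation and that it indeed maps the fcc direction onto the bcc direction, which is what the old column order and the old rot=np.dot(otherMatrix.T,myMatrix) did not guarantee.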
@@ -0,0 +1,74 @@
+#!/usr/bin/env python2.7
+# -*- coding: UTF-8 no BOM -*-
+
+import os,h5py
+import numpy as np
+from optparse import OptionParser
+import damask
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID   = ' '.join([scriptName,damask.version])
+
+
+#--------------------------------------------------------------------------------------------------
+#                                                MAIN
+#--------------------------------------------------------------------------------------------------
+
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile[s]]', description = """
+Convert DREAM3D file to ASCIItable
+
+""", version = scriptID)
+
+(options, filenames) = parser.parse_args()
+
+rootDir ='DataContainers/ImageDataContainer'
+
+# --- loop over input files -------------------------------------------------------------------------
+
+if filenames == []: parser.error('no input file specified.')
+
+for name in filenames:
+  try:
+    table = damask.ASCIItable(outname = os.path.splitext(name)[0]+'.txt',
+                              buffered = False
+                             )
+  except: continue
+  damask.util.report(scriptName,name)
+
+  inFile = h5py.File(name, 'r')
+
+  grid = inFile[rootDir+'/_SIMPL_GEOMETRY/DIMENSIONS'][...]
+
+# --- read comments --------------------------------------------------------------------------------
+
+  coords = (np.mgrid[0:grid[2], 0:grid[1], 0:grid[0]]).reshape(3, -1).T
+  coords = (np.fliplr(coords)*inFile[rootDir+'/_SIMPL_GEOMETRY/SPACING'][...] \
+          + inFile[rootDir+'/_SIMPL_GEOMETRY/ORIGIN'][...] \
+          + inFile[rootDir+'/_SIMPL_GEOMETRY/SPACING'][...]*0.5)
+
+  table.data = np.hstack( (coords,
+                           inFile[rootDir+'/CellData/EulerAngles'][...].reshape(grid.prod(),3),
+                           inFile[rootDir+'/CellData/Phases'][...].reshape(grid.prod(),1),
+                           inFile[rootDir+'/CellData/Confidence Index'][...].reshape(grid.prod(),1),
+                           inFile[rootDir+'/CellData/Fit'][...].reshape(grid.prod(),1),
+                           inFile[rootDir+'/CellData/Image Quality'][...].reshape(grid.prod(),1)))
+
+  labels = ['1_pos','2_pos','3_pos',
+            '1_Euler','2_Euler','3_Euler',
+            'PhaseID','CI','Fit','IQ']
+  try:
+    table.data = np.hstack((table.data, inFile[rootDir+'/CellData/FeatureIds'][...].reshape(grid.prod(),1)))
+    labels.append(['FeatureID'])
+  except Exception:
+    pass
+
+# ------------------------------------------ assemble header ---------------------------------------
+
+  table.labels_clear()
+  table.labels_append(labels,reset = True)
+  table.head_write()
+
+# ------------------------------------------ finalize output ---------------------------------------
+
+  table.data_writeArray()                                                      #(fmt='%e2.2')
+  table.close()
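The coordinate block of this new converter turns DREAM.3D's grid, spacing, and origin into cell-centre positions: np.mgrid enumerates (z, y, x) indices with x varying fastest, np.fliplr reorders each row to (x, y, z), and origin + (index + 0.5)*spacing lands each point on the centre of its voxel. A minimal sketch, not from the commit, with a made-up 2 x 2 x 1 grid, unit spacing, and zero origin standing in for the _SIMPL_GEOMETRY datasets:

    import numpy as np

    # Made-up geometry values for illustration; the script reads them from the HDF5 file.
    grid    = np.array([2, 2, 1])          # (Nx, Ny, Nz)
    spacing = np.array([1.0, 1.0, 1.0])
    origin  = np.array([0.0, 0.0, 0.0])

    coords = np.mgrid[0:grid[2], 0:grid[1], 0:grid[0]].reshape(3, -1).T        # rows of (z, y, x), x fastest
    coords = np.fliplr(coords)*spacing + origin + spacing*0.5                  # cell centres in (x, y, z)
    # -> rows: (0.5, 0.5, 0.5), (1.5, 0.5, 0.5), (0.5, 1.5, 0.5), (1.5, 1.5, 0.5)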
@@ -89,9 +89,9 @@ for name in filenames:
 
   # Calculates gaussian weights for simulating 3d diffusion
   gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d))/math.pow(2.0*np.pi*options.d*options.d,1.5)
-  gauss[:,:,grid[2]/2::] = gauss[:,:,round(grid[2]/2.)-1::-1]                 # trying to cope with uneven (odd) grid size
-  gauss[:,grid[1]/2::,:] = gauss[:,round(grid[1]/2.)-1::-1,:]
-  gauss[grid[0]/2::,:,:] = gauss[round(grid[0]/2.)-1::-1,:,:]
+  gauss[:,:,grid[2]/2::] = gauss[:,:,int(round(grid[2]/2.))-1::-1]            # trying to cope with uneven (odd) grid size
+  gauss[:,grid[1]/2::,:] = gauss[:,int(round(grid[1]/2.))-1::-1,:]
+  gauss[grid[0]/2::,:,:] = gauss[int(round(grid[0]/2.))-1::-1,:,:]
   gauss = np.fft.rfftn(gauss)
 
   interfacialEnergy = lambda A,B: (A*B != 0)*(A != B)*1.0                     # 1.0 if A & B are distinct & nonzero, 0.0 otherwise
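The only change in this hunk wraps round(...) in int(...): round() returns a float, and a float is not a valid slice index, so the mirrored-index assignments would fail on numpy versions that enforce integer indexing. A minimal 1-D analogue of the same wrap-around mirroring, with a made-up kernel length and width instead of the script's X, Y, Z and options.d:

    import math
    import numpy as np

    # Made-up values: n grid points, diffusion width d.
    n, d = 7, 1.2
    x = np.arange(n)
    gauss = np.exp(-x*x/(2.0*d*d))/math.pow(2.0*np.pi*d*d, 0.5)

    # Mirror the leading half into the trailing half so the kernel wraps around the periodic
    # domain; int(round(...)) keeps the slice index an integer, which numpy insists on.
    gauss[n//2::] = gauss[int(round(n/2.))-1::-1]

    weights = np.fft.rfft(gauss)    # same idea as np.fft.rfftn(gauss) in the script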
@@ -0,0 +1,117 @@
+#!/usr/bin/env python2.7
+# -*- coding: UTF-8 no BOM -*-
+
+import os,sys,math
+import numpy as np
+import damask
+from optparse import OptionParser
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID   = ' '.join([scriptName,damask.version])
+
+#--------------------------------------------------------------------------------------------------
+#                                                MAIN
+#--------------------------------------------------------------------------------------------------
+
+validDirections = ['x','y','z']
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
+Mirrors spectral geometry description along given directions.
+
+""", version=scriptID)
+
+parser.add_option('-d','--direction',
+                  dest = 'directions',
+                  action = 'extend', metavar = '<string LIST>',
+                  help = "directions in which to mirror {'x','y','z'}")
+
+(options, filenames) = parser.parse_args()
+
+if options.directions is None:
+  parser.error('no direction given.')
+if not set(options.directions).issubset(validDirections):
+  invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)]
+  parser.error('invalid directions {}. '.format(*invalidDirections))
+
+# --- loop over input files -------------------------------------------------------------------------
+
+if filenames == []: filenames = [None]
+
+for name in filenames:
+  try:
+    table = damask.ASCIItable(name = name,
+                              buffered = False, labeled = False)
+  except: continue
+  damask.util.report(scriptName,name)
+
+# --- interpret header ----------------------------------------------------------------------------
+
+  table.head_read()
+  info,extra_header = table.head_getGeom()
+
+  damask.util.croak(['grid     a b c:  %s'%(' x '.join(map(str,info['grid']))),
+                     'size     x y z:  %s'%(' x '.join(map(str,info['size']))),
+                     'origin   x y z:  %s'%(' : '.join(map(str,info['origin']))),
+                     'homogenization:  %i'%info['homogenization'],
+                     'microstructures: %i'%info['microstructures'],
+                    ])
+
+  errors = []
+  if np.any(info['grid'] < 1):    errors.append('invalid grid a b c.')
+  if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
+  if errors != []:
+    damask.util.croak(errors)
+    table.close(dismiss = True)
+    continue
+
+# --- read data ------------------------------------------------------------------------------------
+
+  microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F')          # read microstructure
+
+  if 'z' in options.directions:
+    microstructure = np.concatenate([microstructure,microstructure[:,:,::-1]],2)
+  if 'y' in options.directions:
+    microstructure = np.concatenate([microstructure,microstructure[:,::-1,:]],1)
+  if 'x' in options.directions:
+    microstructure = np.concatenate([microstructure,microstructure[::-1,:,:]],0)
+
+# --- do work ------------------------------------------------------------------------------------
+
+  newInfo = {
+             'size': microstructure.shape*info['size']/info['grid'],
+             'grid': microstructure.shape,
+            }
+
+
+# --- report ---------------------------------------------------------------------------------------
+
+  remarks = []
+  if (any(newInfo['grid'] != info['grid'])):
+    remarks.append('--> grid     a b c:  %s'%(' x '.join(map(str,newInfo['grid']))))
+  if (any(newInfo['size'] != info['size'])):
+    remarks.append('--> size     x y z:  %s'%(' x '.join(map(str,newInfo['size']))))
+  if remarks != []: damask.util.croak(remarks)
+
+# --- write header ---------------------------------------------------------------------------------
+
+  table.labels_clear()
+  table.info_clear()
+  table.info_append([
+    scriptID + ' ' + ' '.join(sys.argv[1:]),
+    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
+    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
+    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
+    "homogenization\t{homog}".format(homog=info['homogenization']),
+    "microstructures\t{microstructures}".format(microstructures=info['microstructures']),
+    extra_header
+    ])
+  table.head_write()
+
+# --- write microstructure information ------------------------------------------------------------
+
+  formatwidth = int(math.floor(math.log10(microstructure.max())+1))
+  table.data = microstructure.reshape((newInfo['grid'][0],np.prod(newInfo['grid'][1:])),order='F').transpose()
+  table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')
+
+# --- output finalization --------------------------------------------------------------------------
+
+  table.close()                                                                                     # close ASCII table
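The mirroring in this new script simply appends the microstructure reversed along each requested axis, so the grid, and with it the physical size, doubles in every mirrored direction; origin, homogenization, and microstructure count are carried over unchanged into the rewritten header. A minimal sketch, not from the commit, of just that core step on a made-up 2 x 2 x 2 microstructure (the real script reads grid and size from the .geom header):

    import numpy as np

    # Made-up microstructure, grid, and size for illustration.
    microstructure = np.arange(1, 9).reshape(2, 2, 2)
    grid = np.array(microstructure.shape)
    size = np.array([1.0, 1.0, 1.0])

    directions = ['x', 'z']                        # as if two mirror directions were requested
    if 'z' in directions:
        microstructure = np.concatenate([microstructure, microstructure[:, :, ::-1]], 2)
    if 'y' in directions:
        microstructure = np.concatenate([microstructure, microstructure[:, ::-1, :]], 1)
    if 'x' in directions:
        microstructure = np.concatenate([microstructure, microstructure[::-1, :, :]], 0)

    newGrid = np.array(microstructure.shape)
    newSize = newGrid*size/grid                    # doubles along each mirrored axis
    print(newGrid, newSize)                        # [4 2 4] [2. 1. 2.]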