Merge branch 'development' into even-more-HDF5-postprocessing

Martin Diehl 2019-06-04 22:39:10 +02:00
commit 1186bd16f5
41 changed files with 1238 additions and 1916 deletions

@@ -1 +1 @@
-Subproject commit d31da38cf25734a91e994a3d5d33bb048eb2f44f
+Subproject commit 64cda1c010d500f662cd9a298c7b7ad10ab91c3c


@@ -1 +1 @@
-v2.0.3-367-g70428155
+v2.0.3-369-g951134d1


@@ -52,7 +52,7 @@ def gradFFT(geomdim,field):
 # MAIN
 # --------------------------------------------------------------------
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog option [ASCIItable(s)]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
 Add column(s) containing gradient of requested column(s).
 Operates on periodic ordered three-dimensional data sets
 of vector and scalar fields.


@@ -98,6 +98,6 @@ for name in filenames:
   writer.Write()
-  if name is None: sys.stdout.write(writer.GetOutputString()[:writer.GetOutputStringLength()]) # limiting of outputString is fix for vtk <7.0
+  if name is None: sys.stdout.write(writer.GetOutputString())
   table.close()


@@ -129,6 +129,6 @@ for name in filenames:
   writer.Write()
-  if name is None: sys.stdout.write(writer.GetOutputString()[:writer.GetOutputStringLength()]) # limiting of outputString is fix for vtk <7.0
+  if name is None: sys.stdout.write(writer.GetOutputString())
   table.close()


@@ -1,85 +1,88 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
identifiers = {
'grid': ['a','b','c'],
'size': ['x','y','z'],
'origin': ['x','y','z'],
}
mappings = {
'grid': lambda x: int(x),
'size': lambda x: float(x),
'origin': lambda x: float(x),
'homogenization': lambda x: int(x),
'microstructures': lambda x: int(x),
}
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option [geomfile(s)]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Positions a geometric object within the (three-dimensional) canvas of a spectral geometry description. Inserts a primitive geometric object at a given position.
Depending on the sign of the dimension parameters, these objects can be boxes, cylinders, or ellipsoids. These objects can be boxes, cylinders, or ellipsoids.
""", version = scriptID) """, version = scriptID)
parser.add_option('-c', '--center', dest='center', parser.add_option('-c', '--center',
dest='center',
type='float', nargs = 3, metavar=' '.join(['float']*3), type='float', nargs = 3, metavar=' '.join(['float']*3),
help='a,b,c origin of primitive %default') help='a,b,c origin of primitive %default')
parser.add_option('-d', '--dimension', dest='dimension', parser.add_option('-d', '--dimension',
dest='dimension',
type='float', nargs = 3, metavar=' '.join(['float']*3), type='float', nargs = 3, metavar=' '.join(['float']*3),
help='a,b,c extension of hexahedral box; negative values are diameters') help='a,b,c extension of hexahedral box')
parser.add_option('-e', '--exponent', dest='exponent', parser.add_option('-e', '--exponent',
dest='exponent',
type='float', nargs = 3, metavar=' '.join(['float']*3), type='float', nargs = 3, metavar=' '.join(['float']*3),
help='i,j,k exponents for axes - 0 gives octahedron (|x|^(2^0) + |y|^(2^0) + |z|^(2^0) < 1), \ help='i,j,k exponents for axes: '+
1 gives a sphere (|x|^(2^1) + |y|^(2^1) + |z|^(2^1) < 1), \ '0 gives octahedron (|x|^(2^0) + |y|^(2^0) + |z|^(2^0) < 1), '+
large values produce boxes, negative turns concave.') '1 gives a sphere (|x|^(2^1) + |y|^(2^1) + |z|^(2^1) < 1), '+
parser.add_option('-f', '--fill', dest='fill', 'large values produce boxes, negative turn concave.')
type='float', metavar = 'float', parser.add_option('-f', '--fill',
help='grain index to fill primitive. "0" selects maximum microstructure index + 1 [%default]') dest='fill',
parser.add_option('-q', '--quaternion', dest='quaternion', type='float', metavar = 'int',
help='microstructure index to fill primitive, defaults to max microstructure index + 1')
parser.add_option('-q', '--quaternion',
dest='quaternion',
type='float', nargs = 4, metavar=' '.join(['float']*4), type='float', nargs = 4, metavar=' '.join(['float']*4),
help = 'rotation of primitive as quaternion') help = 'rotation of primitive as quaternion')
parser.add_option('-a', '--angleaxis', dest='angleaxis', type=float, parser.add_option('-a', '--angleaxis',
nargs = 4, metavar=' '.join(['float']*4), dest='angleaxis',
type=float, nargs = 4, metavar=' '.join(['float']*4),
help = 'axis and angle to rotate primitive') help = 'axis and angle to rotate primitive')
parser.add_option( '--degrees', dest='degrees', parser.add_option( '--degrees',
dest='degrees',
action='store_true', action='store_true',
help = 'angle is given in degrees [%default]') help = 'angle is given in degrees')
parser.add_option( '--nonperiodic', dest='periodic', parser.add_option( '--nonperiodic',
dest='periodic',
action='store_false', action='store_false',
help = 'wrap around edges [%default]') help = 'wrap around edges')
parser.add_option( '--realspace', dest='realspace', parser.add_option( '--realspace',
dest='realspace',
action='store_true', action='store_true',
help = '-c and -d span [origin,origin+size] instead of [0,grid] coordinates') help = '-c and -d span [origin,origin+size] instead of [0,grid] coordinates')
parser.add_option( '--invert', dest='inside', parser.add_option( '--invert',
dest='inside',
action='store_false', action='store_false',
help = 'invert the volume filled by the primitive (inside/outside)') help = 'invert the volume filled by the primitive (inside/outside)')
parser.add_option('--float', dest = 'float',
action = 'store_true',
help = 'use float input')
parser.set_defaults(center = (.0,.0,.0), parser.set_defaults(center = (.0,.0,.0),
fill = 0.0,
degrees = False, degrees = False,
exponent = (20,20,20), # box shape by default exponent = (20,20,20), # box shape by default
periodic = True, periodic = True,
realspace = False, realspace = False,
inside = True, inside = True,
float = False,
) )
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
if options.dimension is None: if options.dimension is None:
parser.error('no dimension specified.') parser.error('no dimension specified.')
if [options.angleaxis,options.quaternion].count(None) == 0:
parser.error('more than one rotation specified.')
if options.angleaxis is not None: if options.angleaxis is not None:
rotation = damask.Rotation.fromAxisAngle(np.array(options.angleaxis),options.degrees,normalise=True) rotation = damask.Rotation.fromAxisAngle(np.array(options.angleaxis),options.degrees,normalise=True)
elif options.quaternion is not None: elif options.quaternion is not None:
@@ -87,157 +90,49 @@ elif options.quaternion is not None:
else: else:
rotation = damask.Rotation() rotation = damask.Rotation()
datatype = 'f' if options.float else 'i'
options.center = np.array(options.center)
options.dimension = np.array(options.dimension)
# undo logarithmic sense of exponent and generate ellipsoids for negative dimensions (backward compatibility)
options.exponent = np.where(np.array(options.dimension) > 0, np.power(2,options.exponent), 2)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
grid = geom.get_grid()
size = geom.get_size()
table.head_read() # scale to box of size [1.0,1.0,1.0]
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
#--- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure
# --- do work ------------------------------------------------------------------------------------
newInfo = {
'microstructures': 0,
}
options.fill = np.nanmax(microstructure)+1 if options.fill == 0 else options.fill
microstructure = microstructure.reshape(info['grid'],order='F')
# coordinates given in real space (default) vs voxel space
if options.realspace: if options.realspace:
options.center -= info['origin'] center = (np.array(options.center) - geom.get_origin())/size
options.center *= np.array(info['grid']) / np.array(info['size']) r = np.array(options.dimension)/size/2.0
options.dimension *= np.array(info['grid']) / np.array(info['size']) else:
center = (np.array(options.center) + 0.5)/grid
r = np.array(options.dimension)/grid/2.0
grid = microstructure.shape if np.any(center<0.0) or np.any(center>=1.0): print('error')
# change to coordinate space where the primitive is the unit sphere/cube/etc offset = np.ones(3)*0.5 if options.periodic else center
if options.periodic: # use padding to achieve periodicity mask = np.full(grid,False)
(X, Y, Z) = np.meshgrid(np.arange(-grid[0]/2, (3*grid[0])/2, dtype=np.float32), # 50% padding on each side # High exponents can cause underflow & overflow - okay here, just compare to 1, so +infinity and 0 are fine
np.arange(-grid[1]/2, (3*grid[1])/2, dtype=np.float32),
np.arange(-grid[2]/2, (3*grid[2])/2, dtype=np.float32),
indexing='ij')
# Padding handling
X = np.roll(np.roll(np.roll(X,
-grid[0]//2, axis=0),
-grid[1]//2, axis=1),
-grid[2]//2, axis=2)
Y = np.roll(np.roll(np.roll(Y,
-grid[0]//2, axis=0),
-grid[1]//2, axis=1),
-grid[2]//2, axis=2)
Z = np.roll(np.roll(np.roll(Z,
-grid[0]//2, axis=0),
-grid[1]//2, axis=1),
-grid[2]//2, axis=2)
else: # nonperiodic, much lighter on resources
# change to coordinate space where the primitive is the unit sphere/cube/etc
(X, Y, Z) = np.meshgrid(np.arange(0, grid[0], dtype=np.float32),
np.arange(0, grid[1], dtype=np.float32),
np.arange(0, grid[2], dtype=np.float32),
indexing='ij')
# first by translating the center onto 0, 0.5 shifts the voxel origin onto the center of the voxel
X -= options.center[0] - 0.5
Y -= options.center[1] - 0.5
Z -= options.center[2] - 0.5
# and then by applying the rotation
(X, Y, Z) = rotation * (X, Y, Z)
# and finally by scaling (we don't worry about options.dimension being negative, np.abs occurs on the microstructure = np.where... line)
X /= options.dimension[0] * 0.5
Y /= options.dimension[1] * 0.5
Z /= options.dimension[2] * 0.5
# High exponents can cause underflow & overflow - loss of precision is okay here, we just compare it to 1, so +infinity and 0 are fine
old_settings = np.seterr()
np.seterr(over='ignore', under='ignore') np.seterr(over='ignore', under='ignore')
if options.periodic: # use padding to achieve periodicity
inside = np.zeros(grid, dtype=bool)
for i in range(2):
for j in range(2):
for k in range(2):
inside = inside | ( # Most of this is handling the padding
np.abs(X[grid[0] * i : grid[0] * (i+1),
grid[1] * j : grid[1] * (j+1),
grid[2] * k : grid[2] * (k+1)])**options.exponent[0] +
np.abs(Y[grid[0] * i : grid[0] * (i+1),
grid[1] * j : grid[1] * (j+1),
grid[2] * k : grid[2] * (k+1)])**options.exponent[1] +
np.abs(Z[grid[0] * i : grid[0] * (i+1),
grid[1] * j : grid[1] * (j+1),
grid[2] * k : grid[2] * (k+1)])**options.exponent[2] <= 1.0)
microstructure = np.where(inside,
options.fill if options.inside else microstructure,
microstructure if options.inside else options.fill)
else: # nonperiodic, much lighter on resources e = 2.0**np.array(options.exponent)
microstructure = np.where(np.abs(X)**options.exponent[0] + for x in range(grid[0]):
np.abs(Y)**options.exponent[1] + for y in range(grid[1]):
np.abs(Z)**options.exponent[2] <= 1.0, for z in range(grid[2]):
options.fill if options.inside else microstructure, coords = np.array([x+0.5,y+0.5,z+0.5])/grid
microstructure if options.inside else options.fill) mask[x,y,z] = np.sum(np.abs((rotation*(coords-offset))/r)**e) < 1
np.seterr(**old_settings) # Reset warnings to old state if options.periodic:
newInfo['microstructures'] = len(np.unique(microstructure)) shift = ((offset-center)*grid).astype(int)
mask = np.roll(mask,shift,(0,1,2))
# --- report --------------------------------------------------------------------------------------- if options.inside: mask = np.logical_not(mask)
if (newInfo['microstructures'] != info['microstructures']): fill = np.nanmax(geom.microstructure)+1 if options.fill is None else options.fill
damask.util.croak('--> microstructures: {}'.format(newInfo['microstructures']))
damask.util.croak(geom.update(np.where(mask,geom.microstructure,fill)))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
#--- write header --------------------------------------------------------------------------------- if name is None:
sys.stdout.write(str(geom.show()))
table.info_clear() else:
table.info_append(extra_header+[ geom.to_file(name)
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(newInfo['microstructures']),
])
table.labels_clear()
table.head_write()
table.output_flush()
# --- write microstructure information ------------------------------------------------------------
format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(microstructure))+1)))
table.data = microstructure.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray(format,delimiter = ' ')
#--- output finalization --------------------------------------------------------------------------
table.close()
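
The rewritten insertion reduces to a per-cell superellipsoid test: offset each cell centre from the primitive centre, rotate, scale by the semi-axes, raise the absolute components to 2^exponent, and compare the sum against 1. Below is a minimal NumPy sketch of that test only (no rotation, fractional cell-centre coordinates, made-up centre and radius); it is not the DAMASK implementation.

import numpy as np

grid   = np.array([16,16,16])
center = np.array([0.5,0.5,0.5])      # assumed: primitive centre in fractional [0,1) coordinates
r      = np.array([0.25,0.25,0.25])   # assumed: semi-axes, also fractional
e      = 2.0**np.array([20.,20.,20.]) # --exponent 20 20 20, i.e. a box-like superellipsoid

coords = np.stack(np.meshgrid(*[(np.arange(g)+0.5)/g for g in grid],indexing='ij'),axis=-1)
np.seterr(over='ignore', under='ignore')  # huge exponents may over-/underflow; only the comparison to 1 matters
mask = np.sum(np.abs((coords-center)/r)**e,axis=-1) < 1.0
print('{} of {} cells inside the primitive'.format(mask.sum(),mask.size))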


@@ -1,181 +1,77 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
# -------------------------------------------------------------------- # --------------------------------------------------------------------
# MAIN # MAIN
# -------------------------------------------------------------------- # --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [geomfile(s)]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Changes the (three-dimensional) canvas of a spectral geometry description. Increases or decreases the (three-dimensional) canvas.
Grid can be given as absolute or relative values, e.g. 16 16 16 or 2x 0.5x 32. Grid can be given as absolute or relative values, e.g. 16 16 16 or 2x 0.5x 32.
""", version = scriptID) """, version = scriptID)
parser.add_option('-g', parser.add_option('-g','--grid',
'--grid',
dest = 'grid', dest = 'grid',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3), type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'a,b,c grid of hexahedral box. [auto]') help = 'a,b,c grid of hexahedral box')
parser.add_option('-o', parser.add_option('-o','--offset',
'--offset',
dest = 'offset', dest = 'offset',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3), type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c offset from old to new origin of grid [%default]') help = 'a,b,c offset from old to new origin of grid [%default]')
parser.add_option('-f', parser.add_option('-f','--fill',
'--fill',
dest = 'fill', dest = 'fill',
type = 'float', metavar = 'float', type = 'float', metavar = 'int',
help = '(background) canvas grain index. "0" selects maximum microstructure index + 1 [%default]') help = 'background microstructure index, defaults to max microstructure index + 1')
parser.add_option('--float',
dest = 'float',
action = 'store_true',
help = 'use float input')
parser.add_option('--blank',
dest = 'blank',
action = 'store_true',
help = 'blank out (optional) input canvas content')
parser.set_defaults(grid = ['0','0','0'], parser.set_defaults(offset = (0,0,0))
offset = (0,0,0),
fill = 0.0,
float = False,
)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
datatype = 'f' if options.float else 'i'
options.grid = ['1','1','1'] if options.blank and options.grid == ['0','0','0'] else options.grid
options.fill = 1 if options.blank and options.fill == 0 else options.fill
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
if options.blank: geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
extra_header = [] origin = geom.get_origin()
info = {} size = geom.get_size()
info['grid'] = np.zeros(3,dtype=int) old = new = geom.get_grid()
info['size'] = np.zeros(3,dtype=float) offset = np.asarray(options.offset)
info['origin'] = np.zeros(3,dtype=float)
info['microstructures'] = 0 if options.grid is not None:
info['homogenization'] = 1 new = np.maximum(1,
np.array([int(o*float(n.lower().replace('x',''))) if n.lower().endswith('x') \
else int(n) for o,n in zip(old,options.grid)],dtype=int))
canvas = np.full(new,options.fill if options.fill is not None
else np.nanmax(geom.microstructure)+1,geom.microstructure.dtype)
l = np.clip( offset, 0,np.minimum(old +offset,new))
r = np.clip( offset+old,0,np.minimum(old*2+offset,new))
L = np.clip(-offset, 0,np.minimum(new -offset,old))
R = np.clip(-offset+new,0,np.minimum(new*2-offset,old))
canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = geom.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
damask.util.croak(geom.update(canvas,origin=origin+offset*size/old,rescale=True))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if name is None:
sys.stdout.write(str(geom.show()))
else: else:
geom.to_file(name)
# --- interpret header ----------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid'],datatype).reshape(info['grid'],order='F') # read microstructure
# --- do work ------------------------------------------------------------------------------------
newInfo = {
'grid': np.zeros(3,'i'),
'origin': np.zeros(3,'d'),
'microstructures': 0,
}
newInfo['grid'] = np.array([int(o*float(n.translate(None,'xX'))) if n[-1].lower() == 'x'\
else int(n) for o,n in zip(info['grid'],options.grid)],'i')
newInfo['grid'] = np.where(newInfo['grid'] > 0, newInfo['grid'],info['grid'])
microstructure_cropped = np.zeros(newInfo['grid'],datatype)
microstructure_cropped.fill(options.fill if options.float or options.fill > 0 else np.nanmax(microstructure)+1)
if not options.blank:
xindex = np.arange(max(options.offset[0],0),min(options.offset[0]+newInfo['grid'][0],info['grid'][0]))
yindex = np.arange(max(options.offset[1],0),min(options.offset[1]+newInfo['grid'][1],info['grid'][1]))
zindex = np.arange(max(options.offset[2],0),min(options.offset[2]+newInfo['grid'][2],info['grid'][2]))
translate_x = [i - options.offset[0] for i in xindex]
translate_y = [i - options.offset[1] for i in yindex]
translate_z = [i - options.offset[2] for i in zindex]
if 0 in map(len,[xindex,yindex,zindex,translate_x,translate_y,translate_z]):
damask.util.croak('invaldid combination of grid and offset.')
table.close(dismiss = True)
continue
microstructure_cropped[min(translate_x):max(translate_x)+1,
min(translate_y):max(translate_y)+1,
min(translate_z):max(translate_z)+1] \
= microstructure[min(xindex):max(xindex)+1,
min(yindex):max(yindex)+1,
min(zindex):max(zindex)+1]
newInfo['size'] = info['size']/info['grid']*newInfo['grid'] if np.all(info['grid'] > 0) else newInfo['grid']
newInfo['origin'] = info['origin']+(info['size']/info['grid'] if np.all(info['grid'] > 0) \
else newInfo['size']/newInfo['grid'])*options.offset
newInfo['microstructures'] = len(np.unique(microstructure_cropped))
# --- report ---------------------------------------------------------------------------------------
remarks = []
errors = []
if (any(newInfo['grid'] != info['grid'])):
remarks.append('--> grid a b c: {}'.format(' x '.join(map(str,newInfo['grid']))))
if (any(newInfo['size'] != info['size'])):
remarks.append('--> size x y z: {}'.format(' x '.join(map(str,newInfo['size']))))
if (any(newInfo['origin'] != info['origin'])):
remarks.append('--> origin x y z: {}'.format(' : '.join(map(str,newInfo['origin']))))
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))
if np.any(newInfo['grid'] < 1): errors.append('invalid new grid a b c.')
if np.any(newInfo['size'] <= 0.0): errors.append('invalid new size x y z.')
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- write header ---------------------------------------------------------------------------------
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {}\tb {}\tc {}".format(*newInfo['grid']),
"size\tx {}\ty {}\tz {}".format(*newInfo['size']),
"origin\tx {}\ty {}\tz {}".format(*newInfo['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(newInfo['microstructures']),
])
table.labels_clear()
table.head_write()
table.output_flush()
# --- write microstructure information ------------------------------------------------------------
format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(microstructure_cropped))+1)))
table.data = microstructure_cropped.reshape((newInfo['grid'][0],newInfo['grid'][1]*newInfo['grid'][2]),order='F').transpose()
table.data_writeArray(format,delimiter=' ')
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
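
The re-canvassing pastes the overlapping window of the old microstructure into a freshly filled array; l:r bounds that window on the new grid and L:R on the old one, each clipped so that arbitrary (negative or oversized) offsets stay in bounds. A one-dimensional sketch with assumed sizes, just to make the index arithmetic visible:

import numpy as np

old, new, offset = np.array([8]), np.array([12]), np.array([-2])  # assumed 1-D example

l = np.clip( offset,     0,np.minimum(old   +offset,new))
r = np.clip( offset+old, 0,np.minimum(old*2 +offset,new))
L = np.clip(-offset,     0,np.minimum(new   -offset,old))
R = np.clip(-offset+new, 0,np.minimum(new*2 -offset,old))

canvas = np.zeros(new,dtype=int)
canvas[l[0]:r[0]] = np.arange(1,old[0]+1)[L[0]:R[0]]
print(canvas)  # the overlapping window of the old data, placed inside the new canvas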

processing/pre/geom_check.py (new executable file)

@@ -0,0 +1,39 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
Writes vtk file for visualization.
""", version = scriptID)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.util.croak(geom)
if name is None:
sys.stdout.write(geom.to_vtk())
else:
geom.to_vtk(os.path.splitext(name)[0])
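
geom_check.py is the smallest instance of the read/write skeleton shared by the scripts rewritten in this commit: build a damask.Geom from stdin (no file name given) or from a file, report it, and stream the result to stdout or write it back. A condensed sketch of that skeleton, using only the Geom and util calls visible in the diffs above (requires the damask package):

import sys
from io import StringIO

import damask

def process(name=None):
  geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
  damask.util.croak(geom)                 # one-line report of the geometry
  if name is None:
    sys.stdout.write(str(geom.show()))    # piped usage: geometry flows on to the next script
  else:
    geom.to_file(name)                    # otherwise update the .geom file in place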


@@ -1,25 +0,0 @@
#!/usr/bin/env bash
if [[ "$1" == "-f" || "$1" == "--float" ]]; then
shift
arg='--float'
else
arg=''
fi
for geom in "$@"
do
geom_toTable $arg \
< $geom \
| \
vtk_rectilinearGrid > ${geom%.*}.vtk
geom_toTable $arg \
< $geom \
| \
vtk_addRectilinearGridData \
--vtk ${geom%.*}.vtk \
--data microstructure \
rm ${geom%.*}.vtk
done


@@ -1,17 +1,23 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
import damask from io import StringIO
from scipy import ndimage
from optparse import OptionParser from optparse import OptionParser
from scipy import ndimage
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
def mostFrequent(arr): def mostFrequent(arr):
return np.argmax(np.bincount(arr.astype('int'))) unique, inverse = np.unique(arr, return_inverse=True)
return unique[np.argmax(np.bincount(inverse))]
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
@@ -19,76 +25,33 @@ def mostFrequent(arr):
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Smooth geometry by selecting most frequent microstructure index within given stencil at each location. Smooth microstructure by selecting most frequent index within given stencil at each location.
""", version=scriptID) """, version=scriptID)
parser.add_option('-s','--stencil', parser.add_option('-s','--stencil',
dest = 'stencil', dest = 'stencil',
type = 'int', metavar = 'int', type = 'int', metavar = 'int',
help = 'size of smoothing stencil [%default]') help = 'size of smoothing stencil [%default]')
parser.set_defaults(stencil = 3, parser.set_defaults(stencil = 3)
)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read() damask.util.croak(geom.update(ndimage.filters.generic_filter(
info,extra_header = table.head_getGeom() geom.microstructure,mostFrequent,
damask.util.report_geom(info) size=(options.stencil,)*3).astype(geom.microstructure.dtype)))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
errors = [] if name is None:
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') sys.stdout.write(str(geom.show()))
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.') else:
if errors != []: geom.to_file(name)
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure
# --- do work ------------------------------------------------------------------------------------
microstructure = ndimage.filters.generic_filter(microstructure,mostFrequent,size=(options.stencil,)*3).astype('int_')
newInfo = {'microstructures': len(np.unique(microstructure))}
# --- report ---------------------------------------------------------------------------------------
if ( newInfo['microstructures'] != info['microstructures']):
damask.util.croak('--> microstructures: %i'%newInfo['microstructures'])
info['microstructures'] == newInfo['microstructures']
# --- write header ---------------------------------------------------------------------------------
table.info_clear()
table.info_append([scriptID + ' ' + ' '.join(sys.argv[1:]),])
table.head_putGeom(info)
table.info_append([extra_header])
table.labels_clear()
table.head_write()
# --- write microstructure information ------------------------------------------------------------
formatwidth = int(math.floor(math.log10(np.nanmax(microstructure))+1))
table.data = microstructure.reshape((info['grid'][0],np.prod(info['grid'][1:])),order='F').transpose()
table.data_writeArray('%{}i'.format(formatwidth),delimiter = ' ')
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
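
The new mostFrequent helper returns the modal value within each stencil without assuming consecutive integer labels, unlike the old bincount-on-raw-values version. A standalone demonstration of the mode filter with scipy.ndimage on toy labels, using the default 3x3x3 stencil:

import numpy as np
from scipy import ndimage

def mostFrequent(arr):
  unique, inverse = np.unique(arr, return_inverse=True)
  return unique[np.argmax(np.bincount(inverse))]

microstructure = np.random.randint(1,4,size=(8,8,8))   # toy labels 1..3
smoothed = ndimage.generic_filter(microstructure,mostFrequent,size=(3,)*3)
print(np.unique(smoothed), smoothed.dtype)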


@@ -1,11 +1,15 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,h5py import os
import numpy as np import sys
from optparse import OptionParser from optparse import OptionParser
import h5py
import numpy as np
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
@@ -14,40 +18,50 @@ scriptID = ' '.join([scriptName,damask.version])
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [dream3dfile[s]]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [DREAM.3Dfile(s)]', description = """
Convert DREAM3D file to geometry file. This can be done from cell data (direct pointwise takeover) or Converts DREAM.3D file. Input can be cell data (direct pointwise takeover) or grain data (individual
from grain data (individual grains are segmented). Requires orientation data as quaternion. grains are segmented). Requires orientation data as quaternion.
""", version = scriptID) """, version = scriptID)
parser.add_option('-b','--basegroup', parser.add_option('-b','--basegroup',
dest = 'basegroup', metavar = 'string', dest = 'basegroup',
metavar = 'string',
help = 'name of the group in "DataContainers" containing the pointwise (and, if applicable grain average) data') help = 'name of the group in "DataContainers" containing the pointwise (and, if applicable grain average) data')
parser.add_option('-p','--pointwise', parser.add_option('-p','--pointwise',
dest = 'pointwise', metavar = 'string', dest = 'pointwise',
metavar = 'string',
help = 'name of the group in "DataContainers/<basegroup>" containing pointwise data [%default]') help = 'name of the group in "DataContainers/<basegroup>" containing pointwise data [%default]')
parser.add_option('-a','--average', parser.add_option('-a','--average',
dest = 'average', metavar = 'string', dest = 'average',
metavar = 'string',
help = 'name of the group in "DataContainers</basegroup>" containing grain average data. '\ help = 'name of the group in "DataContainers</basegroup>" containing grain average data. '\
+ 'Leave empty for pointwise data') + 'Leave empty for pointwise data')
parser.add_option('--phase', parser.add_option('--phase',
dest = 'phase', dest = 'phase',
type = 'string', metavar = 'string', type = 'string',
metavar = 'string',
help = 'name of the dataset containing pointwise/average phase IDs [%default]') help = 'name of the dataset containing pointwise/average phase IDs [%default]')
parser.add_option('--microstructure', parser.add_option('--microstructure',
dest = 'microstructure', dest = 'microstructure',
type = 'string', metavar = 'string', type = 'string',
metavar = 'string',
help = 'name of the dataset connecting pointwise and average data [%default]') help = 'name of the dataset connecting pointwise and average data [%default]')
parser.add_option('-q', '--quaternion', parser.add_option('-q', '--quaternion',
dest = 'quaternion', dest = 'quaternion',
type = 'string', metavar='string', type = 'string',
metavar='string',
help = 'name of the dataset containing pointwise/average orientation as quaternion [%default]') help = 'name of the dataset containing pointwise/average orientation as quaternion [%default]')
parser.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index to be used [%default]')
parser.set_defaults(pointwise = 'CellData', parser.set_defaults(pointwise = 'CellData',
quaternion = 'Quats', quaternion = 'Quats',
phase = 'Phases', phase = 'Phases',
microstructure = 'FeatureIds', microstructure = 'FeatureIds',
crystallite = 1, homogenization = 1,
) )
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
@@ -57,70 +71,62 @@ if options.basegroup is None:
rootDir ='DataContainers' rootDir ='DataContainers'
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: parser.error('no input file specified.') if filenames == []: parser.error('no input file specified.')
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(outname = os.path.splitext(name)[0]+'.geom',
buffered = False, labeled=False,
)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
errors = [] errors = []
info = {}
ori = []
inFile = h5py.File(name, 'r') inFile = h5py.File(name, 'r')
group_geom = os.path.join(rootDir,options.basegroup,'_SIMPL_GEOMETRY') group_geom = os.path.join(rootDir,options.basegroup,'_SIMPL_GEOMETRY')
try: try:
info['size'] = inFile[os.path.join(group_geom,'DIMENSIONS')][...] \ size = inFile[os.path.join(group_geom,'DIMENSIONS')][...] \
* inFile[os.path.join(group_geom,'SPACING')][...] * inFile[os.path.join(group_geom,'SPACING')][...]
info['grid'] = inFile[os.path.join(group_geom,'DIMENSIONS')][...] grid = inFile[os.path.join(group_geom,'DIMENSIONS')][...]
info['origin'] = inFile[os.path.join(group_geom,'ORIGIN')][...] origin = inFile[os.path.join(group_geom,'ORIGIN')][...]
except: except:
errors.append('Geometry data ({}) not found'.format(group_geom)) errors.append('Geometry data ({}) not found'.format(group_geom))
group_pointwise = os.path.join(rootDir,options.basegroup,options.pointwise) group_pointwise = os.path.join(rootDir,options.basegroup,options.pointwise)
if options.average is None: if options.average is None:
label = 'point' label = 'Point'
N_microstructure = np.product(info['grid'])
dataset = os.path.join(group_pointwise,options.quaternion) dataset = os.path.join(group_pointwise,options.quaternion)
try: try:
quats = np.reshape(inFile[dataset][...],(N_microstructure,4)) quats = np.reshape(inFile[dataset][...],(np.product(grid),4))
texture = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats] rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats]
except: except:
errors.append('Pointwise orientation (quaternion) data ({}) not readable'.format(dataset)) errors.append('Pointwise orientation (quaternion) data ({}) not readable'.format(dataset))
dataset = os.path.join(group_pointwise,options.phase) dataset = os.path.join(group_pointwise,options.phase)
try: try:
phase = np.reshape(inFile[dataset][...],(N_microstructure)) phase = np.reshape(inFile[dataset][...],(np.product(grid)))
except: except:
errors.append('Pointwise phase data ({}) not readable'.format(dataset)) errors.append('Pointwise phase data ({}) not readable'.format(dataset))
microstructure = np.arange(1,np.product(grid)+1,dtype=int).reshape(grid,order='F')
else: else:
label = 'grain' label = 'Grain'
dataset = os.path.join(group_pointwise,options.microstructure) dataset = os.path.join(group_pointwise,options.microstructure)
try: try:
microstructure = np.reshape(inFile[dataset][...],(np.product(info['grid']))) microstructure = np.transpose(inFile[dataset][...].reshape(grid[::-1]),(2,1,0)) # convert from C ordering
N_microstructure = np.max(microstructure)
except: except:
errors.append('Link between pointwise and grain average data ({}) not readable'.format(dataset)) errors.append('Link between pointwise and grain average data ({}) not readable'.format(dataset))
group_average = os.path.join(rootDir,options.basegroup,options.average) group_average = os.path.join(rootDir,options.basegroup,options.average)
dataset = os.path.join(group_average,options.quaternion) dataset = os.path.join(group_average,options.quaternion)
try: try:
texture = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]] # skip first entry (unindexed) rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]] # skip first entry (unindexed)
except: except:
errors.append('Average orientation data ({}) not readable'.format(dataset)) errors.append('Average orientation data ({}) not readable'.format(dataset))
dataset = os.path.join(group_average,options.phase) dataset = os.path.join(group_average,options.phase)
try: try:
phase = [i[0] for i in inFile[dataset][...]][1:] # skip first entry (unindexed) phase = [i[0] for i in inFile[dataset][...]][1:] # skip first entry (unindexed)
@@ -129,60 +135,24 @@ for name in filenames:
if errors != []: if errors != []:
damask.util.croak(errors) damask.util.croak(errors)
table.close(dismiss = True)
continue continue
mat = damask.Material()
mat.verbose = False
# dummy <homogenization> config_header = ['<texture>']
h = damask.config.material.Homogenization() for i in range(np.nanmax(microstructure)):
mat.add_section('Homogenization','none',h) config_header += ['[{}{}]'.format(label,i+1),
info['homogenization'] = 1 '(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*rot[i].asEulers(degrees = True)),
]
# <crystallite> placeholder (same for all microstructures at the moment) config_header += ['<microstructure>']
c = damask.config.material.Crystallite() for i in range(np.nanmax(microstructure)):
mat.add_section('Crystallite','tbd',c) config_header += ['[{}{}]'.format(label,i+1),
'crystallite 1',
# <phase> placeholders '(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(phase[i],i+1),
for i in range(np.max(phase)): ]
p = damask.config.material.Phase()
mat.add_section('phase','phase{}-tbd'.format(i+1),p)
# <texture> header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
for i,o in enumerate(texture): + config_header
t = damask.config.material.Texture() geom = damask.Geom(microstructure,size,origin,
t.add_component('gauss',{'eulers':o.asEulers(degrees=True)}) homogenization=options.homogenization,comments=header)
mat.add_section(part='texture', section='{}{}'.format(label,i+1),initialData=t) damask.util.croak(geom)
# <microstructure> geom.to_file(os.path.splitext(name)[0]+'.geom')
for i in range(N_microstructure):
m = damask.config.material.Microstructure()
mat.add_section('microstructure','{}{}'.format(label,i+1),m)
mat.add_microstructure('{}{}'.format(label,i+1),
{'phase': 'phase{}-tbd'.format(phase[i]),
'texture':'{}{}'.format(label,i+1),
'crystallite':'tbd',
'fraction':1
})
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
str(mat).split('\n')
])
table.head_write()
if options.average is None:
table.data = [1, 'to', format(N_microstructure)]
table.data_write()
else:
table.data = microstructure.reshape(info['grid'][1]*info['grid'][2],info['grid'][0])
table.data_writeArray()
table.close()
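
For orientation, the geometry datasets the converter reads live under DataContainers/<basegroup>/_SIMPL_GEOMETRY. A small h5py sketch of just that read, with a hypothetical file name and base group:

import os
import h5py

with h5py.File('example.dream3d','r') as f:   # hypothetical file name
  group_geom = os.path.join('DataContainers','MyContainer','_SIMPL_GEOMETRY')  # 'MyContainer' stands for --basegroup
  grid   = f[os.path.join(group_geom,'DIMENSIONS')][...]
  size   = grid * f[os.path.join(group_geom,'SPACING')][...]
  origin = f[os.path.join(group_geom,'ORIGIN')][...]
print(grid, size, origin)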


@@ -1,28 +1,33 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
minimal_surfaces = ['primitive','gyroid','diamond']
surface = {
'primitive': lambda x,y,z: np.cos(x)+np.cos(y)+np.cos(z),
'gyroid': lambda x,y,z: np.sin(x)*np.cos(y)+np.sin(y)*np.cos(z)+np.cos(x)*np.sin(z),
'diamond': lambda x,y,z: np.cos(x-y)*np.cos(z)+np.sin(x+y)*np.sin(z),
}
# -------------------------------------------------------------------- # --------------------------------------------------------------------
# MAIN # MAIN
# -------------------------------------------------------------------- # --------------------------------------------------------------------
minimal_surfaces = ['primitive','gyroid','diamond',] parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile]', description = """
Generate a bicontinuous structure of given type.
surface = {
'primitive': lambda x,y,z: math.cos(x)+math.cos(y)+math.cos(z),
'gyroid': lambda x,y,z: math.sin(x)*math.cos(y)+math.sin(y)*math.cos(z)+math.cos(x)*math.sin(z),
'diamond': lambda x,y,z: math.cos(x-y)*math.cos(z)+math.sin(x+y)*math.sin(z),
}
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] [geomfile]', description = """
Generate a geometry file of a bicontinuous structure of given type.
""", version = scriptID) """, version = scriptID)
@@ -55,6 +60,7 @@ parser.add_option('--m',
dest = 'microstructure', dest = 'microstructure',
type = 'int', nargs = 2, metavar = 'int int', type = 'int', nargs = 2, metavar = 'int int',
help = 'two microstructure indices to be used [%default]') help = 'two microstructure indices to be used [%default]')
parser.set_defaults(type = minimal_surfaces[0], parser.set_defaults(type = minimal_surfaces[0],
threshold = 0.0, threshold = 0.0,
periods = 1, periods = 1,
@@ -64,67 +70,26 @@ parser.set_defaults(type = minimal_surfaces[0],
microstructure = (1,2), microstructure = (1,2),
) )
(options,filenames) = parser.parse_args() (options,filename) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(outname = name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ make grid ------------------------------------- name = None if filename == [] else filename[0]
damask.util.report(scriptName,name)
info = { x,y,z = np.meshgrid(options.periods*2.0*np.pi*(np.arange(options.grid[0])+0.5)/options.grid[0],
'grid': np.array(options.grid), options.periods*2.0*np.pi*(np.arange(options.grid[1])+0.5)/options.grid[1],
'size': np.array(options.size), options.periods*2.0*np.pi*(np.arange(options.grid[2])+0.5)/options.grid[2],
'origin': np.zeros(3,'d'), indexing='xy',sparse=True)
'microstructures': max(options.microstructure),
'homogenization': options.homogenization
}
#--- report --------------------------------------------------------------------------------------- microstructure = np.where(options.threshold < surface[options.type](x,y,z),
options.microstructure[1],options.microstructure[0])
damask.util.report_geom(info) geom=damask.Geom(microstructure,options.size,
homogenization=options.homogenization,
comments=[scriptID + ' ' + ' '.join(sys.argv[1:])])
damask.util.croak(geom)
errors = [] if name is None:
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') sys.stdout.write(str(geom.show()))
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.') else:
if errors != []: geom.to_file(name)
damask.util.croak(errors)
table.close(dismiss = True)
continue
#--- write header ---------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.head_write()
#--- write data -----------------------------------------------------------------------------------
X = options.periods*2.0*math.pi*(np.arange(options.grid[0])+0.5)/options.grid[0]
Y = options.periods*2.0*math.pi*(np.arange(options.grid[1])+0.5)/options.grid[1]
Z = options.periods*2.0*math.pi*(np.arange(options.grid[2])+0.5)/options.grid[2]
for z in range(options.grid[2]):
for y in range(options.grid[1]):
table.data_clear()
for x in range(options.grid[0]):
table.data_append(options.microstructure[options.threshold < surface[options.type](X[x],Y[y],Z[z])])
table.data_write()
table.close()
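
The generator now samples the chosen level-set on a (sparse) meshgrid in one vectorized step and thresholds it, instead of looping row by row. A sketch for the gyroid surface with assumed grid, period, and threshold values:

import numpy as np

grid, periods, threshold = (64,64,64), 1, 0.0   # assumed values
x,y,z = np.meshgrid(*[periods*2.0*np.pi*(np.arange(g)+0.5)/g for g in grid],
                    indexing='ij',sparse=True)
gyroid = np.sin(x)*np.cos(y) + np.sin(y)*np.cos(z) + np.cos(x)*np.sin(z)
microstructure = np.where(threshold < gyroid,2,1)   # the two indices given by --m
print(np.bincount(microstructure.ravel())[1:])      # roughly equal phase fractions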


@@ -1,46 +1,66 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
# -------------------------------------------------------------------- # --------------------------------------------------------------------
# MAIN # MAIN
# -------------------------------------------------------------------- # --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] [geomfile]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile]', description = """
Generate a geometry file of an osteon enclosing the Harvesian canal and separated by interstitial tissue. Generate description of an osteon enclosing the Harvesian canal and separated by interstitial tissue.
The osteon phase is lamellar with a twisted plywood structure. The osteon phase is lamellar with a twisted plywood structure.
Its fiber orientation is oscillating by +/- amplitude within one period. Its fiber orientation is oscillating by +/- amplitude within one period.
""", version = scriptID) """, version = scriptID)
parser.add_option('-g', '--grid', dest='grid', type='int', nargs=2, metavar = 'int int', parser.add_option('-g', '--grid',
dest='grid', type='int',
nargs=2, metavar = 'int int',
help='a,b grid of hexahedral box [%default]') help='a,b grid of hexahedral box [%default]')
parser.add_option('-s', '--size', dest='size', type='float', nargs=2, metavar = 'float float', parser.add_option('-s', '--size',
dest='size',
type='float', nargs=2, metavar = 'float float',
help='x,y size of hexahedral box [%default]') help='x,y size of hexahedral box [%default]')
parser.add_option('-c', '--canal', dest='canal', type='float', metavar = 'float', parser.add_option('-c', '--canal',
dest='canal',
type='float', metavar = 'float',
help='Haversian canal radius [%default]') help='Haversian canal radius [%default]')
parser.add_option('-o', '--osteon', dest='osteon', type='float', metavar = 'float', parser.add_option('-o', '--osteon',
dest='osteon',
type='float', metavar = 'float',
help='horizontal osteon radius [%default]') help='horizontal osteon radius [%default]')
parser.add_option('-l', '--lamella', dest='period', type='float', metavar = 'float', parser.add_option('-l', '--lamella',
dest='period',
type='float', metavar = 'float',
help='lamella width [%default]') help='lamella width [%default]')
parser.add_option('-a', '--amplitude', dest='amplitude', type='float', metavar = 'float', parser.add_option('-a', '--amplitude',
dest='amplitude',
type='float', metavar = 'float',
help='amplitude of twisted plywood wiggle in deg [%default]') help='amplitude of twisted plywood wiggle in deg [%default]')
parser.add_option( '--aspect', dest='aspect', type='float', metavar = 'float', parser.add_option( '--aspect',
dest='aspect',
type='float', metavar = 'float',
help='vertical/horizontal osteon aspect ratio [%default]') help='vertical/horizontal osteon aspect ratio [%default]')
parser.add_option('-w', '--omega', dest='omega', type='float', metavar = 'float', parser.add_option('-w', '--omega',
dest='omega',
type='float', metavar = 'float',
help='rotation angle around normal of osteon [%default]') help='rotation angle around normal of osteon [%default]')
parser.add_option('--homogenization', dest='homogenization', type='int', metavar = 'int', parser.add_option( '--homogenization',
dest='homogenization',
type='int', metavar = 'int',
help='homogenization index to be used [%default]') help='homogenization index to be used [%default]')
parser.add_option('--crystallite', dest='crystallite', type='int', metavar = 'int',
help='crystallite index to be used [%default]')
parser.set_defaults(canal = 25e-6, parser.set_defaults(canal = 25e-6,
osteon = 100e-6, osteon = 100e-6,
@@ -50,107 +70,82 @@ parser.set_defaults(canal = 25e-6,
amplitude = 60, amplitude = 60,
size = (300e-6,300e-6), size = (300e-6,300e-6),
grid = (512,512), grid = (512,512),
homogenization = 1, homogenization = 1)
crystallite = 1)
(options,filename) = parser.parse_args() (options,filename) = parser.parse_args()
if np.any(np.array(options.grid) < 2):
parser('invalid grid a b c.')
if np.any(np.array(options.size) <= 0.0):
parser('invalid size x y z.')
# --- open input files ---------------------------------------------------------------------------- name = None if filename == [] else filename[0]
damask.util.report(scriptName,name)
if filename == []: filename = [None] omega = np.deg2rad(options.omega)
rotation = np.array([[ np.cos(omega),np.sin(omega),],
[-np.sin(omega),np.cos(omega),]])
table = damask.ASCIItable(outname = filename[0], grid = np.array(options.grid,'i')
buffered = False, labeled=False) size = np.array(options.size,'d')
damask.util.report(scriptName,filename[0]) X0,Y0 = np.meshgrid(size[0]/grid[0] * (np.arange(grid[0]) - grid[0]/2 + 0.5),
size[1]/grid[0] * (np.arange(grid[1]) - grid[1]/2 + 0.5), indexing='ij')
options.omega *= math.pi/180.0 # rescale ro radians
rotation = np.array([[ math.cos(options.omega),math.sin(options.omega),],
[-math.sin(options.omega),math.cos(options.omega),]],'d')
box = np.dot(np.array([[options.canal,0.],[0.,options.aspect*options.canal]]).transpose(),rotation)
info = {
'grid': np.ones(3,'i'),
'size': np.ones(3,'d'),
'origin': np.zeros(3,'d'),
'microstructures': 3,
'homogenization': options.homogenization,
}
info['grid'][:2] = np.array(options.grid,'i')
info['size'][:2] = np.array(options.size,'d')
info['size'][2] = min(info['size'][0]/info['grid'][0],info['size'][1]/info['grid'][1])
info['origin'] = -info['size']/2.0
X0 = info['size'][0]/info['grid'][0]*\
(np.tile(np.arange(info['grid'][0]),(info['grid'][1],1)) - info['grid'][0]/2 + 0.5)
Y0 = info['size'][1]/info['grid'][1]*\
(np.tile(np.arange(info['grid'][1]),(info['grid'][0],1)).transpose() - info['grid'][1]/2 + 0.5)
X = X0*rotation[0,0] + Y0*rotation[0,1] # rotate by omega X = X0*rotation[0,0] + Y0*rotation[0,1] # rotate by omega
Y = X0*rotation[1,0] + Y0*rotation[1,1] # rotate by omega Y = X0*rotation[1,0] + Y0*rotation[1,1] # rotate by omega
radius = np.sqrt(X*X + Y*Y/options.aspect/options.aspect) radius = np.sqrt(X*X + Y*Y/options.aspect**2.0)
alpha = np.degrees(np.arctan2(Y/options.aspect,X)) alpha = np.degrees(np.arctan2(Y/options.aspect,X))
beta = options.amplitude*np.sin(2.0*math.pi*(radius-options.canal)/options.period) beta = options.amplitude*np.sin(2.0*np.pi*(radius-options.canal)/options.period)
microstructure = np.where(radius < float(options.canal),1,0) + np.where(radius > float(options.osteon),2,0) microstructure = np.where(radius < float(options.canal), 1,0) \
+ np.where(radius > float(options.osteon),2,0)
alphaOfGrain = np.zeros(info['grid'][0]*info['grid'][1],'d') # extend to 3D
betaOfGrain = np.zeros(info['grid'][0]*info['grid'][1],'d') size = np.append(size,np.min(size/grid))
for y in range(info['grid'][1]): grid = np.append(grid,1)
for x in range(info['grid'][0]): microstructure = microstructure.reshape(microstructure.shape+(1,))
if microstructure[y,x] == 0:
microstructure[y,x] = info['microstructures']
alphaOfGrain[info['microstructures']] = alpha[y,x]
betaOfGrain[ info['microstructures']] = beta[y,x]
info['microstructures'] += 1
#--- report --------------------------------------------------------------------------------------- Alpha = np.zeros(grid[0]*grid[1],'d')
damask.util.report_geom(info,['grid','size','origin','homogenization','microstructures']) Beta = np.zeros(grid[0]*grid[1],'d')
formatwidth = 1+int(math.floor(math.log10(info['microstructures']-1))) i = 3
header = [scriptID + ' ' + ' '.join(sys.argv[1:])] for y in range(grid[1]):
header.append('<microstructure>') for x in range(grid[0]):
header.append('[canal]') if microstructure[x,y] == 0:
header.append('crystallite %i'%options.crystallite) microstructure[x,y] = i
header.append('(constituent)\tphase 1\ttexture 1\tfraction 1.0') Alpha[i] = alpha[x,y]
header.append('[interstitial]') Beta [i] = beta [x,y]
header.append('crystallite %i'%options.crystallite) i+=1
header.append('(constituent)\tphase 2\ttexture 2\tfraction 1.0')
for i in range(3,info['microstructures']):
header.append('[Grain%s]'%(str(i).zfill(formatwidth)))
header.append('crystallite %i'%options.crystallite)
header.append('(constituent)\tphase 3\ttexture %s\tfraction 1.0'%(str(i).rjust(formatwidth)))
header.append('<texture>') config_header = ['<texture>',
header.append('[canal]') '[canal]',
header.append('[interstitial]') '[interstitial]'
for i in range(3,info['microstructures']): ]
header.append('[Grain%s]'%(str(i).zfill(formatwidth))) for i in range(3,np.max(microstructure)):
header.append('(gauss)\tphi1 %g\tPhi %g\tphi2 0\tscatter 0.0\tfraction 1.0'\ config_header += ['[Point{}]'.format(i-2),
%(alphaOfGrain[i],betaOfGrain[i])) '(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 0'.format(Alpha[i],Beta[i])
header.append([ ]
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures'])])
table.info_append(header)
table.head_write()
# --- write microstructure information ------------------------------------------------------------
table.data = microstructure.reshape(info['grid'][1]*info['grid'][2],info['grid'][0]) config_header = ['<microstructure>',
table.data_writeArray('%%%ii'%(formatwidth),delimiter=' ') '[canal]',
'crystallite 1',
#--- output finalization -------------------------------------------------------------------------- '(constituent)\tphase 1\ttexture 1\tfraction 1.0',
table.close() '[interstitial]',
'crystallite 1',
'(constituent)\tphase 2\ttexture 2\tfraction 1.0'
]
for i in range(3,np.max(microstructure)):
config_header += ['[Point{}]'.format(i-2),
'crystallite 1',
'(constituent)\tphase 3\ttexture {}\tfraction 1.0'.format(i)
]
header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
+ config_header
geom = damask.Geom(microstructure.reshape(grid),
size,-size/2,
homogenization=options.homogenization,comments=header)
damask.util.croak(geom)
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
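Note: the osteon generator above builds its 2D lamellar pattern from an elliptical radius, an in-plane angle alpha, and a sinusoidally oscillating twist beta before filling the remaining cells with per-point grains. A minimal standalone sketch of just that geometry step, with made-up values standing in for options.canal, options.osteon, options.aspect, options.amplitude, and options.period:

```python
import numpy as np

# hypothetical stand-ins for the script's options
canal, osteon = 25e-6, 100e-6      # canal and osteon radius [m]
aspect        = 1.5                # aspect ratio of the elliptical osteon
amplitude     = 60.0               # amplitude of the lamellar twist [deg]
period        = 5e-6               # lamellar period [m]

grid = np.array([16,16])
size = np.array([300e-6,300e-6])
x = (np.arange(grid[0])+0.5)*size[0]/grid[0] - size[0]/2       # cell centers, canal at the origin
y = (np.arange(grid[1])+0.5)*size[1]/grid[1] - size[1]/2
X,Y = np.meshgrid(x,y)

radius = np.sqrt(X*X + Y*Y/aspect**2.0)                        # elliptical distance from the canal center
alpha  = np.degrees(np.arctan2(Y/aspect,X))                    # in-plane angle of each cell
beta   = amplitude*np.sin(2.0*np.pi*(radius-canal)/period)     # lamellar twist oscillating with radius

# 1 = canal, 2 = interstitial tissue, 0 = lamellae (later replaced by per-cell grain indices)
microstructure = np.where(radius < canal,1,0) + np.where(radius > osteon,2,0)
print(np.unique(microstructure), alpha.shape, beta.shape)
```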

View File

@ -0,0 +1,69 @@
#!/usr/bin/env python3
import os
import sys
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Generate homogeneous geometry.
""", version = scriptID)
parser.add_option('-g','--grid',
dest = 'grid',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c grid of hexahedral box %default')
parser.add_option('-s', '--size',
dest = 'size',
type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
help = 'x,y,z of geometry size')
parser.add_option('-o','--origin',
dest = 'origin',
type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
help = 'x,y,z of geometry origin %default')
parser.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index [%default]')
parser.add_option('-f','--fill',
dest = 'fill',
type = 'float', metavar = 'int',
help = 'microstructure index [%default]')
parser.set_defaults(grid = (16,16,16),
origin = (0.,0.,0.),
homogenization = 1,
fill = 1,
)
(options, filename) = parser.parse_args()
name = None if filename == [] else filename[0]
damask.util.report(scriptName,name)
dtype = float if np.isnan(options.fill) or int(options.fill) != options.fill else int
geom = damask.Geom(microstructure=np.full(options.grid,options.fill,dtype=dtype),
size=options.size,
origin=options.origin,
homogenization=options.homogenization,
comments=scriptID + ' ' + ' '.join(sys.argv[1:]))
damask.util.croak(geom)
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
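Note: the only non-obvious line in the new homogeneous-geometry script above is the dtype selection for the fill value. A small sketch of that logic in isolation (the fill values are made up):

```python
import numpy as np

def fill_dtype(fill):
    # integer microstructure for whole-number fills, float for NaN or fractional fills
    return float if np.isnan(fill) or int(fill) != fill else int

for fill in [1.0, 2.5, float('nan')]:                          # example fills
    dtype = fill_dtype(fill)
    microstructure = np.full((4,4,4), fill, dtype=dtype)       # homogeneous box, one index everywhere
    print(fill, microstructure.dtype)
```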

View File

@ -1,22 +1,25 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
# -------------------------------------------------------------------- # --------------------------------------------------------------------
# MAIN # MAIN
# -------------------------------------------------------------------- # --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Generate geometry description and material configuration from position, phase, and orientation (or microstructure) data. Converts an ASCII table into a geometry description. Input can be a microstructure index or an orientation (as quaternion); for the latter,
phase information can additionally be given.
""", version = scriptID) """, version = scriptID)
@ -40,30 +43,26 @@ parser.add_option('--axes',
dest = 'axes', dest = 'axes',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3), type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'orientation coordinate frame in terms of position coordinate frame [+x +y +z]') help = 'orientation coordinate frame in terms of position coordinate frame [+x +y +z]')
parser.add_option('--homogenization', parser.add_option('--homogenization',
dest = 'homogenization', dest = 'homogenization',
type = 'int', metavar = 'int', type = 'int', metavar = 'int',
help = 'homogenization index to be used [%default]') help = 'homogenization index to be used [%default]')
parser.add_option('--crystallite',
dest = 'crystallite',
type = 'int', metavar = 'int',
help = 'crystallite index to be used [%default]')
parser.set_defaults(homogenization = 1, parser.set_defaults(homogenization = 1,
crystallite = 1,
pos = 'pos', pos = 'pos',
) )
(options,filenames) = parser.parse_args() (options,filenames) = parser.parse_args()
input = [ options.quaternion is not None, input = [options.quaternion is not None,
options.microstructure is not None, options.microstructure is not None,
] ]
if np.sum(input) != 1: if np.sum(input) != 1:
parser.error('need either microstructure label or exactly one orientation input format.') parser.error('need either microstructure or quaternion (and optionally phase) as input.')
if options.microstructure is not None and options.phase is not None:
parser.error('phase can only be combined with quaternion input, not with microstructure.')
if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x','y','+y','-y','z','+z','-z'])): if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x','y','+y','-y','z','+z','-z'])):
parser.error('invalid axes {} {} {}.'.format(*options.axes)) parser.error('invalid axes {} {} {}.'.format(*options.axes))
@ -71,23 +70,15 @@ if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x
(options.microstructure,1,'microstructure'), (options.microstructure,1,'microstructure'),
][np.where(input)[0][0]] # select input label that was requested ][np.where(input)[0][0]] # select input label that was requested
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[-2]+'.geom' if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
table = damask.ASCIItable(name = name,readonly=True)
# ------------------------------------------ read head ---------------------------------------
table.head_read() # read ASCII header info table.head_read() # read ASCII header info
# ------------------------------------------ sanity checks --------------------------------------- # ------------------------------------------ sanity checks ---------------------------------------
coordDim = table.label_dimension(options.pos) coordDim = table.label_dimension(options.pos)
@ -98,135 +89,70 @@ for name in filenames:
errors.append('input "{}" needs to have dimension {}.'.format(label,dim)) errors.append('input "{}" needs to have dimension {}.'.format(label,dim))
if options.phase and table.label_dimension(options.phase) != 1: if options.phase and table.label_dimension(options.phase) != 1:
errors.append('phase column "{}" is not scalar.'.format(options.phase)) errors.append('phase column "{}" is not scalar.'.format(options.phase))
if errors != []: if errors != []:
damask.util.croak(errors) damask.util.croak(errors)
table.close(dismiss = True)
continue continue
table.data_readArray([options.pos] \ table.data_readArray([options.pos] \
+ (label if isinstance(label, list) else [label]) \ + (label if isinstance(label, list) else [label]) \
+ ([options.phase] if options.phase else [])) + ([options.phase] if options.phase else []))
if coordDim == 2: if coordDim == 2:
table.data = np.insert(table.data,2,np.zeros(len(table.data)),axis=1) # add zero z coordinate for two-dimensional input table.data = np.insert(table.data,2,np.zeros(len(table.data)),axis=1) # add zero z coordinate for two-dimensional input
if options.phase is None: if options.phase is None:
table.data = np.column_stack((table.data,np.ones(len(table.data)))) # add single phase if no phase column given table.data = np.column_stack((table.data,np.ones(len(table.data)))) # add single phase if no phase column given
# --------------- figure out size and grid --------------------------------------------------------- grid,size = damask.util.coordGridAndSize(table.data[:,0:3])
coords = [np.unique(table.data[:,i]) for i in range(3)] coords = [np.unique(table.data[:,i]) for i in range(3)]
mincorner = np.array(list(map(min,coords))) mincorner = np.array(list(map(min,coords)))
maxcorner = np.array(list(map(max,coords))) origin = mincorner - 0.5*size/grid # shift from cell center to corner
grid = np.array(list(map(len,coords)),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
delta = size/np.maximum(np.ones(3,'d'), grid)
origin = mincorner - 0.5*delta # shift from cell center to corner
N = grid.prod()
if N != len(table.data): indices = np.lexsort((table.data[:,0],table.data[:,1],table.data[:,2])) # indices of position when sorting x fast, z slow
errors.append('data count {} does not match grid {}.'.format(len(table.data),' x '.join(map(repr,grid)))) microstructure = np.empty(grid,dtype = int) # initialize empty microstructure
if np.any(np.abs(np.log10((coords[0][1:]-coords[0][:-1])/delta[0])) > 0.01) \ i = 0
or np.any(np.abs(np.log10((coords[1][1:]-coords[1][:-1])/delta[1])) > 0.01) \
or np.any(np.abs(np.log10((coords[2][1:]-coords[2][:-1])/delta[2])) > 0.01):
errors.append('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ process data ------------------------------------------
colOri = table.label_index(label)+(3-coordDim) # column(s) of orientation data followed by 3 coordinates
if inputtype == 'microstructure': if inputtype == 'microstructure':
for z in range(grid[2]):
for y in range(grid[1]):
for x in range(grid[0]):
microstructure[x,y,z] = table.data[indices[i],3]
i+=1
grain = table.data[:,colOri] config_header = []
nGrains = len(np.unique(grain))
elif inputtype == 'quaternion': elif inputtype == 'quaternion':
unique,unique_inverse = np.unique(table.data[:,3:8],return_inverse=True,axis=0)
colPhase = -1 # column of phase data comes last
index = np.lexsort((table.data[:,0],table.data[:,1],table.data[:,2])) # index of position when sorting x fast, z slow
grain = -np.ones(N,dtype = 'int32') # initialize empty microstructure
orientations = [] # orientations
multiplicity = [] # orientation multiplicity (number of group members)
phases = [] # phase info
nGrains = 0 # counter for detected grains
existingGrains = np.arange(nGrains)
myPos = 0 # position (in list) of current grid point
for z in range(grid[2]): for z in range(grid[2]):
for y in range(grid[1]): for y in range(grid[1]):
for x in range(grid[0]): for x in range(grid[0]):
microstructure[x,y,z] = unique_inverse[indices[i]]+1
i+=1
config_header = ['<texture>']
for i,data in enumerate(unique):
ori = damask.Rotation(data[0:4])
config_header += ['[Grain{}]'.format(i+1),
'(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*ori.asEulers(degrees = True)),
]
if options.axes is not None: config_header += ['axes\t{} {} {}'.format(*options.axes)]
myData = table.data[index[myPos]] # read data for current grid point config_header += ['<microstructure>']
myPhase = int(myData[colPhase]) for i,data in enumerate(unique):
config_header += ['[Grain{}]'.format(i+1),
o = damask.Rotation(myData[colOri:colOri+4]) 'crystallite 1',
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(int(data[4]),i+1),
grain[myPos] = nGrains # assign new grain to me ... ]
nGrains += 1 # ... and update counter
orientations.append(o) # store new orientation for future comparison
multiplicity.append(1) # having single occurrence so far
phases.append(myPhase) # store phase info for future reporting
existingGrains = np.arange(nGrains) # update list of existing grains
myPos += 1 header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
+ config_header
geom = damask.Geom(microstructure,size,origin,
homogenization=options.homogenization,comments=header)
damask.util.croak(geom)
if name is None:
grain += 1 # offset from starting index 0 to 1 sys.stdout.write(str(geom.show()))
# --- generate header ----------------------------------------------------------------------------
info = {
'grid': grid,
'size': size,
'origin': origin,
'microstructures': nGrains,
'homogenization': options.homogenization,
}
damask.util.report_geom(info)
# --- write header ---------------------------------------------------------------------------------
formatwidth = 1+int(math.log10(info['microstructures']))
if inputtype == 'microstructure':
config_header = []
else: else:
config_header = ['<microstructure>'] geom.to_file(os.path.splitext(name)[0]+'.geom')
for i,phase in enumerate(phases):
config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
'crystallite %i'%options.crystallite,
'(constituent)\tphase %i\ttexture %s\tfraction 1.0'%(phase,str(i+1).rjust(formatwidth)),
]
config_header += ['<texture>']
for i,orientation in enumerate(orientations):
config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
'axes\t%s %s %s'%tuple(options.axes) if options.axes is not None else '',
'(gauss)\tphi1 %g\tPhi %g\tphi2 %g\tscatter 0.0\tfraction 1.0'%tuple(orientation.asEulers(degrees = True)),
]
table.labels_clear()
table.info_clear()
table.info_append([scriptID + ' ' + ' '.join(sys.argv[1:])])
table.head_putGeom(info)
table.info_append(config_header)
table.head_write()
# --- write microstructure information ------------------------------------------------------------
table.data = grain.reshape(info['grid'][1]*info['grid'][2],info['grid'][0])
table.data_writeArray('%{}i'.format(formatwidth),delimiter=' ')
#--- output finalization --------------------------------------------------------------------------
table.close()
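Note: the rewritten geom_fromTable script above replaces the old grain-matching loop with np.unique over the (quaternion, phase) columns, so every distinct row becomes one grain. A minimal sketch of that grouping step, assuming the rows are already ordered x fast, z slow (the script lexsorts the positions first); the data values are invented:

```python
import numpy as np

# invented per-point data: quaternion (w,x,y,z) plus a phase column, one row per grid point
data = np.array([[1,0,0,0, 1],
                 [1,0,0,0, 1],
                 [0,1,0,0, 2],
                 [0,1,0,0, 2],
                 [1,0,0,0, 1],
                 [0,0,1,0, 1]],dtype=float)

unique,unique_inverse = np.unique(data,return_inverse=True,axis=0)        # one grain per distinct row

grid = (3,2,1)                                                            # tiny grid matching the six rows
microstructure = (unique_inverse.flatten()+1).reshape(grid,order='F')     # grain IDs starting at 1

print(len(unique),'grains')
print(microstructure[:,:,0])
```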

View File

@ -1,102 +1,88 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
import multiprocessing import multiprocessing
from optparse import OptionParser,OptionGroup from optparse import OptionParser,OptionGroup
from scipy import spatial
import numpy as np
from scipy import spatial
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
def meshgrid2(*arrs): def laguerreTessellation(undeformed, coords, weights, grains, periodic = True, cpus = 2):
"""Code inspired by http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d"""
arrs = tuple(reversed(arrs))
lens = np.array(list(map(len, arrs)))
dim = len(arrs)
ans = []
for i, arr in enumerate(arrs):
slc = np.ones(dim,'i')
slc[i] = lens[i]
arr2 = np.asarray(arr).reshape(slc)
for j, sz in enumerate(lens):
if j != i:
arr2 = arr2.repeat(sz, axis=j)
ans.insert(0,arr2)
return tuple(ans)
def findClosestSeed(fargs): def findClosestSeed(fargs):
point, seeds, myWeights = fargs point, seeds, myWeights = fargs
tmp = np.repeat(point.reshape(3,1), len(seeds), axis=1).T tmp = np.repeat(point.reshape(3,1), len(seeds), axis=1).T
dist = np.sum((tmp - seeds)**2,axis=1) -myWeights dist = np.sum((tmp - seeds)**2,axis=1) -myWeights
return np.argmin(dist) # seed point closest to point return np.argmin(dist) # seed point closest to point
copies = \
np.array([
[ -1,-1,-1 ],
[ 0,-1,-1 ],
[ 1,-1,-1 ],
[ -1, 0,-1 ],
[ 0, 0,-1 ],
[ 1, 0,-1 ],
[ -1, 1,-1 ],
[ 0, 1,-1 ],
[ 1, 1,-1 ],
[ -1,-1, 0 ],
[ 0,-1, 0 ],
[ 1,-1, 0 ],
[ -1, 0, 0 ],
[ 0, 0, 0 ],
[ 1, 0, 0 ],
[ -1, 1, 0 ],
[ 0, 1, 0 ],
[ 1, 1, 0 ],
[ -1,-1, 1 ],
[ 0,-1, 1 ],
[ 1,-1, 1 ],
[ -1, 0, 1 ],
[ 0, 0, 1 ],
[ 1, 0, 1 ],
[ -1, 1, 1 ],
[ 0, 1, 1 ],
[ 1, 1, 1 ],
]).astype(float)*info['size'] if periodic else \
np.array([
[ 0, 0, 0 ],
]).astype(float)
def laguerreTessellation(undeformed, coords, weights, grains, periodic = True, cpus = 2): repeatweights = np.tile(weights,len(copies)).flatten(order='F') # Laguerre weights (1,2,3,1,2,3,...,1,2,3)
for i,vec in enumerate(copies): # periodic copies of seed points ...
try: seeds = np.append(seeds, coords+vec, axis=0) # ... (1+a,2+a,3+a,...,1+z,2+z,3+z)
except NameError: seeds = coords+vec
copies = \ if (repeatweights == 0.0).all(): # standard Voronoi (no weights, KD tree)
np.array([ myKDTree = spatial.cKDTree(seeds)
[ -1,-1,-1 ], devNull,closestSeeds = myKDTree.query(undeformed)
[ 0,-1,-1 ], else:
[ 1,-1,-1 ], damask.util.croak('...using {} cpu{}'.format(options.cpus, 's' if options.cpus > 1 else ''))
[ -1, 0,-1 ], arguments = [[arg,seeds,repeatweights] for arg in list(undeformed)]
[ 0, 0,-1 ],
[ 1, 0,-1 ],
[ -1, 1,-1 ],
[ 0, 1,-1 ],
[ 1, 1,-1 ],
[ -1,-1, 0 ],
[ 0,-1, 0 ],
[ 1,-1, 0 ],
[ -1, 0, 0 ],
[ 0, 0, 0 ],
[ 1, 0, 0 ],
[ -1, 1, 0 ],
[ 0, 1, 0 ],
[ 1, 1, 0 ],
[ -1,-1, 1 ],
[ 0,-1, 1 ],
[ 1,-1, 1 ],
[ -1, 0, 1 ],
[ 0, 0, 1 ],
[ 1, 0, 1 ],
[ -1, 1, 1 ],
[ 0, 1, 1 ],
[ 1, 1, 1 ],
]).astype(float)*info['size'] if periodic else \
np.array([
[ 0, 0, 0 ],
]).astype(float)
repeatweights = np.tile(weights,len(copies)).flatten(order='F') # Laguerre weights (1,2,3,1,2,3,...,1,2,3) if cpus > 1: # use multithreading
for i,vec in enumerate(copies): # periodic copies of seed points ... pool = multiprocessing.Pool(processes = cpus) # initialize workers
try: seeds = np.append(seeds, coords+vec, axis=0) # ... (1+a,2+a,3+a,...,1+z,2+z,3+z) result = pool.map_async(findClosestSeed, arguments) # evaluate function in parallel
except NameError: seeds = coords+vec pool.close()
pool.join()
if (repeatweights == 0.0).all(): # standard Voronoi (no weights, KD tree) closestSeeds = np.array(result.get()).flatten()
myKDTree = spatial.cKDTree(seeds)
devNull,closestSeeds = myKDTree.query(undeformed)
else: else:
damask.util.croak('...using {} cpu{}'.format(options.cpus, 's' if options.cpus > 1 else '')) closestSeeds = np.zeros(len(arguments),dtype='i')
arguments = [[arg,seeds,repeatweights] for arg in list(undeformed)] for i,arg in enumerate(arguments):
closestSeeds[i] = findClosestSeed(arg)
if cpus > 1: # use multithreading
pool = multiprocessing.Pool(processes = cpus) # initialize workers
result = pool.map_async(findClosestSeed, arguments) # evaluate function in parallel
pool.close()
pool.join()
closestSeeds = np.array(result.get()).flatten()
else:
closestSeeds = np.zeros(len(arguments),dtype='i')
for i,arg in enumerate(arguments):
closestSeeds[i] = findClosestSeed(arg)
# closestSeed is modulo number of original seed points (i.e. excluding periodic copies) # closestSeed is modulo number of original seed points (i.e. excluding periodic copies)
return grains[closestSeeds%coords.shape[0]] return grains[closestSeeds%coords.shape[0]]
# -------------------------------------------------------------------- # --------------------------------------------------------------------
# MAIN # MAIN
@ -189,10 +175,6 @@ group.add_option('--homogenization',
dest = 'homogenization', dest = 'homogenization',
type = 'int', metavar = 'int', type = 'int', metavar = 'int',
help = 'homogenization index to be used [%default]') help = 'homogenization index to be used [%default]')
group.add_option('--crystallite',
dest = 'crystallite',
type = 'int', metavar = 'int',
help = 'crystallite index to be used [%default]')
group.add_option('--phase', group.add_option('--phase',
dest = 'phase', dest = 'phase',
type = 'int', metavar = 'int', type = 'int', metavar = 'int',
@ -205,7 +187,6 @@ parser.set_defaults(pos = 'pos',
microstructure = 'microstructure', microstructure = 'microstructure',
eulers = 'euler', eulers = 'euler',
homogenization = 1, homogenization = 1,
crystallite = 1,
phase = 1, phase = 1,
cpus = 2, cpus = 2,
laguerre = False, laguerre = False,
@ -213,46 +194,41 @@ parser.set_defaults(pos = 'pos',
normalized = True, normalized = True,
config = True, config = True,
) )
(options,filenames) = parser.parse_args() (options,filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.geom' if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
table = damask.ASCIItable(name = name, readonly = True)
# --- read header ---------------------------------------------------------------------------- # --- read header ----------------------------------------------------------------------------
table.head_read() table.head_read()
info,extra_header = table.head_getGeom() info,extra_header = table.head_getGeom()
if options.grid is not None: info['grid'] = options.grid if options.grid is not None: info['grid'] = options.grid
if options.size is not None: info['size'] = options.size if options.size is not None: info['size'] = options.size
if options.origin is not None: info['origin'] = options.origin if options.origin is not None: info['origin'] = options.origin
# ------------------------------------------ sanity checks --------------------------------------- # ------------------------------------------ sanity checks ---------------------------------------
remarks = [] remarks = []
errors = [] errors = []
labels = [] labels = []
hasGrains = table.label_dimension(options.microstructure) == 1 hasGrains = table.label_dimension(options.microstructure) == 1
hasEulers = table.label_dimension(options.eulers) == 3 hasEulers = table.label_dimension(options.eulers) == 3
hasWeights = table.label_dimension(options.weight) == 1 and options.laguerre hasWeights = table.label_dimension(options.weight) == 1 and options.laguerre
if np.any(np.array(info['grid']) < 1): errors.append('invalid grid a b c.') for i in range(3):
if np.any(np.array(info['size']) <= 0.0) \ if info['size'][i] <= 0.0: # any invalid size?
and np.all(np.array(info['grid']) < 1): errors.append('invalid size x y z.') info['size'][i] = float(info['grid'][i])/max(info['grid']) # normalize to grid
else: remarks.append('rescaling size {} to {}...'.format(['x','y','z'][i],info['size'][i]))
for i in range(3):
if info['size'][i] <= 0.0: # any invalid size?
info['size'][i] = float(info['grid'][i])/max(info['grid']) # normalize to grid
remarks.append('rescaling size {} to {}...'.format(['x','y','z'][i],info['size'][i]))
if table.label_dimension(options.pos) != 3: if table.label_dimension(options.pos) != 3:
errors.append('seed positions "{}" have dimension {}.'.format(options.pos, errors.append('seed positions "{}" have dimension {}.'.format(options.pos,
@ -274,14 +250,14 @@ for name in filenames:
table.close(dismiss=True) table.close(dismiss=True)
continue continue
# ------------------------------------------ read seeds --------------------------------------- # ------------------------------------------ read seeds ---------------------------------------
table.data_readArray(labels) table.data_readArray(labels)
coords = table.data[:,table.label_indexrange(options.pos)] * info['size'] if options.normalized \ coords = table.data[:,table.label_indexrange(options.pos)] * info['size'] if options.normalized \
else table.data[:,table.label_indexrange(options.pos)] - info['origin'] else table.data[:,table.label_indexrange(options.pos)] - info['origin']
eulers = table.data[:,table.label_indexrange(options.eulers)] if hasEulers \ eulers = table.data[:,table.label_indexrange(options.eulers)] if hasEulers \
else np.zeros(3*len(coords)) else np.zeros(3*len(coords))
grains = table.data[:,table.label_indexrange(options.microstructure)].astype('i') if hasGrains \ grains = table.data[:,table.label_indexrange(options.microstructure)].astype(int) if hasGrains \
else 1+np.arange(len(coords)) else 1+np.arange(len(coords))
weights = table.data[:,table.label_indexrange(options.weight)] if hasWeights \ weights = table.data[:,table.label_indexrange(options.weight)] if hasWeights \
else np.zeros(len(coords)) else np.zeros(len(coords))
@ -293,62 +269,40 @@ for name in filenames:
x = (np.arange(info['grid'][0])+0.5)*info['size'][0]/info['grid'][0] x = (np.arange(info['grid'][0])+0.5)*info['size'][0]/info['grid'][0]
y = (np.arange(info['grid'][1])+0.5)*info['size'][1]/info['grid'][1] y = (np.arange(info['grid'][1])+0.5)*info['size'][1]/info['grid'][1]
z = (np.arange(info['grid'][2])+0.5)*info['size'][2]/info['grid'][2] z = (np.arange(info['grid'][2])+0.5)*info['size'][2]/info['grid'][2]
X,Y,Z = np.meshgrid(x, y, z,indexing='ij')
grid = np.stack((X,Y,Z),axis=-1).reshape((np.prod(info['grid']),3),order='F')
damask.util.croak('tessellating...') damask.util.croak('tessellating...')
grid = np.vstack(meshgrid2(x, y, z)).reshape(3,-1).T
indices = laguerreTessellation(grid, coords, weights, grains, options.periodic, options.cpus) indices = laguerreTessellation(grid, coords, weights, grains, options.periodic, options.cpus)
# --- write header ------------------------------------------------------------------------
usedGrainIDs = np.intersect1d(grainIDs,indices)
info['microstructures'] = len(usedGrainIDs)
if info['homogenization'] == 0: info['homogenization'] = options.homogenization
damask.util.report_geom(info,['grid','size','origin','homogenization',])
damask.util.croak(['microstructures: {}{}'.format(info['microstructures'],
(' out of {}'.format(NgrainIDs) if NgrainIDs != info['microstructures'] else '')),
])
config_header = [] config_header = []
formatwidth = 1+int(math.log10(NgrainIDs))
if options.config: if options.config:
config_header += ['<microstructure>']
for i,ID in enumerate(grainIDs):
config_header += ['[Grain{}]'.format(str(ID).zfill(formatwidth)),
'crystallite {}'.format(options.crystallite),
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(options.phase,str(ID).rjust(formatwidth)),
]
if hasEulers: if hasEulers:
config_header += ['<texture>'] config_header += ['<texture>']
theAxes = [] if options.axes is None else ['axes\t{} {} {}'.format(*options.axes)]
for ID in grainIDs: for ID in grainIDs:
eulerID = np.nonzero(grains == ID)[0][0] # find first occurrence of this grain id eulerID = np.nonzero(grains == ID)[0][0] # find first occurrence of this grain id
config_header += ['[Grain{}]'.format(str(ID).zfill(formatwidth)), config_header += ['[Grain{}]'.format(ID),
'(gauss)\tphi1 {:g}\tPhi {:g}\tphi2 {:g}\tscatter 0.0\tfraction 1.0'.format(*eulers[eulerID]) '(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*eulers[eulerID])
] + theAxes ]
if options.axes is not None: config_header += ['axes\t{} {} {}'.format(*options.axes)]
config_header += ['<microstructure>']
for ID in grainIDs:
config_header += ['[Grain{}]'.format(ID),
'crystallite 1',
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(options.phase,ID)
]
config_header += ['<!skip>'] config_header += ['<!skip>']
table.labels_clear()
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(info['microstructures']),
config_header,
])
table.head_write()
# --- write microstructure information ------------------------------------------------------------
table.data = indices.reshape(info['grid'][1]*info['grid'][2],info['grid'][0]) header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
table.data_writeArray('%%%ii'%(formatwidth),delimiter=' ') + config_header
geom = damask.Geom(indices.reshape(info['grid'],order='F'),info['size'],info['origin'],
#--- output finalization -------------------------------------------------------------------------- homogenization=options.homogenization,comments=header)
damask.util.croak(geom)
table.close() if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(os.path.splitext(name)[0]+'.geom')
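Note: laguerreTessellation above assigns every grid point to the seed minimizing the power distance |p-s|^2 - w, and falls back to a cKDTree nearest-neighbor query when all weights are zero. A toy version without the periodic seed copies and without multiprocessing; seed positions and weights are made up:

```python
import numpy as np
from scipy import spatial

rng     = np.random.default_rng(0)
seeds   = rng.random((5,3))                                    # made-up seed positions in a unit box
weights = np.array([0.0,0.0,0.05,0.0,0.0])                     # made-up Laguerre weights (all zero -> plain Voronoi)

x = (np.arange(8)+0.5)/8                                       # cell centers of an 8x8x8 grid of size 1
X,Y,Z = np.meshgrid(x,x,x,indexing='ij')
points = np.stack((X,Y,Z),axis=-1).reshape(-1,3)

if (weights == 0.0).all():                                     # unweighted: nearest seed via KD tree
    _,closest = spatial.cKDTree(seeds).query(points)
else:                                                          # weighted: minimize power distance |p-s|^2 - w
    dist = np.sum((points[:,None,:]-seeds[None,:,:])**2,axis=2) - weights[None,:]
    closest = np.argmin(dist,axis=1)

microstructure = (closest+1).reshape(8,8,8)                    # one grain index per voxel, starting at 1
print(np.unique(microstructure))
```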

View File

@ -1,21 +1,30 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
from scipy import ndimage from scipy import ndimage
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
getInterfaceEnergy = lambda A,B: np.float32((A*B != 0)*(A != B)*1.0) # 1.0 if A & B are distinct & nonzero, 0.0 otherwise
struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] [geomfile(s)]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [geomfile(s)]', description = """
Smoothens out interface roughness by simulated curvature flow. Smooth out interface roughness by simulated curvature flow.
This is achieved by the diffusion of each initially sharply bounded grain volume within the periodic domain This is achieved by the diffusion of each initially sharply bounded grain volume within the periodic domain
up to a given distance of 'd' voxels. up to a given distance of 'd' voxels.
The final geometry is assembled by selecting at each voxel that grain index for which the concentration remains largest. The final geometry is assembled by selecting at each voxel that grain index for which the concentration remains largest.
@ -33,69 +42,45 @@ parser.add_option('-N', '--iterations',
parser.add_option('-i', '--immutable', parser.add_option('-i', '--immutable',
action = 'extend', dest = 'immutable', metavar = '<int LIST>', action = 'extend', dest = 'immutable', metavar = '<int LIST>',
help = 'list of immutable microstructure indices') help = 'list of immutable microstructure indices')
parser.add_option('-r', '--renumber',
dest = 'renumber', action='store_true',
help = 'output consecutive microstructure indices')
parser.add_option('--ndimage', parser.add_option('--ndimage',
dest = 'ndimage', action='store_true', dest = 'ndimage', action='store_true',
help = 'use ndimage.gaussian_filter in lieu of explicit FFT') help = 'use ndimage.gaussian_filter in lieu of explicit FFT')
parser.set_defaults(d = 1, parser.set_defaults(d = 1,
N = 1, N = 1,
immutable = [], immutable = [],
renumber = False, ndimage = False,
ndimage = False,
) )
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
options.immutable = list(map(int,options.immutable)) options.immutable = list(map(int,options.immutable))
getInterfaceEnergy = lambda A,B: np.float32((A*B != 0)*(A != B)*1.0) # 1.0 if A & B are distinct & nonzero, 0.0 otherwise
struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood
# --- loop over input files -----------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read() grid_original = geom.get_grid()
info,extra_header = table.head_getGeom() damask.util.croak(geom)
damask.util.report_geom(info) microstructure = np.tile(geom.microstructure,np.where(grid_original == 1, 2,1)) # make one copy along dimensions with grid == 1
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data -----------------------------------------------------------------------------------
microstructure = np.tile(table.microstructure_read(info['grid']).reshape(info['grid'],order='F'),
np.where(info['grid'] == 1, 2,1)) # make one copy along dimensions with grid == 1
grid = np.array(microstructure.shape) grid = np.array(microstructure.shape)
# --- initialize support data --------------------------------------------------------------------- # --- initialize support data ---------------------------------------------------------------------
# store a copy of the initial microstructure to find locations of immutable indices # store a copy of the initial microstructure to find locations of immutable indices
microstructure_original = np.copy(microstructure) microstructure_original = np.copy(microstructure)
if not options.ndimage: if not options.ndimage:
X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]] X,Y,Z = np.mgrid[0:grid[0],0:grid[1],0:grid[2]]
# calculate Gaussian weights for simulating 3D diffusion # calculate Gaussian weights for simulating 3D diffusion
gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d),dtype=np.float32) \ gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d),dtype=np.float32) \
/np.power(2.0*np.pi*options.d*options.d,(3.0 - np.count_nonzero(info['grid'] == 1))/2.,dtype=np.float32) /np.power(2.0*np.pi*options.d*options.d,(3.0 - np.count_nonzero(grid_original == 1))/2.,dtype=np.float32)
gauss[:,:,:grid[2]//2:-1] = gauss[:,:,1:(grid[2]+1)//2] # trying to cope with uneven (odd) grid size gauss[:,:,:grid[2]//2:-1] = gauss[:,:,1:(grid[2]+1)//2] # trying to cope with uneven (odd) grid size
gauss[:,:grid[1]//2:-1,:] = gauss[:,1:(grid[1]+1)//2,:] gauss[:,:grid[1]//2:-1,:] = gauss[:,1:(grid[1]+1)//2,:]
gauss[:grid[0]//2:-1,:,:] = gauss[1:(grid[0]+1)//2,:,:] gauss[:grid[0]//2:-1,:,:] = gauss[1:(grid[0]+1)//2,:,:]
@ -118,7 +103,7 @@ for name in filenames:
grid[2]//2:-grid[2]//2] grid[2]//2:-grid[2]//2]
# transform bulk volume (i.e. where interfacial energy remained zero), store index of closest boundary voxel # transform bulk volume (i.e. where interfacial energy remained zero), store index of closest boundary voxel
index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0., index = ndimage.morphology.distance_transform_edt(periodic_interfaceEnergy == 0.,
return_distances = False, return_distances = False,
return_indices = True) return_indices = True)
@ -156,7 +141,7 @@ for name in filenames:
# transform voxels close to interface region # transform voxels close to interface region
index = ndimage.morphology.distance_transform_edt(periodic_diffusedEnergy >= 0.95*np.amax(periodic_diffusedEnergy), index = ndimage.morphology.distance_transform_edt(periodic_diffusedEnergy >= 0.95*np.amax(periodic_diffusedEnergy),
return_distances = False, return_distances = False,
return_indices = True) # want index of closest bulk grain return_indices = True) # want index of closest bulk grain
periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]//2:-grid[0]//2, periodic_microstructure = np.tile(microstructure,(3,3,3))[grid[0]//2:-grid[0]//2,
grid[1]//2:-grid[1]//2, grid[1]//2:-grid[1]//2,
@ -184,49 +169,10 @@ for name in filenames:
# undo any changes involving immutable microstructures # undo any changes involving immutable microstructures
microstructure = np.where(immutable, microstructure_original,microstructure) microstructure = np.where(immutable, microstructure_original,microstructure)
# --- renumber to sequence 1...Ngrains if requested ----------------------------------------------- damask.util.croak(geom.update(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]]))
# http://stackoverflow.com/questions/10741346/np-frequency-counts-for-unique-values-in-an-array geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if options.renumber: if name is None:
newID = 0 sys.stdout.write(str(geom.show()))
for microstructureID,count in enumerate(np.bincount(microstructure.flatten())): else:
if count != 0: geom.to_file(name)
newID += 1
microstructure = np.where(microstructure == microstructureID, newID, microstructure)
newInfo = {'microstructures': len(np.unique(microstructure)),}
# --- report --------------------------------------------------------------------------------------
remarks = []
if newInfo['microstructures'] != info['microstructures']:
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))
if remarks != []: damask.util.croak(remarks)
# --- write header --------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()
# --- write microstructure information ------------------------------------------------------------
formatwidth = int(math.floor(math.log10(np.nanmax(microstructure))+1))
table.data = microstructure[::1 if info['grid'][0]>1 else 2,
::1 if info['grid'][1]>1 else 2,
::1 if info['grid'][2]>1 else 2,].\
reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray('%{}i'.format(formatwidth),delimiter = ' ')
# --- output finalization --------------------------------------------------------------------------
table.close()
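Note: the grain-growth script above smooths interfaces by diffusing grain indicator fields and keeping, per voxel, the index whose concentration remains largest. A much-simplified sketch of that idea using ndimage.gaussian_filter directly; it skips the interface-energy weighting, the explicit FFT path, and the --immutable handling, and the test microstructure is made up:

```python
import numpy as np
from scipy import ndimage

# made-up two-grain box with a rough, stepped interface
microstructure = np.ones((16,16,16),dtype=int)
microstructure[8:,:,:] = 2
microstructure[7,::2,:] = 2

d = 1.0                                                        # diffusion distance in voxels
smoothed = np.zeros_like(microstructure)
best     = np.full(microstructure.shape,-np.inf)

for grain in np.unique(microstructure):
    # diffuse this grain's indicator function over the periodic domain
    conc = ndimage.gaussian_filter((microstructure == grain).astype(float),sigma=d,mode='wrap')
    winner = conc > best                                       # keep the grain with the largest concentration
    smoothed[winner] = grain
    best[winner] = conc[winner]

print(np.count_nonzero(smoothed != microstructure),'voxels changed')
```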

View File

@ -1,120 +1,72 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
import damask from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
validDirections = ['x','y','z']
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Mirrors spectral geometry description along given directions. Mirror along given directions.
""", version=scriptID) """, version=scriptID)
validDirections = ['x','y','z']
parser.add_option('-d','--direction', parser.add_option('-d','--direction',
dest = 'directions', dest = 'directions',
action = 'extend', metavar = '<string LIST>', action = 'extend', metavar = '<string LIST>',
help = "directions in which to mirror {'x','y','z'}") help = "directions in which to mirror {{{}}}".format(','.join(validDirections)))
parser.add_option('--float', parser.add_option( '--reflect',
dest = 'float', dest = 'reflect',
action = 'store_true', action = 'store_true',
help = 'use float input') help = 'reflect (include) outermost layers')
parser.set_defaults(float = False, parser.set_defaults(reflect = False)
)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
if options.directions is None: if options.directions is None:
parser.error('no direction given.') parser.error('no direction given.')
if not set(options.directions).issubset(validDirections): if not set(options.directions).issubset(validDirections):
invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)] invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)]
parser.error('invalid directions {}. '.format(*invalidDirections)) parser.error('invalid directions {}. '.format(*invalidDirections))
datatype = 'f' if options.float else 'i' limits = [None,None] if options.reflect else [-2,0]
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid'],datatype).reshape(info['grid'],order='F') # read microstructure
microstructure = geom.get_microstructure()
if 'z' in options.directions: if 'z' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[:,:,::-1]],2) microstructure = np.concatenate([microstructure,microstructure[:,:,limits[0]:limits[1]:-1]],2)
if 'y' in options.directions: if 'y' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[:,::-1,:]],1) microstructure = np.concatenate([microstructure,microstructure[:,limits[0]:limits[1]:-1,:]],1)
if 'x' in options.directions: if 'x' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[::-1,:,:]],0) microstructure = np.concatenate([microstructure,microstructure[limits[0]:limits[1]:-1,:,:]],0)
# --- do work ------------------------------------------------------------------------------------ damask.util.croak(geom.update(microstructure,rescale=True))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
newInfo = { if name is None:
'size': microstructure.shape*info['size']/info['grid'], sys.stdout.write(str(geom.show()))
'grid': microstructure.shape, else:
} geom.to_file(name)
# --- report ---------------------------------------------------------------------------------------
remarks = []
if (any(newInfo['grid'] != info['grid'])):
remarks.append('--> grid a b c: %s'%(' x '.join(map(str,newInfo['grid']))))
if (any(newInfo['size'] != info['size'])):
remarks.append('--> size x y z: %s'%(' x '.join(map(str,newInfo['size']))))
if remarks != []: damask.util.croak(remarks)
# --- write header ---------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.head_write()
# --- write microstructure information ------------------------------------------------------------
formatwidth = int(math.floor(math.log10(np.nanmax(microstructure))+1))
table.data = microstructure.reshape((newInfo['grid'][0],np.prod(newInfo['grid'][1:])),order='F').transpose()
table.data_writeArray('%{}i'.format(formatwidth),delimiter = ' ')
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
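Note: the mirror script above distinguishes plain mirroring (outermost layers dropped, limits [-2,0]) from --reflect (outermost layers repeated, limits [None,None]). A small sketch of the slicing, on a made-up 4x3x2 geometry:

```python
import numpy as np

def mirror(ms, directions, reflect=False):
    # reflect=True repeats the outermost layers, reflect=False drops them (limits as in the script)
    limits = [None,None] if reflect else [-2,0]
    if 'z' in directions: ms = np.concatenate([ms,ms[:,:,limits[0]:limits[1]:-1]],2)
    if 'y' in directions: ms = np.concatenate([ms,ms[:,limits[0]:limits[1]:-1,:]],1)
    if 'x' in directions: ms = np.concatenate([ms,ms[limits[0]:limits[1]:-1,:,:]],0)
    return ms

microstructure = np.arange(1,4*3*2+1).reshape(4,3,2)           # made-up test geometry
print(mirror(microstructure,['x'],reflect=True ).shape)        # (8, 3, 2): all layers mirrored
print(mirror(microstructure,['x'],reflect=False).shape)        # (6, 3, 2): outermost layers not repeated
```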

View File

@ -1,112 +1,74 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys import os
import numpy as np import sys
from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
compress geometry files with ranges "a to b" and/or multiples "n of x". Pack runs of consecutive microstructure indices into ranges "a to b" and/or repeated indices into multiples "n of x".
""", version = scriptID) """, version = scriptID)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read() damask.util.croak(geom)
info,extra_header = table.head_getGeom() geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
damask.util.report_geom(info)
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- write header --------------------------------------------------------------------------------- compressType = None
table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.head_write()
# --- write packed microstructure information -----------------------------------------------------
compressType = ''
former = start = -1 former = start = -1
reps = 0 reps = 0
outputAlive = True if name is None:
while outputAlive and table.data_read(): # read next data line of ASCII table f = sys.stdout
items = table.data else:
if len(items) > 2: f= open(name,'w')
if items[1].lower() == 'of': items = [int(items[2])]*int(items[0])
elif items[1].lower() == 'to': items = range(int(items[0]),1+int(items[2]))
else: items = map(int,items)
else: items = map(int,items)
for current in items: for current in geom.microstructure.flatten('F'):
if abs(current - former) == 1 and (start - current) == reps*(former - current): if abs(current - former) == 1 and (start - current) == reps*(former - current):
compressType = 'to' compressType = 'to'
reps += 1 reps += 1
elif current == former and start == former: elif current == former and start == former:
compressType = 'of' compressType = 'of'
reps += 1 reps += 1
else: else:
if compressType == '': if compressType is None:
table.data = [] f.write('\n'.join(geom.get_header())+'\n')
elif compressType == '.': elif compressType == '.':
table.data = [former] f.write('{}\n'.format(former))
elif compressType == 'to': elif compressType == 'to':
table.data = [start,'to',former] f.write('{} to {}\n'.format(start,former))
elif compressType == 'of': elif compressType == 'of':
table.data = [reps,'of',former] f.write('{} of {}\n'.format(reps,former))
outputAlive = table.data_write(delimiter = ' ') # output processed line compressType = '.'
start = current
reps = 1
compressType = '.' former = current
start = current
reps = 1
former = current if compressType == '.':
f.write('{}\n'.format(former))
table.data = { elif compressType == 'to':
'.' : [former], f.write('{} to {}\n'.format(start,former))
'to': [start,'to',former], elif compressType == 'of':
'of': [reps,'of',former], f.write('{} of {}\n'.format(reps,former))
}[compressType]
outputAlive = table.data_write(delimiter = ' ') # output processed line
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
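Note: the rewritten pack loop above run-length encodes the flattened (Fortran-order) microstructure into "a to b" and "n of x" lines. A standalone sketch of the same state machine, applied to a made-up index sequence (it only collects the data lines, not the geom header):

```python
def pack(values):
    """Run-length encode a flat index sequence into 'a to b' ranges and 'n of x' multiples."""
    lines = []
    former = start = None
    reps = 0
    mode = None
    for current in values:
        if former is not None and abs(current-former) == 1 and (start-current) == reps*(former-current):
            mode = 'to'                                        # continuing an ascending/descending run
            reps += 1
        elif current == former and start == former:
            mode = 'of'                                        # continuing a repetition
            reps += 1
        else:                                                  # run broken: flush the previous one
            if   mode == '.' : lines.append('{}'.format(former))
            elif mode == 'to': lines.append('{} to {}'.format(start,former))
            elif mode == 'of': lines.append('{} of {}'.format(reps,former))
            mode = '.'
            start = current
            reps = 1
        former = current
    if   mode == '.' : lines.append('{}'.format(former))       # flush the final run
    elif mode == 'to': lines.append('{} to {}'.format(start,former))
    elif mode == 'of': lines.append('{} of {}'.format(reps,former))
    return lines

print(pack([1,2,3,4,4,4,4,9,5,6,7]))                           # ['1 to 4', '3 of 4', '9', '5 to 7']
```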

View File

@ -1,96 +1,46 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
import damask from io import StringIO
from optparse import OptionParser from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [file[s]]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
renumber sorted microstructure indices to 1,...,N. Renumber sorted microstructure indices to 1,...,N.
""", version=scriptID) """, version=scriptID)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
# --- loop over input files ----------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header --------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read() renumbered = np.empty(geom.get_grid(),dtype=geom.microstructure.dtype)
info,extra_header = table.head_getGeom() for i, oldID in enumerate(np.unique(geom.microstructure)):
damask.util.report_geom(info) renumbered = np.where(geom.microstructure == oldID, i+1, renumbered)
errors = [] damask.util.croak(geom.update(renumbered))
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ---------------------------------------------------------------------------------- if name is None:
sys.stdout.write(str(geom.show()))
microstructure = table.microstructure_read(info['grid']) # read microstructure else:
geom.to_file(name)
# --- do work ------------------------------------------------------------------------------------
newInfo = {
'origin': np.zeros(3,'d'),
'microstructures': 0,
}
grainIDs = np.unique(microstructure)
renumbered = np.copy(microstructure)
for i, oldID in enumerate(grainIDs):
renumbered = np.where(microstructure == oldID, i+1, renumbered)
newInfo['microstructures'] = len(grainIDs)
# --- report -------------------------------------------------------------------------------------
remarks = []
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: %i'%newInfo['microstructures'])
if remarks != []: damask.util.croak(remarks)
# --- write header -------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()
# --- write microstructure information -----------------------------------------------------------
format = '%{}i'.format(int(math.floor(math.log10(np.nanmax(renumbered))+1)))
table.data = renumbered.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray(format,delimiter = ' ')
# --- output finalization ------------------------------------------------------------------------
table.close() # close ASCII table
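Note: the new renumber loop above maps the sorted unique microstructure indices to 1..N. The same operation on a made-up array:

```python
import numpy as np

microstructure = np.array([[10,10, 4],
                           [ 4, 7,10]])                        # made-up, gappy indices

renumbered = np.empty_like(microstructure)
for i,oldID in enumerate(np.unique(microstructure)):           # unique IDs are returned sorted
    renumbered = np.where(microstructure == oldID, i+1, renumbered)

print(renumbered)                                              # [[3 3 1]
                                                               #  [1 2 3]]
```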

View File

@ -1,20 +1,26 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import sys
import numpy as np import numpy as np
from io import StringIO
from optparse import OptionParser from optparse import OptionParser
from scipy import ndimage
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Scales a geometry description independently in x, y, and z direction in terms of grid and/or size. Scales independently in x, y, and z direction in terms of grid and/or size.
Either absolute values or relative factors (like "0.25x") can be used. Either absolute values or relative factors (like "0.25x") can be used.
""", version = scriptID) """, version = scriptID)
@ -22,142 +28,47 @@ Either absolute values or relative factors (like "0.25x") can be used.
parser.add_option('-g', '--grid', parser.add_option('-g', '--grid',
dest = 'grid', dest = 'grid',
type = 'string', nargs = 3, metavar = 'string string string', type = 'string', nargs = 3, metavar = 'string string string',
help = 'a,b,c grid of hexahedral box [unchanged]') help = 'a,b,c grid of hexahedral box')
parser.add_option('-s', '--size', parser.add_option('-s', '--size',
dest = 'size', dest = 'size',
type = 'string', nargs = 3, metavar = 'string string string', type = 'string', nargs = 3, metavar = 'string string string',
help = 'x,y,z size of hexahedral box [unchanged]') help = 'x,y,z size of hexahedral box')
parser.add_option('-r', '--renumber',
dest = 'renumber',
action = 'store_true',
help = 'renumber microstructure indices from 1..N [%default]')
parser.add_option('--float',
dest = 'float',
action = 'store_true',
help = 'use float input')
parser.set_defaults(renumber = False,
grid = ['0','0','0'],
size = ['0.0','0.0','0.0'],
float = False,
)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
datatype = 'f' if options.float else 'i'
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read() grid = geom.get_grid()
info,extra_header = table.head_getGeom() size = geom.get_size()
damask.util.report_geom(info)
errors = [] new_grid = grid if options.grid is None else \
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') np.array([int(o*float(n.lower().replace('x',''))) if n.lower().endswith('x') \
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.') else int(n) for o,n in zip(grid,options.grid)],dtype=int)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------ new_size = size if options.size is None else \
np.array([o*float(n.lower().replace('x','')) if n.lower().endswith('x') \
else float(n) for o,n in zip(size,options.size)],dtype=float)
microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure damask.util.croak(geom.update(microstructure =
ndimage.interpolation.zoom(
geom.microstructure,
new_grid/grid,
output=geom.microstructure.dtype,
order=0,
mode='nearest',
prefilter=False,
) if np.any(new_grid != grid) \
else None,
size = new_size))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
# --- do work ------------------------------------------------------------------------------------ if name is None:
sys.stdout.write(str(geom.show()))
newInfo = { else:
'grid': np.zeros(3,'i'), geom.to_file(name)
'origin': np.zeros(3,'d'),
'microstructures': 0,
}
newInfo['grid'] = np.array([{True: round(o*float(n.lower().replace('x',''))),
False: round(float(n.lower().replace('x','')))}[n[-1].lower() == 'x']
for o,n in zip(info['grid'],options.grid)],'i')
newInfo['size'] = np.array([{True: o*float(n.lower().replace('x','')),
False: float(n.lower().replace('x',''))}[n[-1].lower() == 'x']
for o,n in zip(info['size'],options.size)],'d')
newInfo['grid'] = np.where(newInfo['grid'] <= 0 , info['grid'],newInfo['grid'])
newInfo['size'] = np.where(newInfo['size'] <= 0.0, info['size'],newInfo['size'])
multiplicity = []
for j in range(3):
multiplicity.append([])
last = 0
for i in range(info['grid'][j]):
this = int((i+1)*float(newInfo['grid'][j])/info['grid'][j])
multiplicity[j].append(this-last)
last = this
microstructure = microstructure.reshape(info['grid'],order='F')
microstructure = np.repeat(np.repeat(np.repeat(microstructure,
multiplicity[0], axis=0),multiplicity[1], axis=1),multiplicity[2], axis=2)
# --- renumber to sequence 1...Ngrains if requested ------------------------------------------------
# http://stackoverflow.com/questions/10741346/np-frequency-counts-for-unique-values-in-an-array
if options.renumber:
newID = 0
for microstructureID,count in enumerate(np.bincount(microstructure.reshape(newInfo['grid'].prod()))):
if count != 0:
newID += 1
microstructure = np.where(microstructure == microstructureID, newID,microstructure).reshape(microstructure.shape)
newInfo['microstructures'] = len(np.unique(microstructure))
# --- report ---------------------------------------------------------------------------------------
remarks = []
errors = []
if (any(newInfo['grid'] != info['grid'])):
remarks.append('--> grid a b c: %s'%(' x '.join(map(str,newInfo['grid']))))
if (any(newInfo['size'] != info['size'])):
remarks.append('--> size x y z: %s'%(' x '.join(map(str,newInfo['size']))))
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: %i'%newInfo['microstructures'])
if np.any(newInfo['grid'] < 1): errors.append('invalid new grid a b c.')
if np.any(newInfo['size'] <= 0.0): errors.append('invalid new size x y z.')
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- write header ---------------------------------------------------------------------------------
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.labels_clear()
table.head_write()
# --- write microstructure information ------------------------------------------------------------
format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(microstructure))+1)))
table.data = microstructure.reshape((newInfo['grid'][0],newInfo['grid'][1]*newInfo['grid'][2]),order='F').transpose()
table.data_writeArray(format,delimiter=' ')
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
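On the new Geom-based path, scaling boils down to a nearest-neighbor zoom of the microstructure array. A sketch of that single call, assuming a hypothetical 4 x 4 x 1 array scaled to 8 x 8 x 1:

import numpy as np
from scipy import ndimage

micro    = np.arange(1,17).reshape(4,4,1)        # hypothetical microstructure indices
grid     = np.array(micro.shape)
new_grid = np.array([8,8,1])

zoomed = ndimage.interpolation.zoom(micro, new_grid/grid,
                                    output=micro.dtype,
                                    order=0,              # nearest neighbor keeps integer IDs intact
                                    mode='nearest',
                                    prefilter=False)
assert zoomed.shape == tuple(new_grid)

Order 0 avoids interpolating between grain IDs, which is also why the script only zooms when the grid actually changes.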

View File

@ -1,28 +1,33 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
import damask from io import StringIO
from scipy import ndimage
from optparse import OptionParser from optparse import OptionParser
from scipy import ndimage
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Rotates spectral geometry description. Rotates original microstructure and embeds it into buffer material.
""", version=scriptID) """, version=scriptID)
parser.add_option('-r', '--rotation', parser.add_option('-r', '--rotation',
dest='rotation', dest='rotation',
type = 'float', nargs = 4, metavar = ' '.join(['float']*4), type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
help = 'rotation given as angle and axis') help = 'rotation given as axis and angle')
parser.add_option('-e', '--eulers', parser.add_option('-e', '--eulers',
dest = 'eulers', dest = 'eulers',
type = 'float', nargs = 3, metavar = ' '.join(['float']*3), type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
@ -30,7 +35,7 @@ parser.add_option('-e', '--eulers',
parser.add_option('-d', '--degrees', parser.add_option('-d', '--degrees',
dest = 'degrees', dest = 'degrees',
action = 'store_true', action = 'store_true',
help = 'Euler angles are given in degrees [%default]') help = 'Euler angles/axis angle are given in degrees')
parser.add_option('-m', '--matrix', parser.add_option('-m', '--matrix',
dest = 'matrix', dest = 'matrix',
type = 'float', nargs = 9, metavar = ' '.join(['float']*9), type = 'float', nargs = 9, metavar = ' '.join(['float']*9),
@ -41,108 +46,56 @@ parser.add_option('-q', '--quaternion',
help = 'rotation given as quaternion') help = 'rotation given as quaternion')
parser.add_option('-f', '--fill', parser.add_option('-f', '--fill',
dest = 'fill', dest = 'fill',
type = 'int', metavar = 'int', type = 'float', metavar = 'float',
help = 'background grain index. "0" selects maximum microstructure index + 1 [%default]') help = 'background microstructure index, defaults to max microstructure index + 1')
parser.add_option('--float',
dest = 'float',
action = 'store_true',
help = 'use float input')
parser.set_defaults(degrees = False, parser.set_defaults(degrees = False)
fill = 0,
float = False,
)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
if sum(x is not None for x in [options.rotation,options.eulers,options.matrix,options.quaternion]) != 1: if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) < 3:
parser.error('not exactly one rotation specified...') parser.error('more than one rotation specified.')
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) > 3:
parser.error('no rotation specified.')
if options.quaternion is not None: if options.quaternion is not None:
eulers = damask.Rotation.fromQuaternion(np.array(options.quaternion)).asEulers(degrees=True) rot = damask.Rotation.fromQuaternion(np.array(options.quaternion)) # we might need P=+1 here, too...
if options.rotation is not None: if options.rotation is not None:
eulers = damask.Rotation.fromAxisAngle(np.array(options.rotation,degrees=True)).asEulers(degrees=True) rot = damask.Rotation.fromAxisAngle(np.array(options.rotation),degrees=options.degrees,normalise=True,P=+1)
if options.matrix is not None: if options.matrix is not None:
eulers = damask.Rotation.fromMatrix(np.array(options.matrix)).asEulers(degrees=True) rot = damask.Rotation.fromMatrix(np.array(options.matrix))
if options.eulers is not None: if options.eulers is not None:
eulers = damask.Rotation.fromEulers(np.array(options.eulers),degrees=True).asEulers(degrees=True) rot = damask.Rotation.fromEulers(np.array(options.eulers),degrees=options.degrees)
datatype = 'f' if options.float else 'i' eulers = rot.asEulers(degrees=True)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
size = geom.get_size()
grid = geom.get_grid()
origin = geom.get_origin()
microstructure = geom.get_microstructure()
fill = np.nanmax(microstructure)+1 if options.fill is None else options.fill
dtype = float if np.isnan(fill) or int(fill) != fill or microstructure.dtype==np.float else int
table.head_read() # These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
info,extra_header = table.head_getGeom() # this seems to be ok, see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf
damask.util.report_geom(info) microstructure = ndimage.rotate(microstructure,eulers[2],(0,1),order=0,
prefilter=False,output=dtype,cval=fill) # rotation around z
microstructure = ndimage.rotate(microstructure,eulers[1],(1,2),order=0,
prefilter=False,output=dtype,cval=fill) # rotation around x
microstructure = ndimage.rotate(microstructure,eulers[0],(0,1),order=0,
prefilter=False,output=dtype,cval=fill) # rotation around z
errors = [] damask.util.croak(geom.update(microstructure,origin=origin-(np.asarray(microstructure.shape)-grid)/2*size/grid,rescale=True))
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------ if name is None:
sys.stdout.write(str(geom.show()))
microstructure = table.microstructure_read(info['grid'],datatype).reshape(info['grid'],order='F') # read microstructure else:
geom.to_file(name)
newGrainID = options.fill if options.fill != 0 else np.nanmax(microstructure)+1
microstructure = ndimage.rotate(microstructure,eulers[2],(0,1),order=0,prefilter=False,output=int,cval=newGrainID) # rotation around Z
microstructure = ndimage.rotate(microstructure,eulers[1],(1,2),order=0,prefilter=False,output=int,cval=newGrainID) # rotation around X
microstructure = ndimage.rotate(microstructure,eulers[0],(0,1),order=0,prefilter=False,output=int,cval=newGrainID) # rotation around Z
# --- do work ------------------------------------------------------------------------------------
newInfo = {
'size': microstructure.shape*info['size']/info['grid'],
'grid': microstructure.shape,
'microstructures': len(np.unique(microstructure)),
}
# --- report ---------------------------------------------------------------------------------------
remarks = []
if (any(newInfo['grid'] != info['grid'])):
remarks.append('--> grid a b c: {}'.format(' x '.join(map(str,newInfo['grid']))))
if (any(newInfo['size'] != info['size'])):
remarks.append('--> size x y z: {}'.format(' x '.join(map(str,newInfo['size']))))
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))
if remarks != []: damask.util.croak(remarks)
# --- write header ---------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()
# --- write microstructure information ------------------------------------------------------------
format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(microstructure))+1)))
table.data = microstructure.reshape((newInfo['grid'][0],np.prod(newInfo['grid'][1:])),order='F').transpose()
table.data_writeArray(format,delimiter=' ')
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
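The rewritten script applies the Bunge Euler angles as three successive array rotations in the fixed reference frame (z, then x, then z again), padding newly exposed corners with the background index. A reduced sketch of that sequence with hypothetical data:

import numpy as np
from scipy import ndimage

micro = np.ones((8,8,8), dtype=int)                # hypothetical two-grain box
micro[2:6,2:6,2:6] = 2
phi1, Phi, phi2 = 30.0, 0.0, 0.0                   # hypothetical Euler angles in degrees
fill  = micro.max() + 1                            # background index for voxels outside the original box

m = ndimage.rotate(micro, phi2, (0,1), order=0, prefilter=False, output=int, cval=fill)   # around z
m = ndimage.rotate(m,     Phi,  (1,2), order=0, prefilter=False, output=int, cval=fill)   # around x
m = ndimage.rotate(m,     phi1, (0,1), order=0, prefilter=False, output=int, cval=fill)   # around z

Because reshape defaults to True, the array grows with each rotation; the script accounts for that by shifting the origin and rescaling the size.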

View File

@ -1,11 +1,15 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys import os
import numpy as np import sys
from optparse import OptionParser from optparse import OptionParser
from io import StringIO
import numpy as np
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
@ -19,70 +23,39 @@ Translate geom description into ASCIItable containing position and microstructur
""", version = scriptID) """, version = scriptID)
parser.add_option('--float',
dest = 'float',
action = 'store_true',
help = 'use float input')
parser.set_defaults(float = False,
)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
datatype = 'f' if options.float else 'i'
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.txt' if name else name,
buffered = False,
labeled = False,
)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read() damask.util.croak(geom)
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)
errors = [] # --- generate grid --------------------------------------------------------------------------------
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ------------------------------------------------------------------------------------ grid = geom.get_grid()
size = geom.get_size()
origin = geom.get_origin()
microstructure = table.microstructure_read(info['grid'],datatype) x = (0.5 + np.arange(grid[0],dtype=float))/grid[0]*size[0]+origin[0]
y = (0.5 + np.arange(grid[1],dtype=float))/grid[1]*size[1]+origin[1]
z = (0.5 + np.arange(grid[2],dtype=float))/grid[2]*size[2]+origin[2]
# ------------------------------------------ assemble header --------------------------------------- xx = np.tile( x, grid[1]* grid[2])
yy = np.tile(np.repeat(y,grid[0] ),grid[2])
zz = np.repeat(z,grid[0]*grid[1])
table.info_clear() # --- create ASCII table --------------------------------------------------------------------------
table.info_append(extra_header + [scriptID + '\t' + ' '.join(sys.argv[1:])])
table.labels_clear() table = damask.ASCIItable(outname = os.path.splitext(name)[0]+'.txt' if name else name)
table.info_append(geom.get_comments() + [scriptID + '\t' + ' '.join(sys.argv[1:])])
table.labels_append(['{}_{}'.format(1+i,'pos') for i in range(3)]+['microstructure']) table.labels_append(['{}_{}'.format(1+i,'pos') for i in range(3)]+['microstructure'])
table.head_write() table.head_write()
table.output_flush() table.output_flush()
table.data = np.squeeze(np.dstack((xx,yy,zz,geom.microstructure.flatten('F'))),axis=0)
#--- generate grid --------------------------------------------------------------------------------
x = (0.5 + np.arange(info['grid'][0],dtype=float))/info['grid'][0]*info['size'][0]+info['origin'][0]
y = (0.5 + np.arange(info['grid'][1],dtype=float))/info['grid'][1]*info['size'][1]+info['origin'][1]
z = (0.5 + np.arange(info['grid'][2],dtype=float))/info['grid'][2]*info['size'][2]+info['origin'][2]
xx = np.tile( x, info['grid'][1]* info['grid'][2])
yy = np.tile(np.repeat(y,info['grid'][0] ),info['grid'][2])
zz = np.repeat(z,info['grid'][0]*info['grid'][1])
table.data = np.squeeze(np.dstack((xx,yy,zz,microstructure)),axis=0)
table.data_writeArray() table.data_writeArray()
# ------------------------------------------ finalize output ---------------------------------------
table.close() table.close()
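The cell-center positions follow directly from grid, size, and origin and are tiled/repeated into the x-fastest point list the table expects. A short sketch with hypothetical values:

import numpy as np

grid   = np.array([2,2,1])                                   # hypothetical grid
size   = np.array([1.0,1.0,1.0])
origin = np.zeros(3)

x = (0.5 + np.arange(grid[0]))/grid[0]*size[0] + origin[0]   # cell centers along x
y = (0.5 + np.arange(grid[1]))/grid[1]*size[1] + origin[1]
z = (0.5 + np.arange(grid[2]))/grid[2]*size[2] + origin[2]

xx = np.tile(x, grid[1]*grid[2])                             # x varies fastest, matching order='F'
yy = np.tile(np.repeat(y, grid[0]), grid[2])
zz = np.repeat(z, grid[0]*grid[1])

coords = np.vstack((xx,yy,zz)).T                             # (N,3) cell-center coordinates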

View File

@ -1,20 +1,23 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
import damask
from optparse import OptionParser from optparse import OptionParser
from io import StringIO
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
translate microstructure indices (shift or substitute) and/or geometry origin. Translate microstructure indices (shift or substitute) and/or geometry origin.
""", version=scriptID) """, version=scriptID)
@ -25,103 +28,37 @@ parser.add_option('-o', '--origin',
parser.add_option('-m', '--microstructure', parser.add_option('-m', '--microstructure',
dest = 'microstructure', dest = 'microstructure',
type = 'int', metavar = 'int', type = 'int', metavar = 'int',
help = 'offset from old to new microstructure indices') help = 'offset from old to new microstructure indices (after substitution)')
parser.add_option('-s', '--substitute', parser.add_option('-s', '--substitute',
dest = 'substitute', dest = 'substitute',
action = 'extend', metavar = '<string LIST>', action = 'extend', metavar = '<string LIST>',
help = 'substitutions of microstructure indices from,to,from,to,...') help = 'substitutions of microstructure indices from,to,from,to,...')
parser.add_option('--float',
dest = 'float',
action = 'store_true',
help = 'use float input')
parser.set_defaults(origin = (0.0,0.0,0.0), parser.set_defaults(origin = (0.0,0.0,0.0),
microstructure = 0, microstructure = 0,
substitute = [], substitute = []
float = False,
) )
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
datatype = 'f' if options.float else 'i' sub = list(map(int,options.substitute))
sub = {}
for i in range(len(options.substitute)//2): # split substitution list into "from" -> "to"
sub[int(options.substitute[i*2])] = int(options.substitute[i*2+1])
# --- loop over input files ----------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header --------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read() substituted = geom.get_microstructure()
info,extra_header = table.head_getGeom() for old,new in zip(sub[0::2],sub[1::2]): substituted[substituted==old] = new # substitute microstructure indices
damask.util.report_geom(info) substituted += options.microstructure # constant shift
errors = [] damask.util.croak(geom.update(substituted,origin=geom.get_origin()+options.origin))
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ---------------------------------------------------------------------------------- if name is None:
sys.stdout.write(str(geom.show()))
microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure else:
geom.to_file(name)
# --- do work ------------------------------------------------------------------------------------
newInfo = {
'origin': np.zeros(3,'d'),
'microstructures': 0,
}
substituted = np.copy(microstructure)
for k, v in sub.items(): substituted[microstructure==k] = v # substitute microstructure indices
substituted += options.microstructure # shift microstructure indices
newInfo['origin'] = info['origin'] + options.origin
newInfo['microstructures'] = len(np.unique(substituted))
# --- report -------------------------------------------------------------------------------------
remarks = []
if (any(newInfo['origin'] != info['origin'])):
remarks.append('--> origin x y z: {}'.format(' : '.join(map(str,newInfo['origin']))))
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))
if remarks != []: damask.util.croak(remarks)
# --- write header -------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=newInfo['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()
# --- write microstructure information -----------------------------------------------------------
format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(substituted))+1)))
table.data = substituted.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray(format,delimiter = ' ')
# --- output finalization ------------------------------------------------------------------------
table.close() # close ASCII table
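Substitution now acts directly on the microstructure array: every from/to pair is applied by boolean masking, and the constant offset is added afterwards. A sketch with a hypothetical mapping 1 -> 10, 2 -> 20 and offset 5:

import numpy as np

microstructure = np.array([1,2,2,3])                 # hypothetical indices
sub            = [1,10, 2,20]                        # flat from,to list as parsed from --substitute
offset         = 5

substituted = np.copy(microstructure)
for old,new in zip(sub[0::2], sub[1::2]):
    substituted[substituted == old] = new            # 1 -> 10, 2 -> 20
substituted += offset                                # shift applied after substitution

print(substituted)                                   # [15 25 25  8]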

View File

@ -1,80 +1,40 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
from optparse import OptionParser from optparse import OptionParser
from io import StringIO
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
Unpack geometry files containing ranges "a to b" and/or "n of x" multiples (exclusively in one line). Unpack ranges "a to b" and/or "n of x" multiples (exclusively in one line).
""", version = scriptID) """, version = scriptID)
parser.add_option('-1', '--onedimensional',
dest = 'oneD',
action = 'store_true',
help = 'output geom file with one-dimensional data arrangement')
parser.set_defaults(oneD = False,
)
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read() damask.util.croak(geom)
info,extra_header = table.head_getGeom() geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
damask.util.report_geom(info)
errors = [] if name is None:
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') sys.stdout.write(str(geom.show()))
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.') else:
if errors != []: geom.to_file(name)
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- write header ---------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.head_write()
# --- write microstructure information ------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']) # read microstructure
formatwidth = int(math.floor(math.log10(microstructure.max())+1)) # efficient number printing format
table.data = microstructure if options.oneD else \
microstructure.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')
#--- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table

View File

@ -1,15 +1,20 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-
import os,sys,math import os
import numpy as np import sys
from scipy import ndimage from io import StringIO
from optparse import OptionParser from optparse import OptionParser
from scipy import ndimage
import numpy as np
import damask import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0] scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version]) scriptID = ' '.join([scriptName,damask.version])
def taintedNeighborhood(stencil,trigger=[],size=1): def taintedNeighborhood(stencil,trigger=[],size=1):
me = stencil[stencil.shape[0]//2] me = stencil[stencil.shape[0]//2]
@ -21,11 +26,12 @@ def taintedNeighborhood(stencil,trigger=[],size=1):
trigger = list(trigger) trigger = list(trigger)
return np.any(np.in1d(stencil,np.array(trigger))) return np.any(np.in1d(stencil,np.array(trigger)))
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
# MAIN # MAIN
#-------------------------------------------------------------------------------------------------- #--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """ parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Offset microstructure index for points which see a microstructure different from themselves Offset microstructure index for points which see a microstructure different from themselves
(or listed as triggers) within a given (cubic) vicinity, i.e. within the region close to a grain/phase boundary. (or listed as triggers) within a given (cubic) vicinity, i.e. within the region close to a grain/phase boundary.
@ -35,13 +41,13 @@ parser.add_option('-v', '--vicinity',
dest = 'vicinity', dest = 'vicinity',
type = 'int', metavar = 'int', type = 'int', metavar = 'int',
help = 'voxel distance checked for presence of other microstructure [%default]') help = 'voxel distance checked for presence of other microstructure [%default]')
parser.add_option('-m', '--microstructureoffset', parser.add_option('-o', '--offset',
dest='offset', dest='offset',
type = 'int', metavar = 'int', type = 'int', metavar = 'int',
help = 'offset (positive or negative) for tagged microstructure indices. '+ help='offset (positive or negative) to tag microstructure indices, defaults to max microstructure index')
'"0" selects maximum microstructure index [%default]')
parser.add_option('-t', '--trigger', parser.add_option('-t', '--trigger',
action = 'extend', dest = 'trigger', metavar = '<int LIST>', dest = 'trigger',
action = 'extend', metavar = '<int LIST>',
help = 'list of microstructure indices triggering a change') help = 'list of microstructure indices triggering a change')
parser.add_option('-n', '--nonperiodic', parser.add_option('-n', '--nonperiodic',
dest = 'mode', dest = 'mode',
@ -49,86 +55,34 @@ parser.add_option('-n', '--nonperiodic',
help = 'assume geometry to be non-periodic') help = 'assume geometry to be non-periodic')
parser.set_defaults(vicinity = 1, parser.set_defaults(vicinity = 1,
offset = 0,
trigger = [], trigger = [],
mode = 'wrap', mode = 'wrap',
) )
(options, filenames) = parser.parse_args() (options, filenames) = parser.parse_args()
options.trigger = np.array(options.trigger, dtype=int) options.trigger = np.array(options.trigger, dtype=int)
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None] if filenames == []: filenames = [None]
for name in filenames: for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name) damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------- geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read() offset = np.nanmax(geom.microstructure) if options.offset is None else options.offset
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)
errors = [] damask.util.croak(geom.update(np.where(ndimage.filters.generic_filter(
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.') geom.microstructure,
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.') taintedNeighborhood,
if errors != []: size=1+2*options.vicinity,mode=options.mode,
damask.util.croak(errors) extra_arguments=(),
table.close(dismiss = True) extra_keywords={"trigger":options.trigger,"size":1+2*options.vicinity}),
continue geom.microstructure + offset,geom.microstructure)))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
# --- read data ------------------------------------------------------------------------------------ if name is None:
sys.stdout.write(str(geom.show()))
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure else:
geom.to_file(name)
# --- do work ------------------------------------------------------------------------------------
newInfo = {
'microstructures': 0,
}
if options.offset == 0: options.offset = microstructure.max()
microstructure = np.where(ndimage.filters.generic_filter(microstructure,
taintedNeighborhood,
size=1+2*options.vicinity,mode=options.mode,
extra_arguments=(),
extra_keywords={"trigger":options.trigger,"size":1+2*options.vicinity}),
microstructure + options.offset,microstructure)
newInfo['microstructures'] = len(np.unique(microstructure))
# --- report ---------------------------------------------------------------------------------------
if (newInfo['microstructures'] != info['microstructures']):
damask.util.croak('--> microstructures: %i'%newInfo['microstructures'])
# --- write header ---------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()
# --- write microstructure information ------------------------------------------------------------
formatwidth = int(math.floor(math.log10(np.nanmax(microstructure))+1))
table.data = microstructure.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray('%{}i'.format(formatwidth),delimiter = ' ')
# --- output finalization --------------------------------------------------------------------------
table.close() # close ASCII table
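generic_filter slides a cubic stencil of edge length 1+2*vicinity across the microstructure; taintedNeighborhood flags every point whose stencil contains a different (or triggering) index, and flagged points get the offset added. A reduced 2D sketch of the same mechanism with a hypothetical array:

import numpy as np
from scipy import ndimage

def tainted(stencil):
    # True if the flattened stencil contains an index different from its center value
    return np.any(stencil != stencil[stencil.size//2])

micro  = np.array([[1,1,1,2,2,2]]*4)                 # hypothetical two-grain layout
offset = micro.max()

flagged = ndimage.filters.generic_filter(micro, tainted, size=3, mode='nearest')
result  = np.where(flagged, micro + offset, micro)   # only voxels at the 1|2 boundary are shifted
print(result)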

View File

@ -1,5 +1,3 @@
# -*- coding: UTF-8 no BOM -*-
#################################################################################################### ####################################################################################################
# Code below available according to the following conditions on https://github.com/MarDiehl/3Drotations # Code below available according to the following conditions on https://github.com/MarDiehl/3Drotations
#################################################################################################### ####################################################################################################

View File

@ -17,8 +17,7 @@ from .orientation import Symmetry, Lattice, Rotation, Orientation # noqa
from .dadf5 import DADF5 # noqa from .dadf5 import DADF5 # noqa
#from .block import Block # only one class #from .block import Block # only one class
from .result import Result # noqa from .geom import Geom # noqa
from .geometry import Geometry # noqa
from .solver import Solver # noqa from .solver import Solver # noqa
from .test import Test # noqa from .test import Test # noqa
from .util import extendableOption # noqa from .util import extendableOption # noqa

View File

@ -1,6 +1,9 @@
# -*- coding: UTF-8 no BOM -*- import os
import sys
import re
import shlex
from collections import Iterable
import os, sys
import numpy as np import numpy as np
# ------------------------------------------------------------------ # ------------------------------------------------------------------
@ -80,8 +83,6 @@ class ASCIItable():
def _quote(self, def _quote(self,
what): what):
"""Quote empty or white space-containing output""" """Quote empty or white space-containing output"""
import re
return '{quote}{content}{quote}'.format( return '{quote}{content}{quote}'.format(
quote = ('"' if str(what)=='' or re.search(r"\s",str(what)) else ''), quote = ('"' if str(what)=='' or re.search(r"\s",str(what)) else ''),
content = what) content = what)
@ -147,8 +148,6 @@ class ASCIItable():
by either reading the first row or, by either reading the first row or,
if keyword "head[*]" is present, the last line of the header if keyword "head[*]" is present, the last line of the header
""" """
import re,shlex
try: try:
self.__IO__['in'].seek(0) self.__IO__['in'].seek(0)
except IOError: except IOError:
@ -244,17 +243,6 @@ class ASCIItable():
return info,extra_header return info,extra_header
# ------------------------------------------------------------------
def head_putGeom(self,info):
"""Translate geometry description to header"""
self.info_append([
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(info['microstructures']),
])
# ------------------------------------------------------------------ # ------------------------------------------------------------------
def labels_append(self, def labels_append(self,
what, what,
@ -287,8 +275,6 @@ class ASCIItable():
"x" for "1_x","2_x",... unless raw output is requested. "x" for "1_x","2_x",... unless raw output is requested.
operates on object tags or given list. operates on object tags or given list.
""" """
from collections import Iterable
if tags is None: tags = self.tags if tags is None: tags = self.tags
if isinstance(tags, Iterable) and not raw: # check whether list of tags is requested if isinstance(tags, Iterable) and not raw: # check whether list of tags is requested
@ -324,8 +310,6 @@ class ASCIItable():
return numpy array if asked for list of labels. return numpy array if asked for list of labels.
transparently deals with label positions implicitly given as numbers or their headings given as strings. transparently deals with label positions implicitly given as numbers or their headings given as strings.
""" """
from collections import Iterable
if isinstance(labels, Iterable) and not isinstance(labels, str): # check whether list of labels is requested if isinstance(labels, Iterable) and not isinstance(labels, str): # check whether list of labels is requested
idx = [] idx = []
for label in labels: for label in labels:
@ -365,8 +349,6 @@ class ASCIItable():
return numpy array if asked for list of labels. return numpy array if asked for list of labels.
transparently deals with label positions implicitly given as numbers or their headings given as strings. transparently deals with label positions implicitly given as numbers or their headings given as strings.
""" """
from collections import Iterable
listOfLabels = isinstance(labels, Iterable) and not isinstance(labels, str) # check whether list of labels is requested listOfLabels = isinstance(labels, Iterable) and not isinstance(labels, str) # check whether list of labels is requested
if not listOfLabels: labels = [labels] if not listOfLabels: labels = [labels]
@ -400,8 +382,6 @@ class ASCIItable():
return numpy array if asked for list of labels. return numpy array if asked for list of labels.
transparently deals with label positions implicitly given as numbers or their headings given as strings. transparently deals with label positions implicitly given as numbers or their headings given as strings.
""" """
from collections import Iterable
start = self.label_index(labels) start = self.label_index(labels)
dim = self.label_dimension(labels) dim = self.label_dimension(labels)
@ -447,8 +427,6 @@ class ASCIItable():
advance = True, advance = True,
respectLabels = True): respectLabels = True):
"""Read next line (possibly buffered) and parse it into data array""" """Read next line (possibly buffered) and parse it into data array"""
import shlex
self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \ self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \
else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file
@ -469,8 +447,6 @@ class ASCIItable():
def data_readArray(self, def data_readArray(self,
labels = []): labels = []):
"""Read whole data of all (given) labels as numpy array""" """Read whole data of all (given) labels as numpy array"""
from collections import Iterable
try: self.data_rewind() # try to wind back to start of data try: self.data_rewind() # try to wind back to start of data
except: pass # assume/hope we are at data start already... except: pass # assume/hope we are at data start already...

View File

@ -1,8 +1,6 @@
# -*- coding: UTF-8 no BOM -*- import math
import math,numpy as np import numpy as np
### --- COLOR CLASS --------------------------------------------------
class Color(): class Color():
""" """

View File

@ -1,6 +1,5 @@
# -*- coding: UTF-8 no BOM -*- import os
import re
import os,re
class Environment(): class Environment():
__slots__ = [ \ __slots__ = [ \

269 python/damask/geom.py Normal file
View File

@ -0,0 +1,269 @@
import os
from io import StringIO
import numpy as np
import vtk
from vtk.util import numpy_support
from . import util
from . import version
class Geom():
"""Geometry definition for grid solvers"""
def __init__(self,microstructure,size,origin=[0.0,0.0,0.0],homogenization=1,comments=[]):
"""New geometry definition from array of microstructures and size"""
self.set_microstructure(microstructure)
self.set_size(size)
self.set_origin(origin)
self.set_homogenization(homogenization)
self.set_comments(comments)
def __repr__(self):
"""Basic information on geometry definition"""
return util.srepr([
'grid a b c: {}'.format(' x '.join(map(str,self.get_grid ()))),
'size x y z: {}'.format(' x '.join(map(str,self.get_size ()))),
'origin x y z: {}'.format(' '.join(map(str,self.get_origin()))),
'homogenization: {}'.format(self.get_homogenization()),
'# microstructures: {}'.format(len(np.unique(self.microstructure))),
'max microstructure: {}'.format(np.nanmax(self.microstructure)),
])
def update(self,microstructure=None,size=None,origin=None,rescale=False):
"""Updates microstructure and size"""
grid_old = self.get_grid()
size_old = self.get_size()
origin_old = self.get_origin()
unique_old = len(np.unique(self.microstructure))
max_old = np.nanmax(self.microstructure)
if size is not None and rescale:
raise ValueError('Either set size explicitly or rescale automatically')
self.set_microstructure(microstructure)
self.set_size(self.get_grid()/grid_old*self.size if rescale else size)
self.set_origin(origin)
message = ['grid a b c: {}'.format(' x '.join(map(str,grid_old)))]
if np.any(grid_old != self.get_grid()):
message[-1] = util.delete(message[-1])
message.append('grid a b c: {}'.format(' x '.join(map(str,self.get_grid()))))
message.append('size x y z: {}'.format(' x '.join(map(str,size_old))))
if np.any(size_old != self.get_size()):
message[-1] = util.delete(message[-1])
message.append('size x y z: {}'.format(' x '.join(map(str,self.get_size()))))
message.append('origin x y z: {}'.format(' '.join(map(str,origin_old))))
if np.any(origin_old != self.get_origin()):
message[-1] = util.delete(message[-1])
message.append('origin x y z: {}'.format(' '.join(map(str,self.get_origin()))))
message.append('homogenization: {}'.format(self.get_homogenization()))
message.append('# microstructures: {}'.format(unique_old))
if unique_old != len(np.unique(self.microstructure)):
message[-1] = util.delete(message[-1])
message.append('# microstructures: {}'.format(len(np.unique(self.microstructure))))
message.append('max microstructure: {}'.format(max_old))
if max_old != np.nanmax(self.microstructure):
message[-1] = util.delete(message[-1])
message.append('max microstructure: {}'.format(np.nanmax(self.microstructure)))
return util.return_message(message)
def set_comments(self,comments):
self.comments = []
self.add_comments(comments)
def add_comments(self,comments):
self.comments += [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
def set_microstructure(self,microstructure):
if microstructure is not None:
if len(microstructure.shape) != 3:
raise ValueError('Invalid microstructure shape {}'.format(microstructure.shape))
elif microstructure.dtype not in np.sctypes['float'] + np.sctypes['int']:
raise TypeError('Invalid data type {} for microstructure'.format(microstructure.dtype))
else:
self.microstructure = np.copy(microstructure)
def set_size(self,size):
if size is None:
grid = np.asarray(self.microstructure.shape)
self.size = grid/np.max(grid)
else:
if len(size) != 3 or any(np.array(size)<=0):
raise ValueError('Invalid size {}'.format(size))
else:
self.size = np.array(size)
def set_origin(self,origin):
if origin is not None:
if len(origin) != 3:
raise ValueError('Invalid origin {}'.format(origin))
else:
self.origin = np.array(origin)
def set_homogenization(self,homogenization):
if homogenization is not None:
if not isinstance(homogenization,int) or homogenization < 1:
raise TypeError('Invalid homogenization {}'.format(homogenization))
else:
self.homogenization = homogenization
def get_microstructure(self):
return np.copy(self.microstructure)
def get_size(self):
return np.copy(self.size)
def get_origin(self):
return np.copy(self.origin)
def get_grid(self):
return np.array(self.microstructure.shape)
def get_homogenization(self):
return self.homogenization
def get_comments(self):
return self.comments[:]
def get_header(self):
header = ['{} header'.format(len(self.comments)+4)] + self.comments
header.append('grid a {} b {} c {}'.format(*self.get_grid()))
header.append('size x {} y {} z {}'.format(*self.get_size()))
header.append('origin x {} y {} z {}'.format(*self.get_origin()))
header.append('homogenization {}'.format(self.get_homogenization()))
return header
@classmethod
def from_file(cls,fname):
"""Reads a geom file"""
with (open(fname) if isinstance(fname,str) else fname) as f:
f.seek(0)
header_length,keyword = f.readline().split()[:2]
header_length = int(header_length)
content = f.readlines()
if not keyword.startswith('head') or header_length < 3:
raise TypeError('Header length information missing or invalid')
comments = []
for i,line in enumerate(content[:header_length]):
items = line.lower().strip().split()
key = items[0] if len(items) > 0 else ''
if key == 'grid':
grid = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
elif key == 'size':
size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
elif key == 'origin':
origin = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
elif key == 'homogenization':
homogenization = int(items[1])
else:
comments.append(line.strip())
microstructure = np.empty(grid.prod()) # initialize as flat array
i = 0
for line in content[header_length:]:
items = line.split()
if len(items) == 3:
if items[1].lower() == 'of':
items = np.ones(int(items[0]))*float(items[2])
elif items[1].lower() == 'to':
items = np.linspace(int(items[0]),int(items[2]),
abs(int(items[2])-int(items[0]))+1,dtype=float)
else: items = list(map(float,items))
else: items = list(map(float,items))
microstructure[i:i+len(items)] = items
i += len(items)
if i != grid.prod():
raise TypeError('Invalid file: expected {} entries, found {}'.format(grid.prod(),i))
microstructure = microstructure.reshape(grid,order='F')
if not np.any(np.mod(microstructure.flatten(),1) != 0.0): # no float present
microstructure = microstructure.astype('int')
return cls(microstructure.reshape(grid),size,origin,homogenization,comments)
def to_file(self,fname):
"""Writes to file"""
header = self.get_header()
grid = self.get_grid()
format_string = '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure))))) if self.microstructure.dtype == int \
else '%g'
np.savetxt(fname,
self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
header='\n'.join(header), fmt=format_string, comments='')
def to_vtk(self,fname=None):
"""Generates vtk file. If file name is given, stored in file otherwise returned as string"""
grid = self.get_grid() + np.ones(3,dtype=int)
size = self.get_size()
origin = self.get_origin()
coords = [
np.linspace(0,size[0],grid[0]) + origin[0],
np.linspace(0,size[1],grid[1]) + origin[1],
np.linspace(0,size[2],grid[2]) + origin[2]
]
rGrid = vtk.vtkRectilinearGrid()
coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
rGrid.SetDimensions(*grid)
for d,coord in enumerate(coords):
for c in coord:
coordArray[d].InsertNextValue(c)
rGrid.SetXCoordinates(coordArray[0])
rGrid.SetYCoordinates(coordArray[1])
rGrid.SetZCoordinates(coordArray[2])
ms = numpy_support.numpy_to_vtk(num_array=self.microstructure.flatten(order='F'),
array_type=vtk.VTK_INT if self.microstructure.dtype == int else vtk.VTK_FLOAT)
ms.SetName('microstructure')
rGrid.GetCellData().AddArray(ms)
if fname is None:
writer = vtk.vtkDataSetWriter()
writer.SetHeader('damask.Geom '+version)
writer.WriteToOutputStringOn()
else:
writer = vtk.vtkXMLRectilinearGridWriter()
writer.SetCompressorTypeToZLib()
writer.SetDataModeToBinary()
ext = os.path.splitext(fname)[1]
if ext == '':
name = fname + '.' + writer.GetDefaultFileExtension()
elif ext == writer.GetDefaultFileExtension():
name = fname
else:
raise ValueError("unknown extension {}".format(ext))
writer.SetFileName(name)
writer.SetInputData(rGrid)
writer.Write()
if fname is None: return writer.GetOutputString()
def show(self):
"""Show raw content (as in file)"""
f=StringIO()
self.to_file(f)
f.seek(0)
return ''.join(f.readlines())
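The new Geom class bundles microstructure array, size, origin, homogenization, and comments, and the pre/post-processing scripts above now round-trip through it. A minimal usage sketch, assuming the damask package from this commit is importable:

import numpy as np
import damask

micro = np.ones((8,8,8), dtype=int)                  # hypothetical two-grain box
micro[:4,:,:] = 2

geom = damask.Geom(micro, size=[1.0,1.0,1.0], homogenization=1,
                   comments=['example geometry'])
print(geom)                                          # grid/size/origin/homogenization summary

print(geom.update(microstructure=micro.repeat(2,axis=0), rescale=True))   # grid doubled along x, size rescaled
geom.to_file('example.geom')                         # ASCII geom file
geom.to_vtk('example')                               # example.vtr rectilinear grid (requires vtk)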

View File

@ -1,7 +0,0 @@
# -*- coding: UTF-8 no BOM -*-
"""Aggregator for geometry handling"""
from .geometry import Geometry # noqa
from .spectral import Spectral # noqa
from .marc import Marc # noqa

View File

@ -1,21 +0,0 @@
# -*- coding: UTF-8 no BOM -*-
import damask.geometry
class Geometry():
"""
General class for geometry parsing.
Sub-classed by the individual solvers.
"""
def __init__(self,solver=''):
solverClass = {
'spectral': damask.geometry.Spectral,
'marc': damask.geometry.Marc,
}
if solver.lower() in list(solverClass.keys()):
self.__class__=solverClass[solver.lower()]
self.__init__()

View File

@ -1,9 +0,0 @@
# -*- coding: UTF-8 no BOM -*-
from .geometry import Geometry
class Marc(Geometry):
def __init__(self):
self.solver='Marc'

View File

@ -1,9 +0,0 @@
# -*- coding: UTF-8 no BOM -*-
from .geometry import Geometry
class Spectral(Geometry):
def __init__(self):
self.solver='Spectral'

View File

@ -1,7 +1,7 @@
# -*- coding: UTF-8 no BOM -*-
import math import math
import numpy as np import numpy as np
from . import Lambert from . import Lambert
from .quaternion import Quaternion from .quaternion import Quaternion
from .quaternion import P from .quaternion import P

View File

@ -1,5 +1,3 @@
# -*- coding: UTF-8 no BOM -*-
import numpy as np import numpy as np
P = -1 # convention (see DOI:10.1088/0965-0393/23/8/083501) P = -1 # convention (see DOI:10.1088/0965-0393/23/8/083501)

View File

@ -1,41 +0,0 @@
# -*- coding: UTF-8 no BOM -*-
import numpy as np
try:
import h5py
except (ImportError) as e:
pass # sys.stderr.write('\nREMARK: h5py module not available \n\n')
class Result():
"""
General class for result parsing.
Needs h5py to be installed
"""
def __init__(self,resultsFile):
self.data=h5py.File(resultsFile,"r")
self.Npoints=self.data.attrs['Number of Materialpoints']
print("Opened {} with {} points".format(resultsFile,self.Npoints))
def getCrystallite(self,labels,inc,constituent=1,points=None):
if points is None: points = np.array(np.array(range(self.Npoints)))
results = {}
mapping=self.data['mapping/crystallite']
for instance in self.data['increments/%s/crystallite/'%inc]:
dsets = list(self.data['increments/%s/crystallite/%s'%(inc,instance)].keys())
for label in labels:
if label in dsets and label not in results:
shape = np.shape(self.data['increments/%s/crystallite/%s/%s'%(inc,instance,label)])[1:]
results[label] = np.nan*np.ones(np.array((self.Npoints,)+shape))
for myPoint in range(len(points)):
matPoint = points[myPoint]
pos = mapping[matPoint,constituent-1]
if pos[0] != 0:
try:
for label in labels:
results[label][matPoint,...] = self.data['increments/%s/crystallite/%s/%s'%(inc,pos[0],label)][pos[1],...]
except:
pass
return results

View File

@ -1,11 +1,15 @@
# -*- coding: UTF-8 no BOM -*- import sys
import sys,time,os,subprocess,shlex import time
import numpy as np import os
import subprocess
import shlex
from optparse import Option from optparse import Option
from queue import Queue from queue import Queue
from threading import Thread from threading import Thread
import numpy as np
class bcolors: class bcolors:
""" """
ASCII Colors (Blender code) ASCII Colors (Blender code)
@ -14,15 +18,16 @@ class bcolors:
http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
""" """
HEADER = '\033[95m' HEADER = '\033[95m'
OKBLUE = '\033[94m' OKBLUE = '\033[94m'
OKGREEN = '\033[92m' OKGREEN = '\033[92m'
WARNING = '\033[93m' WARNING = '\033[93m'
FAIL = '\033[91m' FAIL = '\033[91m'
ENDC = '\033[0m' ENDC = '\033[0m'
BOLD = '\033[1m' BOLD = '\033[1m'
DIM = '\033[2m' DIM = '\033[2m'
UNDERLINE = '\033[4m' UNDERLINE = '\033[4m'
CROSSOUT = '\033[9m'
def disable(self): def disable(self):
self.HEADER = '' self.HEADER = ''
@ -33,6 +38,7 @@ class bcolors:
self.ENDC = '' self.ENDC = ''
self.BOLD = '' self.BOLD = ''
self.UNDERLINE = '' self.UNDERLINE = ''
self.CROSSOUT = ''
# ----------------------------- # -----------------------------
@ -47,14 +53,15 @@ def srepr(arg,glue = '\n'):
# ----------------------------- # -----------------------------
def croak(what, newline = True): def croak(what, newline = True):
"""Writes formated to stderr""" """Writes formated to stderr"""
sys.stderr.write(srepr(what,glue = '\n') + ('\n' if newline else '')) if what is not None:
sys.stderr.write(srepr(what,glue = '\n') + ('\n' if newline else ''))
sys.stderr.flush() sys.stderr.flush()
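With the added guard, a None argument no longer produces an empty line on stderr; only the flush remains. A quick illustration, assuming the helper is used via damask.util as in the rest of the code base:

from damask import util

util.croak('assembling header')   # writes the string plus newline to stderr
util.croak(None)                  # after this change: writes nothing, still flushes stderr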
# ----------------------------- # -----------------------------
def report(who = None, def report(who = None,
what = None): what = None):
"""Reports script and file name""" """Reports script and file name"""
croak( (emph(who)+': ' if who is not None else '') + (what if what is not None else '') ) croak( (emph(who)+': ' if who is not None else '') + (what if what is not None else '') + '\n' )
# ----------------------------- # -----------------------------
@ -85,6 +92,11 @@ def delete(what):
"""Dims string""" """Dims string"""
return bcolors.DIM+srepr(what)+bcolors.ENDC return bcolors.DIM+srepr(what)+bcolors.ENDC
# -----------------------------
def strikeout(what):
"""Dims string"""
return bcolors.CROSSOUT+srepr(what)+bcolors.ENDC
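The new strikeout helper mirrors emph and delete: it wraps its argument in the ANSI strike-through escape added to bcolors above. A usage sketch (the message text is made up), again assuming damask.util:

from damask import util

print(util.strikeout('deprecated option'))   # rendered crossed out on ANSI-capable terminals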
# ----------------------------- # -----------------------------
def execute(cmd, def execute(cmd,
streamIn = None, streamIn = None,
@ -113,6 +125,19 @@ def coordGridAndSize(coordinates):
grid = np.array(list(map(len,coords)),'i') grid = np.array(list(map(len,coords)),'i')
size = grid/np.maximum(np.ones(dim,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1) size = grid/np.maximum(np.ones(dim,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other ones size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other ones
delta = size/grid
N = grid.prod()
if N != len(coordinates):
raise ValueError('Data count {} does not match grid {}.'.format(len(coordinates),' x '.join(map(repr,grid))))
if np.any(np.abs(np.log10((coords[0][1:]-coords[0][:-1])/delta[0])) > 0.01) \
or np.any(np.abs(np.log10((coords[1][1:]-coords[1][:-1])/delta[1])) > 0.01):
raise ValueError('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
if dim==3 and np.any(np.abs(np.log10((coords[2][1:]-coords[2][:-1])/delta[2])) > 0.01):
raise ValueError('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
return grid,size return grid,size
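The added checks verify that the point count matches the inferred grid and that the spacing along each axis is uniform to within |log10(ratio)| <= 0.01, i.e. roughly plus or minus 2.3 %. A toy reproduction of the spacing test (coordinate values are made up):

import numpy as np

coords = np.array([0.0, 1.0, 2.0, 3.05])        # last spacing deviates by 5 %
delta  = 1.0                                    # nominal spacing, size/grid
ratio  = (coords[1:]-coords[:-1])/delta
print(np.any(np.abs(np.log10(ratio)) > 0.01))   # True -> coordGridAndSize would raise ValueError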
# ----------------------------- # -----------------------------
@ -171,6 +196,17 @@ def progressBar(iteration, total, prefix='', bar_length=50):
if iteration == total: sys.stderr.write('\n') if iteration == total: sys.stderr.write('\n')
sys.stderr.flush() sys.stderr.flush()
class return_message():
"""Object with formatted return message"""
def __init__(self,message):
self.message = message
def __repr__(self):
"""Return message suitable for interactive shells"""
return srepr(self.message)
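return_message simply defers to srepr, so a list of strings echoes one line per entry in an interactive session. A minimal sketch, assuming the class is reachable as damask.util.return_message and with illustrative message text:

from damask import util

msg = util.return_message(['increment 12 converged', 'writing results'])
print(repr(msg))   # two lines, joined by newline via srepr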
def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0, def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0,

View File

@ -491,7 +491,7 @@ module lattice
end enum end enum
integer(kind(LATTICE_undefined_ID)), dimension(:), allocatable, public, protected :: & integer(kind(LATTICE_undefined_ID)), dimension(:), allocatable, public, protected :: &
lattice_structure, trans_lattice_structure lattice_structure
interface lattice_forestProjection ! DEPRECATED, use lattice_forestProjection_edge interface lattice_forestProjection ! DEPRECATED, use lattice_forestProjection_edge
@ -557,7 +557,6 @@ subroutine lattice_init
Nphases = size(config_phase) Nphases = size(config_phase)
allocate(lattice_structure(Nphases),source = LATTICE_undefined_ID) allocate(lattice_structure(Nphases),source = LATTICE_undefined_ID)
allocate(trans_lattice_structure(Nphases),source = LATTICE_undefined_ID)
allocate(lattice_C66(6,6,Nphases), source=0.0_pReal) allocate(lattice_C66(6,6,Nphases), source=0.0_pReal)
allocate(lattice_C3333(3,3,3,3,Nphases), source=0.0_pReal) allocate(lattice_C3333(3,3,3,3,Nphases), source=0.0_pReal)
@ -595,14 +594,6 @@ subroutine lattice_init
lattice_structure(p) = LATTICE_ort_ID lattice_structure(p) = LATTICE_ort_ID
end select end select
tag = 'undefined'
tag = config_phase(p)%getString('trans_lattice_structure',defaultVal=tag)
select case(trim(tag))
case('bcc')
trans_lattice_structure(p) = LATTICE_bcc_ID
case('hex','hexagonal')
trans_lattice_structure(p) = LATTICE_hex_ID
end select
lattice_C66(1,1,p) = config_phase(p)%getFloat('c11',defaultVal=0.0_pReal) lattice_C66(1,1,p) = config_phase(p)%getFloat('c11',defaultVal=0.0_pReal)
lattice_C66(1,2,p) = config_phase(p)%getFloat('c12',defaultVal=0.0_pReal) lattice_C66(1,2,p) = config_phase(p)%getFloat('c12',defaultVal=0.0_pReal)
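For context, the parsing removed from lattice_init consumed a phase-section key of this form in material.config; after this change the key is simply no longer read here. The section name and value below are illustrative:

[austenite]                      # phase section name is illustrative
lattice_structure        fcc
trans_lattice_structure  bcc     # accepted values were 'bcc', 'hex', 'hexagonal'; no longer read by lattice_init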