Merge branch 'development' into even-more-HDF5-postprocessing
commit 1186bd16f5
@@ -1 +1 @@
-Subproject commit d31da38cf25734a91e994a3d5d33bb048eb2f44f
+Subproject commit 64cda1c010d500f662cd9a298c7b7ad10ab91c3c
@@ -52,7 +52,7 @@ def gradFFT(geomdim,field):
 # MAIN
 # --------------------------------------------------------------------
 
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog option [ASCIItable(s)]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
 Add column(s) containing gradient of requested column(s).
 Operates on periodic ordered three-dimensional data sets
 of vector and scalar fields.
@@ -98,6 +98,6 @@ for name in filenames:
 
   writer.Write()
 
-  if name is None: sys.stdout.write(writer.GetOutputString()[:writer.GetOutputStringLength()]) # limiting of outputString is fix for vtk <7.0
+  if name is None: sys.stdout.write(writer.GetOutputString())
 
   table.close()
@@ -129,6 +129,6 @@ for name in filenames:
 
   writer.Write()
 
-  if name is None: sys.stdout.write(writer.GetOutputString()[:writer.GetOutputStringLength()]) # limiting of outputString is fix for vtk <7.0
+  if name is None: sys.stdout.write(writer.GetOutputString())
 
   table.close()
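The only change in the two hunks above is dropping the VTK < 7.0 workaround: older VTK returned an over-allocated buffer from GetOutputString(), so the result had to be truncated to GetOutputStringLength(). A minimal sketch of that pattern, not part of the commit; the writer class is not visible in these hunks, vtkRectilinearGridWriter is assumed here purely for illustration:

import vtk

# build a tiny rectilinear grid so the writer has something to serialize
grid = vtk.vtkRectilinearGrid()
grid.SetDimensions(2,2,2)
coords = vtk.vtkDoubleArray()
coords.InsertNextValue(0.0)
coords.InsertNextValue(1.0)
grid.SetXCoordinates(coords)
grid.SetYCoordinates(coords)
grid.SetZCoordinates(coords)

writer = vtk.vtkRectilinearGridWriter()   # legacy (.vtk) writer, provides GetOutputStringLength()
writer.WriteToOutputStringOn()            # write to memory instead of a file
writer.SetInputData(grid)
writer.Write()

out_new = writer.GetOutputString()                                    # sufficient for VTK >= 7
out_old = writer.GetOutputString()[:writer.GetOutputStringLength()]   # the VTK < 7 workaround being removed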
@@ -1,85 +1,88 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,math
|
||||
import numpy as np
|
||||
import os
|
||||
import sys
|
||||
from io import StringIO
|
||||
from optparse import OptionParser
|
||||
|
||||
import numpy as np
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
# MAIN
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
identifiers = {
|
||||
'grid': ['a','b','c'],
|
||||
'size': ['x','y','z'],
|
||||
'origin': ['x','y','z'],
|
||||
}
|
||||
mappings = {
|
||||
'grid': lambda x: int(x),
|
||||
'size': lambda x: float(x),
|
||||
'origin': lambda x: float(x),
|
||||
'homogenization': lambda x: int(x),
|
||||
'microstructures': lambda x: int(x),
|
||||
}
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option [geomfile(s)]', description = """
|
||||
Positions a geometric object within the (three-dimensional) canvas of a spectral geometry description.
|
||||
Depending on the sign of the dimension parameters, these objects can be boxes, cylinders, or ellipsoids.
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
|
||||
Inserts a primitive geometric object at a given position.
|
||||
These objects can be boxes, cylinders, or ellipsoids.
|
||||
|
||||
""", version = scriptID)
|
||||
|
||||
parser.add_option('-c', '--center', dest='center',
|
||||
parser.add_option('-c', '--center',
|
||||
dest='center',
|
||||
type='float', nargs = 3, metavar=' '.join(['float']*3),
|
||||
help='a,b,c origin of primitive %default')
|
||||
parser.add_option('-d', '--dimension', dest='dimension',
|
||||
parser.add_option('-d', '--dimension',
|
||||
dest='dimension',
|
||||
type='float', nargs = 3, metavar=' '.join(['float']*3),
|
||||
help='a,b,c extension of hexahedral box; negative values are diameters')
|
||||
parser.add_option('-e', '--exponent', dest='exponent',
|
||||
help='a,b,c extension of hexahedral box')
|
||||
parser.add_option('-e', '--exponent',
|
||||
dest='exponent',
|
||||
type='float', nargs = 3, metavar=' '.join(['float']*3),
|
||||
help='i,j,k exponents for axes - 0 gives octahedron (|x|^(2^0) + |y|^(2^0) + |z|^(2^0) < 1), \
|
||||
1 gives a sphere (|x|^(2^1) + |y|^(2^1) + |z|^(2^1) < 1), \
|
||||
large values produce boxes, negative turns concave.')
|
||||
parser.add_option('-f', '--fill', dest='fill',
|
||||
type='float', metavar = 'float',
|
||||
help='grain index to fill primitive. "0" selects maximum microstructure index + 1 [%default]')
|
||||
parser.add_option('-q', '--quaternion', dest='quaternion',
|
||||
help='i,j,k exponents for axes: '+
|
||||
'0 gives octahedron (|x|^(2^0) + |y|^(2^0) + |z|^(2^0) < 1), '+
|
||||
'1 gives a sphere (|x|^(2^1) + |y|^(2^1) + |z|^(2^1) < 1), '+
|
||||
'large values produce boxes, negative turn concave.')
|
||||
parser.add_option('-f', '--fill',
|
||||
dest='fill',
|
||||
type='float', metavar = 'int',
|
||||
help='microstructure index to fill primitive, defaults to max microstructure index + 1')
|
||||
parser.add_option('-q', '--quaternion',
|
||||
dest='quaternion',
|
||||
type='float', nargs = 4, metavar=' '.join(['float']*4),
|
||||
help = 'rotation of primitive as quaternion')
|
||||
parser.add_option('-a', '--angleaxis', dest='angleaxis', type=float,
|
||||
nargs = 4, metavar=' '.join(['float']*4),
|
||||
parser.add_option('-a', '--angleaxis',
|
||||
dest='angleaxis',
|
||||
type=float, nargs = 4, metavar=' '.join(['float']*4),
|
||||
help = 'axis and angle to rotate primitive')
|
||||
parser.add_option( '--degrees', dest='degrees',
|
||||
parser.add_option( '--degrees',
|
||||
dest='degrees',
|
||||
action='store_true',
|
||||
help = 'angle is given in degrees [%default]')
|
||||
parser.add_option( '--nonperiodic', dest='periodic',
|
||||
help = 'angle is given in degrees')
|
||||
parser.add_option( '--nonperiodic',
|
||||
dest='periodic',
|
||||
action='store_false',
|
||||
help = 'wrap around edges [%default]')
|
||||
parser.add_option( '--realspace', dest='realspace',
|
||||
help = 'wrap around edges')
|
||||
parser.add_option( '--realspace',
|
||||
dest='realspace',
|
||||
action='store_true',
|
||||
help = '-c and -d span [origin,origin+size] instead of [0,grid] coordinates')
|
||||
parser.add_option( '--invert', dest='inside',
|
||||
parser.add_option( '--invert',
|
||||
dest='inside',
|
||||
action='store_false',
|
||||
help = 'invert the volume filled by the primitive (inside/outside)')
|
||||
parser.add_option('--float', dest = 'float',
|
||||
action = 'store_true',
|
||||
help = 'use float input')
|
||||
|
||||
parser.set_defaults(center = (.0,.0,.0),
|
||||
fill = 0.0,
|
||||
degrees = False,
|
||||
exponent = (20,20,20), # box shape by default
|
||||
periodic = True,
|
||||
realspace = False,
|
||||
inside = True,
|
||||
float = False,
|
||||
)
|
||||
|
||||
(options, filenames) = parser.parse_args()
|
||||
|
||||
if options.dimension is None:
|
||||
parser.error('no dimension specified.')
|
||||
if [options.angleaxis,options.quaternion].count(None) == 0:
|
||||
parser.error('more than one rotation specified.')
|
||||
|
||||
if options.angleaxis is not None:
|
||||
rotation = damask.Rotation.fromAxisAngle(np.array(options.angleaxis),options.degrees,normalise=True)
|
||||
elif options.quaternion is not None:
|
||||
|
@@ -87,157 +90,49 @@ elif options.quaternion is not None:
|
|||
else:
|
||||
rotation = damask.Rotation()
|
||||
|
||||
datatype = 'f' if options.float else 'i'
|
||||
|
||||
options.center = np.array(options.center)
|
||||
options.dimension = np.array(options.dimension)
|
||||
# undo logarithmic sense of exponent and generate ellipsoids for negative dimensions (backward compatibility)
|
||||
options.exponent = np.where(np.array(options.dimension) > 0, np.power(2,options.exponent), 2)
|
||||
|
||||
# --- loop over input files -------------------------------------------------------------------------
|
||||
if filenames == []: filenames = [None]
|
||||
|
||||
for name in filenames:
|
||||
try: table = damask.ASCIItable(name = name,
|
||||
buffered = False,
|
||||
labeled = False)
|
||||
except: continue
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
# --- interpret header ----------------------------------------------------------------------------
|
||||
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
|
||||
grid = geom.get_grid()
|
||||
size = geom.get_size()
|
||||
|
||||
table.head_read()
|
||||
info,extra_header = table.head_getGeom()
|
||||
damask.util.report_geom(info)
|
||||
|
||||
errors = []
|
||||
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
|
||||
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
|
||||
#--- read data ------------------------------------------------------------------------------------
|
||||
|
||||
microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure
|
||||
|
||||
# --- do work ------------------------------------------------------------------------------------
|
||||
|
||||
newInfo = {
|
||||
'microstructures': 0,
|
||||
}
|
||||
|
||||
options.fill = np.nanmax(microstructure)+1 if options.fill == 0 else options.fill
|
||||
|
||||
microstructure = microstructure.reshape(info['grid'],order='F')
|
||||
|
||||
# coordinates given in real space (default) vs voxel space
|
||||
# scale to box of size [1.0,1.0,1.0]
|
||||
if options.realspace:
|
||||
options.center -= info['origin']
|
||||
options.center *= np.array(info['grid']) / np.array(info['size'])
|
||||
options.dimension *= np.array(info['grid']) / np.array(info['size'])
|
||||
center = (np.array(options.center) - geom.get_origin())/size
|
||||
r = np.array(options.dimension)/size/2.0
|
||||
else:
|
||||
center = (np.array(options.center) + 0.5)/grid
|
||||
r = np.array(options.dimension)/grid/2.0
|
||||
|
||||
grid = microstructure.shape
|
||||
if np.any(center<0.0) or np.any(center>=1.0): print('error')
|
||||
|
||||
# change to coordinate space where the primitive is the unit sphere/cube/etc
|
||||
if options.periodic: # use padding to achieve periodicity
|
||||
(X, Y, Z) = np.meshgrid(np.arange(-grid[0]/2, (3*grid[0])/2, dtype=np.float32), # 50% padding on each side
|
||||
np.arange(-grid[1]/2, (3*grid[1])/2, dtype=np.float32),
|
||||
np.arange(-grid[2]/2, (3*grid[2])/2, dtype=np.float32),
|
||||
indexing='ij')
|
||||
# Padding handling
|
||||
X = np.roll(np.roll(np.roll(X,
|
||||
-grid[0]//2, axis=0),
|
||||
-grid[1]//2, axis=1),
|
||||
-grid[2]//2, axis=2)
|
||||
Y = np.roll(np.roll(np.roll(Y,
|
||||
-grid[0]//2, axis=0),
|
||||
-grid[1]//2, axis=1),
|
||||
-grid[2]//2, axis=2)
|
||||
Z = np.roll(np.roll(np.roll(Z,
|
||||
-grid[0]//2, axis=0),
|
||||
-grid[1]//2, axis=1),
|
||||
-grid[2]//2, axis=2)
|
||||
else: # nonperiodic, much lighter on resources
|
||||
# change to coordinate space where the primitive is the unit sphere/cube/etc
|
||||
(X, Y, Z) = np.meshgrid(np.arange(0, grid[0], dtype=np.float32),
|
||||
np.arange(0, grid[1], dtype=np.float32),
|
||||
np.arange(0, grid[2], dtype=np.float32),
|
||||
indexing='ij')
|
||||
|
||||
# first by translating the center onto 0, 0.5 shifts the voxel origin onto the center of the voxel
|
||||
X -= options.center[0] - 0.5
|
||||
Y -= options.center[1] - 0.5
|
||||
Z -= options.center[2] - 0.5
|
||||
# and then by applying the rotation
|
||||
(X, Y, Z) = rotation * (X, Y, Z)
|
||||
# and finally by scaling (we don't worry about options.dimension being negative, np.abs occurs on the microstructure = np.where... line)
|
||||
X /= options.dimension[0] * 0.5
|
||||
Y /= options.dimension[1] * 0.5
|
||||
Z /= options.dimension[2] * 0.5
|
||||
|
||||
|
||||
# High exponents can cause underflow & overflow - loss of precision is okay here, we just compare it to 1, so +infinity and 0 are fine
|
||||
old_settings = np.seterr()
|
||||
offset = np.ones(3)*0.5 if options.periodic else center
|
||||
mask = np.full(grid,False)
|
||||
# High exponents can cause underflow & overflow - okay here, just compare to 1, so +infinity and 0 are fine
|
||||
np.seterr(over='ignore', under='ignore')
|
||||
|
||||
if options.periodic: # use padding to achieve periodicity
|
||||
inside = np.zeros(grid, dtype=bool)
|
||||
for i in range(2):
|
||||
for j in range(2):
|
||||
for k in range(2):
|
||||
inside = inside | ( # Most of this is handling the padding
|
||||
np.abs(X[grid[0] * i : grid[0] * (i+1),
|
||||
grid[1] * j : grid[1] * (j+1),
|
||||
grid[2] * k : grid[2] * (k+1)])**options.exponent[0] +
|
||||
np.abs(Y[grid[0] * i : grid[0] * (i+1),
|
||||
grid[1] * j : grid[1] * (j+1),
|
||||
grid[2] * k : grid[2] * (k+1)])**options.exponent[1] +
|
||||
np.abs(Z[grid[0] * i : grid[0] * (i+1),
|
||||
grid[1] * j : grid[1] * (j+1),
|
||||
grid[2] * k : grid[2] * (k+1)])**options.exponent[2] <= 1.0)
|
||||
e = 2.0**np.array(options.exponent)
|
||||
for x in range(grid[0]):
|
||||
for y in range(grid[1]):
|
||||
for z in range(grid[2]):
|
||||
coords = np.array([x+0.5,y+0.5,z+0.5])/grid
|
||||
mask[x,y,z] = np.sum(np.abs((rotation*(coords-offset))/r)**e) < 1
|
||||
|
||||
microstructure = np.where(inside,
|
||||
options.fill if options.inside else microstructure,
|
||||
microstructure if options.inside else options.fill)
|
||||
if options.periodic:
|
||||
shift = ((offset-center)*grid).astype(int)
|
||||
mask = np.roll(mask,shift,(0,1,2))
|
||||
|
||||
else: # nonperiodic, much lighter on resources
|
||||
microstructure = np.where(np.abs(X)**options.exponent[0] +
|
||||
np.abs(Y)**options.exponent[1] +
|
||||
np.abs(Z)**options.exponent[2] <= 1.0,
|
||||
options.fill if options.inside else microstructure,
|
||||
microstructure if options.inside else options.fill)
|
||||
if options.inside: mask = np.logical_not(mask)
|
||||
fill = np.nanmax(geom.microstructure)+1 if options.fill is None else options.fill
|
||||
|
||||
np.seterr(**old_settings) # Reset warnings to old state
|
||||
newInfo['microstructures'] = len(np.unique(microstructure))
|
||||
damask.util.croak(geom.update(np.where(mask,geom.microstructure,fill)))
|
||||
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
|
||||
|
||||
# --- report ---------------------------------------------------------------------------------------
|
||||
if (newInfo['microstructures'] != info['microstructures']):
|
||||
damask.util.croak('--> microstructures: {}'.format(newInfo['microstructures']))
|
||||
|
||||
|
||||
#--- write header ---------------------------------------------------------------------------------
|
||||
|
||||
table.info_clear()
|
||||
table.info_append(extra_header+[
|
||||
scriptID + ' ' + ' '.join(sys.argv[1:]),
|
||||
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
|
||||
"size\tx {}\ty {}\tz {}".format(*info['size']),
|
||||
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
|
||||
"homogenization\t{}".format(info['homogenization']),
|
||||
"microstructures\t{}".format(newInfo['microstructures']),
|
||||
])
|
||||
table.labels_clear()
|
||||
table.head_write()
|
||||
table.output_flush()
|
||||
|
||||
# --- write microstructure information ------------------------------------------------------------
|
||||
|
||||
format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(microstructure))+1)))
|
||||
table.data = microstructure.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
|
||||
table.data_writeArray(format,delimiter = ' ')
|
||||
|
||||
#--- output finalization --------------------------------------------------------------------------
|
||||
|
||||
table.close()
|
||||
if name is None:
|
||||
sys.stdout.write(str(geom.show()))
|
||||
else:
|
||||
geom.to_file(name)
|
||||
|
|
|
@@ -1,181 +1,77 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,math
|
||||
import numpy as np
|
||||
import os
|
||||
import sys
|
||||
from io import StringIO
|
||||
from optparse import OptionParser
|
||||
|
||||
import numpy as np
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# MAIN
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [geomfile(s)]', description = """
|
||||
Changes the (three-dimensional) canvas of a spectral geometry description.
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
|
||||
Increases or decreases the (three-dimensional) canvas.
|
||||
Grid can be given as absolute or relative values, e.g. 16 16 16 or 2x 0.5x 32.
|
||||
|
||||
""", version = scriptID)
|
||||
|
||||
parser.add_option('-g',
|
||||
'--grid',
|
||||
parser.add_option('-g','--grid',
|
||||
dest = 'grid',
|
||||
type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
|
||||
help = 'a,b,c grid of hexahedral box. [auto]')
|
||||
parser.add_option('-o',
|
||||
'--offset',
|
||||
help = 'a,b,c grid of hexahedral box')
|
||||
parser.add_option('-o','--offset',
|
||||
dest = 'offset',
|
||||
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
|
||||
help = 'a,b,c offset from old to new origin of grid [%default]')
|
||||
parser.add_option('-f',
|
||||
'--fill',
|
||||
parser.add_option('-f','--fill',
|
||||
dest = 'fill',
|
||||
type = 'float', metavar = 'float',
|
||||
help = '(background) canvas grain index. "0" selects maximum microstructure index + 1 [%default]')
|
||||
parser.add_option('--float',
|
||||
dest = 'float',
|
||||
action = 'store_true',
|
||||
help = 'use float input')
|
||||
parser.add_option('--blank',
|
||||
dest = 'blank',
|
||||
action = 'store_true',
|
||||
help = 'blank out (optional) input canvas content')
|
||||
type = 'float', metavar = 'int',
|
||||
help = 'background microstructure index, defaults to max microstructure index + 1')
|
||||
|
||||
parser.set_defaults(grid = ['0','0','0'],
|
||||
offset = (0,0,0),
|
||||
fill = 0.0,
|
||||
float = False,
|
||||
)
|
||||
parser.set_defaults(offset = (0,0,0))
|
||||
|
||||
(options, filenames) = parser.parse_args()
|
||||
|
||||
datatype = 'f' if options.float else 'i'
|
||||
options.grid = ['1','1','1'] if options.blank and options.grid == ['0','0','0'] else options.grid
|
||||
options.fill = 1 if options.blank and options.fill == 0 else options.fill
|
||||
|
||||
# --- loop over input files -------------------------------------------------------------------------
|
||||
|
||||
if filenames == []: filenames = [None]
|
||||
|
||||
for name in filenames:
|
||||
try: table = damask.ASCIItable(name = name,
|
||||
buffered = False,
|
||||
labeled = False)
|
||||
except: continue
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
if options.blank:
|
||||
extra_header = []
|
||||
info = {}
|
||||
info['grid'] = np.zeros(3,dtype=int)
|
||||
info['size'] = np.zeros(3,dtype=float)
|
||||
info['origin'] = np.zeros(3,dtype=float)
|
||||
info['microstructures'] = 0
|
||||
info['homogenization'] = 1
|
||||
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
|
||||
origin = geom.get_origin()
|
||||
size = geom.get_size()
|
||||
old = new = geom.get_grid()
|
||||
offset = np.asarray(options.offset)
|
||||
|
||||
if options.grid is not None:
|
||||
new = np.maximum(1,
|
||||
np.array([int(o*float(n.lower().replace('x',''))) if n.lower().endswith('x') \
|
||||
else int(n) for o,n in zip(old,options.grid)],dtype=int))
|
||||
|
||||
canvas = np.full(new,options.fill if options.fill is not None
|
||||
else np.nanmax(geom.microstructure)+1,geom.microstructure.dtype)
|
||||
|
||||
l = np.clip( offset, 0,np.minimum(old +offset,new))
|
||||
r = np.clip( offset+old,0,np.minimum(old*2+offset,new))
|
||||
L = np.clip(-offset, 0,np.minimum(new -offset,old))
|
||||
R = np.clip(-offset+new,0,np.minimum(new*2-offset,old))
|
||||
canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = geom.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
|
||||
|
||||
|
||||
damask.util.croak(geom.update(canvas,origin=origin+offset*size/old,rescale=True))
|
||||
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
|
||||
|
||||
if name is None:
|
||||
sys.stdout.write(str(geom.show()))
|
||||
else:
|
||||
|
||||
# --- interpret header ----------------------------------------------------------------------------
|
||||
|
||||
table.head_read()
|
||||
info,extra_header = table.head_getGeom()
|
||||
damask.util.report_geom(info)
|
||||
|
||||
errors = []
|
||||
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
|
||||
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
|
||||
# --- read data ------------------------------------------------------------------------------------
|
||||
|
||||
microstructure = table.microstructure_read(info['grid'],datatype).reshape(info['grid'],order='F') # read microstructure
|
||||
|
||||
# --- do work ------------------------------------------------------------------------------------
|
||||
|
||||
newInfo = {
|
||||
'grid': np.zeros(3,'i'),
|
||||
'origin': np.zeros(3,'d'),
|
||||
'microstructures': 0,
|
||||
}
|
||||
|
||||
newInfo['grid'] = np.array([int(o*float(n.translate(None,'xX'))) if n[-1].lower() == 'x'\
|
||||
else int(n) for o,n in zip(info['grid'],options.grid)],'i')
|
||||
newInfo['grid'] = np.where(newInfo['grid'] > 0, newInfo['grid'],info['grid'])
|
||||
|
||||
microstructure_cropped = np.zeros(newInfo['grid'],datatype)
|
||||
microstructure_cropped.fill(options.fill if options.float or options.fill > 0 else np.nanmax(microstructure)+1)
|
||||
|
||||
if not options.blank:
|
||||
xindex = np.arange(max(options.offset[0],0),min(options.offset[0]+newInfo['grid'][0],info['grid'][0]))
|
||||
yindex = np.arange(max(options.offset[1],0),min(options.offset[1]+newInfo['grid'][1],info['grid'][1]))
|
||||
zindex = np.arange(max(options.offset[2],0),min(options.offset[2]+newInfo['grid'][2],info['grid'][2]))
|
||||
translate_x = [i - options.offset[0] for i in xindex]
|
||||
translate_y = [i - options.offset[1] for i in yindex]
|
||||
translate_z = [i - options.offset[2] for i in zindex]
|
||||
if 0 in map(len,[xindex,yindex,zindex,translate_x,translate_y,translate_z]):
|
||||
damask.util.croak('invaldid combination of grid and offset.')
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
microstructure_cropped[min(translate_x):max(translate_x)+1,
|
||||
min(translate_y):max(translate_y)+1,
|
||||
min(translate_z):max(translate_z)+1] \
|
||||
= microstructure[min(xindex):max(xindex)+1,
|
||||
min(yindex):max(yindex)+1,
|
||||
min(zindex):max(zindex)+1]
|
||||
|
||||
newInfo['size'] = info['size']/info['grid']*newInfo['grid'] if np.all(info['grid'] > 0) else newInfo['grid']
|
||||
newInfo['origin'] = info['origin']+(info['size']/info['grid'] if np.all(info['grid'] > 0) \
|
||||
else newInfo['size']/newInfo['grid'])*options.offset
|
||||
newInfo['microstructures'] = len(np.unique(microstructure_cropped))
|
||||
|
||||
# --- report ---------------------------------------------------------------------------------------
|
||||
|
||||
remarks = []
|
||||
errors = []
|
||||
|
||||
if (any(newInfo['grid'] != info['grid'])):
|
||||
remarks.append('--> grid a b c: {}'.format(' x '.join(map(str,newInfo['grid']))))
|
||||
if (any(newInfo['size'] != info['size'])):
|
||||
remarks.append('--> size x y z: {}'.format(' x '.join(map(str,newInfo['size']))))
|
||||
if (any(newInfo['origin'] != info['origin'])):
|
||||
remarks.append('--> origin x y z: {}'.format(' : '.join(map(str,newInfo['origin']))))
|
||||
if ( newInfo['microstructures'] != info['microstructures']):
|
||||
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))
|
||||
|
||||
if np.any(newInfo['grid'] < 1): errors.append('invalid new grid a b c.')
|
||||
if np.any(newInfo['size'] <= 0.0): errors.append('invalid new size x y z.')
|
||||
|
||||
if remarks != []: damask.util.croak(remarks)
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
|
||||
# --- write header ---------------------------------------------------------------------------------
|
||||
|
||||
table.info_clear()
|
||||
table.info_append(extra_header+[
|
||||
scriptID + ' ' + ' '.join(sys.argv[1:]),
|
||||
"grid\ta {}\tb {}\tc {}".format(*newInfo['grid']),
|
||||
"size\tx {}\ty {}\tz {}".format(*newInfo['size']),
|
||||
"origin\tx {}\ty {}\tz {}".format(*newInfo['origin']),
|
||||
"homogenization\t{}".format(info['homogenization']),
|
||||
"microstructures\t{}".format(newInfo['microstructures']),
|
||||
])
|
||||
table.labels_clear()
|
||||
table.head_write()
|
||||
table.output_flush()
|
||||
|
||||
# --- write microstructure information ------------------------------------------------------------
|
||||
|
||||
format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(microstructure_cropped))+1)))
|
||||
table.data = microstructure_cropped.reshape((newInfo['grid'][0],newInfo['grid'][1]*newInfo['grid'][2]),order='F').transpose()
|
||||
table.data_writeArray(format,delimiter=' ')
|
||||
|
||||
# --- output finalization --------------------------------------------------------------------------
|
||||
|
||||
table.close() # close ASCII table
|
||||
geom.to_file(name)
|
||||
|
|
|
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+
+import os
+import sys
+from io import StringIO
+from optparse import OptionParser
+
+import damask
+
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID = ' '.join([scriptName,damask.version])
+
+
+#--------------------------------------------------------------------------------------------------
+# MAIN
+#--------------------------------------------------------------------------------------------------
+
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
+Writes vtk file for visualization.
+
+""", version = scriptID)
+
+(options, filenames) = parser.parse_args()
+
+
+if filenames == []: filenames = [None]
+
+for name in filenames:
+  damask.util.report(scriptName,name)
+
+  geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
+
+  damask.util.croak(geom)
+
+  if name is None:
+    sys.stdout.write(geom.to_vtk())
+  else:
+    geom.to_vtk(os.path.splitext(name)[0])
@@ -1,25 +0,0 @@
-#!/usr/bin/env bash
-
-if [[ "$1" == "-f" || "$1" == "--float" ]]; then
-  shift
-  arg='--float'
-else
-  arg=''
-fi
-
-for geom in "$@"
-do
-  geom_toTable $arg \
-  < $geom \
-  | \
-  vtk_rectilinearGrid > ${geom%.*}.vtk
-
-  geom_toTable $arg \
-  < $geom \
-  | \
-  vtk_addRectilinearGridData \
-  --vtk ${geom%.*}.vtk \
-  --data microstructure \
-
-  rm ${geom%.*}.vtk
-done
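The new geom_check.py added earlier in this diff replaces the bash wrapper deleted above: instead of piping geom_toTable output through vtk_rectilinearGrid and vtk_addRectilinearGridData, it loads the geometry with the Geom class and writes the VTK representation directly. A hedged usage sketch, not part of the commit; the file name is hypothetical and only calls visible in the added script are used:

import os
import damask

geom = damask.Geom.from_file('my_geom.geom')       # same call the new script uses
damask.util.croak(geom)                            # report the geometry summary
geom.to_vtk(os.path.splitext('my_geom.geom')[0])   # write the VTK file next to the input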
@@ -1,17 +1,23 @@
 #!/usr/bin/env python3
-# -*- coding: UTF-8 no BOM -*-
 
-import os,sys,math
-import numpy as np
-import damask
-from scipy import ndimage
+import os
+import sys
+from io import StringIO
+from optparse import OptionParser
+
+from scipy import ndimage
+import numpy as np
+
+import damask
 
+
 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])
 
 
 def mostFrequent(arr):
-  return np.argmax(np.bincount(arr.astype('int')))
+  unique, inverse = np.unique(arr, return_inverse=True)
+  return unique[np.argmax(np.bincount(inverse))]
 
 
 #--------------------------------------------------------------------------------------------------
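A short illustration, not part of the commit, of why mostFrequent was rewritten: np.bincount on the raw values allocates one bin for every integer up to the largest microstructure index and only works for non-negative integers, whereas binning the np.unique inverse mapping counts only the values that actually occur in the stencil:

import numpy as np

arr = np.array([1000000., 1000000., 3.])            # hypothetical stencil contents

# old approach: bins indexed by the value itself
counts = np.bincount(arr.astype('int'))             # ~1e6 bins for three entries
most_old = np.argmax(counts)                        # 1000000, memory scales with max(arr)

# new approach: bins over the compact inverse mapping
unique, inverse = np.unique(arr, return_inverse=True)
most_new = unique[np.argmax(np.bincount(inverse))]  # 1000000.0, independent of index magnitude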
@@ -19,76 +25,33 @@ def mostFrequent(arr):
|
|||
#--------------------------------------------------------------------------------------------------
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
|
||||
Smooth geometry by selecting most frequent microstructure index within given stencil at each location.
|
||||
Smooth microstructure by selecting most frequent index within given stencil at each location.
|
||||
|
||||
""", version=scriptID)
|
||||
|
||||
|
||||
parser.add_option('-s','--stencil',
|
||||
dest = 'stencil',
|
||||
type = 'int', metavar = 'int',
|
||||
help = 'size of smoothing stencil [%default]')
|
||||
|
||||
parser.set_defaults(stencil = 3,
|
||||
)
|
||||
parser.set_defaults(stencil = 3)
|
||||
|
||||
(options, filenames) = parser.parse_args()
|
||||
|
||||
|
||||
# --- loop over input files -------------------------------------------------------------------------
|
||||
|
||||
if filenames == []: filenames = [None]
|
||||
|
||||
for name in filenames:
|
||||
try: table = damask.ASCIItable(name = name,
|
||||
buffered = False,
|
||||
labeled = False)
|
||||
except: continue
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
# --- interpret header ----------------------------------------------------------------------------
|
||||
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
|
||||
|
||||
table.head_read()
|
||||
info,extra_header = table.head_getGeom()
|
||||
damask.util.report_geom(info)
|
||||
damask.util.croak(geom.update(ndimage.filters.generic_filter(
|
||||
geom.microstructure,mostFrequent,
|
||||
size=(options.stencil,)*3).astype(geom.microstructure.dtype)))
|
||||
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
|
||||
|
||||
errors = []
|
||||
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
|
||||
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
|
||||
# --- read data ------------------------------------------------------------------------------------
|
||||
|
||||
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure
|
||||
|
||||
# --- do work ------------------------------------------------------------------------------------
|
||||
|
||||
microstructure = ndimage.filters.generic_filter(microstructure,mostFrequent,size=(options.stencil,)*3).astype('int_')
|
||||
newInfo = {'microstructures': len(np.unique(microstructure))}
|
||||
|
||||
# --- report ---------------------------------------------------------------------------------------
|
||||
if ( newInfo['microstructures'] != info['microstructures']):
|
||||
damask.util.croak('--> microstructures: %i'%newInfo['microstructures'])
|
||||
info['microstructures'] == newInfo['microstructures']
|
||||
|
||||
# --- write header ---------------------------------------------------------------------------------
|
||||
|
||||
table.info_clear()
|
||||
table.info_append([scriptID + ' ' + ' '.join(sys.argv[1:]),])
|
||||
table.head_putGeom(info)
|
||||
table.info_append([extra_header])
|
||||
table.labels_clear()
|
||||
table.head_write()
|
||||
|
||||
# --- write microstructure information ------------------------------------------------------------
|
||||
|
||||
formatwidth = int(math.floor(math.log10(np.nanmax(microstructure))+1))
|
||||
table.data = microstructure.reshape((info['grid'][0],np.prod(info['grid'][1:])),order='F').transpose()
|
||||
table.data_writeArray('%{}i'.format(formatwidth),delimiter = ' ')
|
||||
|
||||
# --- output finalization --------------------------------------------------------------------------
|
||||
|
||||
table.close() # close ASCII table
|
||||
if name is None:
|
||||
sys.stdout.write(str(geom.show()))
|
||||
else:
|
||||
geom.to_file(name)
|
||||
|
|
|
@@ -1,11 +1,15 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,h5py
|
||||
import numpy as np
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
import h5py
|
||||
import numpy as np
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
@@ -14,40 +18,50 @@ scriptID = ' '.join([scriptName,damask.version])
|
|||
# MAIN
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [dream3dfile[s]]', description = """
|
||||
Convert DREAM3D file to geometry file. This can be done from cell data (direct pointwise takeover) or
|
||||
from grain data (individual grains are segmented). Requires orientation data as quaternion.
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [DREAM.3Dfile(s)]', description = """
|
||||
Converts DREAM.3D file. Input can be cell data (direct pointwise takeover) or grain data (individual
|
||||
grains are segmented). Requires orientation data as quaternion.
|
||||
|
||||
""", version = scriptID)
|
||||
|
||||
parser.add_option('-b','--basegroup',
|
||||
dest = 'basegroup', metavar = 'string',
|
||||
dest = 'basegroup',
|
||||
metavar = 'string',
|
||||
help = 'name of the group in "DataContainers" containing the pointwise (and, if applicable grain average) data')
|
||||
parser.add_option('-p','--pointwise',
|
||||
dest = 'pointwise', metavar = 'string',
|
||||
dest = 'pointwise',
|
||||
metavar = 'string',
|
||||
help = 'name of the group in "DataContainers/<basegroup>" containing pointwise data [%default]')
|
||||
parser.add_option('-a','--average',
|
||||
dest = 'average', metavar = 'string',
|
||||
dest = 'average',
|
||||
metavar = 'string',
|
||||
help = 'name of the group in "DataContainers</basegroup>" containing grain average data. '\
|
||||
+ 'Leave empty for pointwise data')
|
||||
parser.add_option('--phase',
|
||||
dest = 'phase',
|
||||
type = 'string', metavar = 'string',
|
||||
type = 'string',
|
||||
metavar = 'string',
|
||||
help = 'name of the dataset containing pointwise/average phase IDs [%default]')
|
||||
parser.add_option('--microstructure',
|
||||
dest = 'microstructure',
|
||||
type = 'string', metavar = 'string',
|
||||
type = 'string',
|
||||
metavar = 'string',
|
||||
help = 'name of the dataset connecting pointwise and average data [%default]')
|
||||
parser.add_option('-q', '--quaternion',
|
||||
dest = 'quaternion',
|
||||
type = 'string', metavar='string',
|
||||
type = 'string',
|
||||
metavar='string',
|
||||
help = 'name of the dataset containing pointwise/average orientation as quaternion [%default]')
|
||||
parser.add_option('--homogenization',
|
||||
dest = 'homogenization',
|
||||
type = 'int', metavar = 'int',
|
||||
help = 'homogenization index to be used [%default]')
|
||||
|
||||
parser.set_defaults(pointwise = 'CellData',
|
||||
quaternion = 'Quats',
|
||||
phase = 'Phases',
|
||||
microstructure = 'FeatureIds',
|
||||
crystallite = 1,
|
||||
homogenization = 1,
|
||||
)
|
||||
|
||||
(options, filenames) = parser.parse_args()
|
||||
|
@@ -57,59 +71,51 @@ if options.basegroup is None:
|
|||
|
||||
rootDir ='DataContainers'
|
||||
|
||||
# --- loop over input files -------------------------------------------------------------------------
|
||||
|
||||
if filenames == []: parser.error('no input file specified.')
|
||||
|
||||
for name in filenames:
|
||||
try:
|
||||
table = damask.ASCIItable(outname = os.path.splitext(name)[0]+'.geom',
|
||||
buffered = False, labeled=False,
|
||||
)
|
||||
except: continue
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
errors = []
|
||||
|
||||
info = {}
|
||||
ori = []
|
||||
inFile = h5py.File(name, 'r')
|
||||
group_geom = os.path.join(rootDir,options.basegroup,'_SIMPL_GEOMETRY')
|
||||
try:
|
||||
info['size'] = inFile[os.path.join(group_geom,'DIMENSIONS')][...] \
|
||||
size = inFile[os.path.join(group_geom,'DIMENSIONS')][...] \
|
||||
* inFile[os.path.join(group_geom,'SPACING')][...]
|
||||
info['grid'] = inFile[os.path.join(group_geom,'DIMENSIONS')][...]
|
||||
info['origin'] = inFile[os.path.join(group_geom,'ORIGIN')][...]
|
||||
grid = inFile[os.path.join(group_geom,'DIMENSIONS')][...]
|
||||
origin = inFile[os.path.join(group_geom,'ORIGIN')][...]
|
||||
except:
|
||||
errors.append('Geometry data ({}) not found'.format(group_geom))
|
||||
|
||||
|
||||
group_pointwise = os.path.join(rootDir,options.basegroup,options.pointwise)
|
||||
if options.average is None:
|
||||
label = 'point'
|
||||
N_microstructure = np.product(info['grid'])
|
||||
label = 'Point'
|
||||
|
||||
dataset = os.path.join(group_pointwise,options.quaternion)
|
||||
try:
|
||||
quats = np.reshape(inFile[dataset][...],(N_microstructure,4))
|
||||
texture = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats]
|
||||
quats = np.reshape(inFile[dataset][...],(np.product(grid),4))
|
||||
rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats]
|
||||
except:
|
||||
errors.append('Pointwise orientation (quaternion) data ({}) not readable'.format(dataset))
|
||||
|
||||
dataset = os.path.join(group_pointwise,options.phase)
|
||||
try:
|
||||
phase = np.reshape(inFile[dataset][...],(N_microstructure))
|
||||
phase = np.reshape(inFile[dataset][...],(np.product(grid)))
|
||||
except:
|
||||
errors.append('Pointwise phase data ({}) not readable'.format(dataset))
|
||||
|
||||
microstructure = np.arange(1,np.product(grid)+1,dtype=int).reshape(grid,order='F')
|
||||
|
||||
|
||||
else:
|
||||
label = 'grain'
|
||||
label = 'Grain'
|
||||
|
||||
dataset = os.path.join(group_pointwise,options.microstructure)
|
||||
try:
|
||||
microstructure = np.reshape(inFile[dataset][...],(np.product(info['grid'])))
|
||||
N_microstructure = np.max(microstructure)
|
||||
microstructure = np.transpose(inFile[dataset][...].reshape(grid[::-1]),(2,1,0)) # convert from C ordering
|
||||
except:
|
||||
errors.append('Link between pointwise and grain average data ({}) not readable'.format(dataset))
|
||||
|
||||
|
@@ -117,7 +123,7 @@ for name in filenames:
|
|||
|
||||
dataset = os.path.join(group_average,options.quaternion)
|
||||
try:
|
||||
texture = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]] # skip first entry (unindexed)
|
||||
rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]] # skip first entry (unindexed)
|
||||
except:
|
||||
errors.append('Average orientation data ({}) not readable'.format(dataset))
|
||||
|
||||
|
@@ -129,60 +135,24 @@ for name in filenames:
|
|||
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
|
||||
config_header = ['<texture>']
|
||||
for i in range(np.nanmax(microstructure)):
|
||||
config_header += ['[{}{}]'.format(label,i+1),
|
||||
'(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*rot[i].asEulers(degrees = True)),
|
||||
]
|
||||
config_header += ['<microstructure>']
|
||||
for i in range(np.nanmax(microstructure)):
|
||||
config_header += ['[{}{}]'.format(label,i+1),
|
||||
'crystallite 1',
|
||||
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(phase[i],i+1),
|
||||
]
|
||||
|
||||
mat = damask.Material()
|
||||
mat.verbose = False
|
||||
header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
|
||||
+ config_header
|
||||
geom = damask.Geom(microstructure,size,origin,
|
||||
homogenization=options.homogenization,comments=header)
|
||||
damask.util.croak(geom)
|
||||
|
||||
# dummy <homogenization>
|
||||
h = damask.config.material.Homogenization()
|
||||
mat.add_section('Homogenization','none',h)
|
||||
info['homogenization'] = 1
|
||||
|
||||
# <crystallite> placeholder (same for all microstructures at the moment)
|
||||
c = damask.config.material.Crystallite()
|
||||
mat.add_section('Crystallite','tbd',c)
|
||||
|
||||
# <phase> placeholders
|
||||
for i in range(np.max(phase)):
|
||||
p = damask.config.material.Phase()
|
||||
mat.add_section('phase','phase{}-tbd'.format(i+1),p)
|
||||
|
||||
# <texture>
|
||||
for i,o in enumerate(texture):
|
||||
t = damask.config.material.Texture()
|
||||
t.add_component('gauss',{'eulers':o.asEulers(degrees=True)})
|
||||
mat.add_section(part='texture', section='{}{}'.format(label,i+1),initialData=t)
|
||||
|
||||
# <microstructure>
|
||||
for i in range(N_microstructure):
|
||||
m = damask.config.material.Microstructure()
|
||||
mat.add_section('microstructure','{}{}'.format(label,i+1),m)
|
||||
mat.add_microstructure('{}{}'.format(label,i+1),
|
||||
{'phase': 'phase{}-tbd'.format(phase[i]),
|
||||
'texture':'{}{}'.format(label,i+1),
|
||||
'crystallite':'tbd',
|
||||
'fraction':1
|
||||
})
|
||||
|
||||
table.info_append([
|
||||
scriptID + ' ' + ' '.join(sys.argv[1:]),
|
||||
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
|
||||
"size\tx {}\ty {}\tz {}".format(*info['size']),
|
||||
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
|
||||
"homogenization\t{}".format(info['homogenization']),
|
||||
str(mat).split('\n')
|
||||
])
|
||||
table.head_write()
|
||||
|
||||
if options.average is None:
|
||||
table.data = [1, 'to', format(N_microstructure)]
|
||||
table.data_write()
|
||||
else:
|
||||
table.data = microstructure.reshape(info['grid'][1]*info['grid'][2],info['grid'][0])
|
||||
table.data_writeArray()
|
||||
|
||||
|
||||
table.close()
|
||||
geom.to_file(os.path.splitext(name)[0]+'.geom')
|
||||
|
|
|
@@ -1,28 +1,33 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,math
|
||||
import numpy as np
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
import numpy as np
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
||||
minimal_surfaces = ['primitive','gyroid','diamond']
|
||||
|
||||
surface = {
|
||||
'primitive': lambda x,y,z: np.cos(x)+np.cos(y)+np.cos(z),
|
||||
'gyroid': lambda x,y,z: np.sin(x)*np.cos(y)+np.sin(y)*np.cos(z)+np.cos(x)*np.sin(z),
|
||||
'diamond': lambda x,y,z: np.cos(x-y)*np.cos(z)+np.sin(x+y)*np.sin(z),
|
||||
}
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# MAIN
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
minimal_surfaces = ['primitive','gyroid','diamond',]
|
||||
|
||||
surface = {
|
||||
'primitive': lambda x,y,z: math.cos(x)+math.cos(y)+math.cos(z),
|
||||
'gyroid': lambda x,y,z: math.sin(x)*math.cos(y)+math.sin(y)*math.cos(z)+math.cos(x)*math.sin(z),
|
||||
'diamond': lambda x,y,z: math.cos(x-y)*math.cos(z)+math.sin(x+y)*math.sin(z),
|
||||
}
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] [geomfile]', description = """
|
||||
Generate a geometry file of a bicontinuous structure of given type.
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile]', description = """
|
||||
Generate a bicontinuous structure of given type.
|
||||
|
||||
""", version = scriptID)
|
||||
|
||||
|
@@ -55,6 +60,7 @@ parser.add_option('--m',
|
|||
dest = 'microstructure',
|
||||
type = 'int', nargs = 2, metavar = 'int int',
|
||||
help = 'two microstructure indices to be used [%default]')
|
||||
|
||||
parser.set_defaults(type = minimal_surfaces[0],
|
||||
threshold = 0.0,
|
||||
periods = 1,
|
||||
|
@@ -64,67 +70,26 @@ parser.set_defaults(type = minimal_surfaces[0],
|
|||
microstructure = (1,2),
|
||||
)
|
||||
|
||||
(options,filenames) = parser.parse_args()
|
||||
(options,filename) = parser.parse_args()
|
||||
|
||||
# --- loop over input files -------------------------------------------------------------------------
|
||||
|
||||
if filenames == []: filenames = [None]
|
||||
|
||||
for name in filenames:
|
||||
try:
|
||||
table = damask.ASCIItable(outname = name,
|
||||
buffered = False, labeled = False)
|
||||
except: continue
|
||||
name = None if filename == [] else filename[0]
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
x,y,z = np.meshgrid(options.periods*2.0*np.pi*(np.arange(options.grid[0])+0.5)/options.grid[0],
|
||||
options.periods*2.0*np.pi*(np.arange(options.grid[1])+0.5)/options.grid[1],
|
||||
options.periods*2.0*np.pi*(np.arange(options.grid[2])+0.5)/options.grid[2],
|
||||
indexing='xy',sparse=True)
|
||||
|
||||
# ------------------------------------------ make grid -------------------------------------
|
||||
microstructure = np.where(options.threshold < surface[options.type](x,y,z),
|
||||
options.microstructure[1],options.microstructure[0])
|
||||
|
||||
info = {
|
||||
'grid': np.array(options.grid),
|
||||
'size': np.array(options.size),
|
||||
'origin': np.zeros(3,'d'),
|
||||
'microstructures': max(options.microstructure),
|
||||
'homogenization': options.homogenization
|
||||
}
|
||||
geom=damask.Geom(microstructure,options.size,
|
||||
homogenization=options.homogenization,
|
||||
comments=[scriptID + ' ' + ' '.join(sys.argv[1:])])
|
||||
damask.util.croak(geom)
|
||||
|
||||
#--- report ---------------------------------------------------------------------------------------
|
||||
|
||||
damask.util.report_geom(info)
|
||||
|
||||
errors = []
|
||||
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
|
||||
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
|
||||
#--- write header ---------------------------------------------------------------------------------
|
||||
|
||||
table.labels_clear()
|
||||
table.info_clear()
|
||||
table.info_append([
|
||||
scriptID + ' ' + ' '.join(sys.argv[1:]),
|
||||
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
|
||||
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
|
||||
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
|
||||
"homogenization\t{homog}".format(homog=info['homogenization']),
|
||||
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
|
||||
])
|
||||
table.head_write()
|
||||
|
||||
#--- write data -----------------------------------------------------------------------------------
|
||||
|
||||
X = options.periods*2.0*math.pi*(np.arange(options.grid[0])+0.5)/options.grid[0]
|
||||
Y = options.periods*2.0*math.pi*(np.arange(options.grid[1])+0.5)/options.grid[1]
|
||||
Z = options.periods*2.0*math.pi*(np.arange(options.grid[2])+0.5)/options.grid[2]
|
||||
|
||||
for z in range(options.grid[2]):
|
||||
for y in range(options.grid[1]):
|
||||
table.data_clear()
|
||||
for x in range(options.grid[0]):
|
||||
table.data_append(options.microstructure[options.threshold < surface[options.type](X[x],Y[y],Z[z])])
|
||||
table.data_write()
|
||||
|
||||
table.close()
|
||||
if name is None:
|
||||
sys.stdout.write(str(geom.show()))
|
||||
else:
|
||||
geom.to_file(name)
|
||||
|
|
|
@@ -1,46 +1,66 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,math
|
||||
import numpy as np
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
import numpy as np
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# MAIN
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] [geomfile]', description = """
|
||||
Generate a geometry file of an osteon enclosing the Harvesian canal and separated by interstitial tissue.
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile]', description = """
|
||||
Generate description of an osteon enclosing the Harvesian canal and separated by interstitial tissue.
|
||||
The osteon phase is lamellar with a twisted plywood structure.
|
||||
Its fiber orientation is oscillating by +/- amplitude within one period.
|
||||
|
||||
""", version = scriptID)
|
||||
|
||||
|
||||
parser.add_option('-g', '--grid', dest='grid', type='int', nargs=2, metavar = 'int int',
|
||||
parser.add_option('-g', '--grid',
|
||||
dest='grid', type='int',
|
||||
nargs=2, metavar = 'int int',
|
||||
help='a,b grid of hexahedral box [%default]')
|
||||
parser.add_option('-s', '--size', dest='size', type='float', nargs=2, metavar = 'float float',
|
||||
parser.add_option('-s', '--size',
|
||||
dest='size',
|
||||
type='float', nargs=2, metavar = 'float float',
|
||||
help='x,y size of hexahedral box [%default]')
|
||||
parser.add_option('-c', '--canal', dest='canal', type='float', metavar = 'float',
|
||||
parser.add_option('-c', '--canal',
|
||||
dest='canal',
|
||||
type='float', metavar = 'float',
|
||||
help='Haversian canal radius [%default]')
|
||||
parser.add_option('-o', '--osteon', dest='osteon', type='float', metavar = 'float',
|
||||
parser.add_option('-o', '--osteon',
|
||||
dest='osteon',
|
||||
type='float', metavar = 'float',
|
||||
help='horizontal osteon radius [%default]')
|
||||
parser.add_option('-l', '--lamella', dest='period', type='float', metavar = 'float',
|
||||
parser.add_option('-l', '--lamella',
|
||||
dest='period',
|
||||
type='float', metavar = 'float',
|
||||
help='lamella width [%default]')
|
||||
parser.add_option('-a', '--amplitude', dest='amplitude', type='float', metavar = 'float',
|
||||
parser.add_option('-a', '--amplitude',
|
||||
dest='amplitude',
|
||||
type='float', metavar = 'float',
|
||||
help='amplitude of twisted plywood wiggle in deg [%default]')
|
||||
parser.add_option( '--aspect', dest='aspect', type='float', metavar = 'float',
|
||||
parser.add_option( '--aspect',
|
||||
dest='aspect',
|
||||
type='float', metavar = 'float',
|
||||
help='vertical/horizontal osteon aspect ratio [%default]')
|
||||
parser.add_option('-w', '--omega', dest='omega', type='float', metavar = 'float',
|
||||
parser.add_option('-w', '--omega',
|
||||
dest='omega',
|
||||
type='float', metavar = 'float',
|
||||
help='rotation angle around normal of osteon [%default]')
|
||||
parser.add_option('--homogenization', dest='homogenization', type='int', metavar = 'int',
|
||||
parser.add_option( '--homogenization',
|
||||
dest='homogenization',
|
||||
type='int', metavar = 'int',
|
||||
help='homogenization index to be used [%default]')
|
||||
parser.add_option('--crystallite', dest='crystallite', type='int', metavar = 'int',
|
||||
help='crystallite index to be used [%default]')
|
||||
|
||||
parser.set_defaults(canal = 25e-6,
|
||||
osteon = 100e-6,
|
||||
|
@@ -50,107 +70,82 @@ parser.set_defaults(canal = 25e-6,
|
|||
amplitude = 60,
|
||||
size = (300e-6,300e-6),
|
||||
grid = (512,512),
|
||||
homogenization = 1,
|
||||
crystallite = 1)
|
||||
homogenization = 1)
|
||||
|
||||
(options,filename) = parser.parse_args()
|
||||
|
||||
if np.any(np.array(options.grid) < 2):
|
||||
parser('invalid grid a b c.')
|
||||
if np.any(np.array(options.size) <= 0.0):
|
||||
parser('invalid size x y z.')
|
||||
|
||||
# --- open input files ----------------------------------------------------------------------------
|
||||
name = None if filename == [] else filename[0]
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
if filename == []: filename = [None]
|
||||
omega = np.deg2rad(options.omega)
|
||||
rotation = np.array([[ np.cos(omega),np.sin(omega),],
|
||||
[-np.sin(omega),np.cos(omega),]])
|
||||
|
||||
table = damask.ASCIItable(outname = filename[0],
|
||||
buffered = False, labeled=False)
|
||||
grid = np.array(options.grid,'i')
|
||||
size = np.array(options.size,'d')
|
||||
|
||||
damask.util.report(scriptName,filename[0])
|
||||
|
||||
options.omega *= math.pi/180.0 # rescale ro radians
|
||||
rotation = np.array([[ math.cos(options.omega),math.sin(options.omega),],
|
||||
[-math.sin(options.omega),math.cos(options.omega),]],'d')
|
||||
|
||||
box = np.dot(np.array([[options.canal,0.],[0.,options.aspect*options.canal]]).transpose(),rotation)
|
||||
|
||||
|
||||
info = {
|
||||
'grid': np.ones(3,'i'),
|
||||
'size': np.ones(3,'d'),
|
||||
'origin': np.zeros(3,'d'),
|
||||
'microstructures': 3,
|
||||
'homogenization': options.homogenization,
|
||||
}
|
||||
|
||||
info['grid'][:2] = np.array(options.grid,'i')
|
||||
info['size'][:2] = np.array(options.size,'d')
|
||||
info['size'][2] = min(info['size'][0]/info['grid'][0],info['size'][1]/info['grid'][1])
|
||||
info['origin'] = -info['size']/2.0
|
||||
|
||||
X0 = info['size'][0]/info['grid'][0]*\
|
||||
(np.tile(np.arange(info['grid'][0]),(info['grid'][1],1)) - info['grid'][0]/2 + 0.5)
|
||||
Y0 = info['size'][1]/info['grid'][1]*\
|
||||
(np.tile(np.arange(info['grid'][1]),(info['grid'][0],1)).transpose() - info['grid'][1]/2 + 0.5)
X0,Y0 = np.meshgrid(size[0]/grid[0] * (np.arange(grid[0]) - grid[0]/2 + 0.5),
size[1]/grid[0] * (np.arange(grid[1]) - grid[1]/2 + 0.5), indexing='ij')

X = X0*rotation[0,0] + Y0*rotation[0,1] # rotate by omega
Y = X0*rotation[1,0] + Y0*rotation[1,1] # rotate by omega

radius = np.sqrt(X*X + Y*Y/options.aspect/options.aspect)
radius = np.sqrt(X*X + Y*Y/options.aspect**2.0)
alpha = np.degrees(np.arctan2(Y/options.aspect,X))
beta = options.amplitude*np.sin(2.0*math.pi*(radius-options.canal)/options.period)
beta = options.amplitude*np.sin(2.0*np.pi*(radius-options.canal)/options.period)

microstructure = np.where(radius < float(options.canal),1,0) + np.where(radius > float(options.osteon),2,0)
microstructure = np.where(radius < float(options.canal), 1,0) \
+ np.where(radius > float(options.osteon),2,0)

alphaOfGrain = np.zeros(info['grid'][0]*info['grid'][1],'d')
betaOfGrain = np.zeros(info['grid'][0]*info['grid'][1],'d')
for y in range(info['grid'][1]):
for x in range(info['grid'][0]):
if microstructure[y,x] == 0:
microstructure[y,x] = info['microstructures']
alphaOfGrain[info['microstructures']] = alpha[y,x]
betaOfGrain[ info['microstructures']] = beta[y,x]
info['microstructures'] += 1
# extend to 3D
size = np.append(size,np.min(size/grid))
grid = np.append(grid,1)
microstructure = microstructure.reshape(microstructure.shape+(1,))

#--- report ---------------------------------------------------------------------------------------
damask.util.report_geom(info,['grid','size','origin','homogenization','microstructures'])
Alpha = np.zeros(grid[0]*grid[1],'d')
Beta = np.zeros(grid[0]*grid[1],'d')

formatwidth = 1+int(math.floor(math.log10(info['microstructures']-1)))
header = [scriptID + ' ' + ' '.join(sys.argv[1:])]
header.append('<microstructure>')
header.append('[canal]')
header.append('crystallite %i'%options.crystallite)
header.append('(constituent)\tphase 1\ttexture 1\tfraction 1.0')
header.append('[interstitial]')
header.append('crystallite %i'%options.crystallite)
header.append('(constituent)\tphase 2\ttexture 2\tfraction 1.0')
for i in range(3,info['microstructures']):
header.append('[Grain%s]'%(str(i).zfill(formatwidth)))
header.append('crystallite %i'%options.crystallite)
header.append('(constituent)\tphase 3\ttexture %s\tfraction 1.0'%(str(i).rjust(formatwidth)))
i = 3
for y in range(grid[1]):
for x in range(grid[0]):
if microstructure[x,y] == 0:
microstructure[x,y] = i
Alpha[i] = alpha[x,y]
Beta [i] = beta [x,y]
i+=1

header.append('<texture>')
header.append('[canal]')
header.append('[interstitial]')
for i in range(3,info['microstructures']):
header.append('[Grain%s]'%(str(i).zfill(formatwidth)))
header.append('(gauss)\tphi1 %g\tPhi %g\tphi2 0\tscatter 0.0\tfraction 1.0'\
%(alphaOfGrain[i],betaOfGrain[i]))
header.append([
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures'])])
config_header = ['<texture>',
'[canal]',
'[interstitial]'
]
for i in range(3,np.max(microstructure)):
config_header += ['[Point{}]'.format(i-2),
'(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 0'.format(Alpha[i],Beta[i])
]

table.info_append(header)
table.head_write()
config_header = ['<microstructure>',
'[canal]',
'crystallite 1',
'(constituent)\tphase 1\ttexture 1\tfraction 1.0',
'[interstitial]',
'crystallite 1',
'(constituent)\tphase 2\ttexture 2\tfraction 1.0'
]
for i in range(3,np.max(microstructure)):
config_header += ['[Point{}]'.format(i-2),
'crystallite 1',
'(constituent)\tphase 3\ttexture {}\tfraction 1.0'.format(i)
]

# --- write microstructure information ------------------------------------------------------------
header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
+ config_header
geom = damask.Geom(microstructure.reshape(grid),
size,-size/2,
homogenization=options.homogenization,comments=header)
damask.util.croak(geom)

table.data = microstructure.reshape(info['grid'][1]*info['grid'][2],info['grid'][0])
table.data_writeArray('%%%ii'%(formatwidth),delimiter=' ')

#--- output finalization --------------------------------------------------------------------------
table.close()
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
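Editor's note: the construct → report → emit pattern introduced above recurs in every script touched by this merge. A minimal hedged sketch of that pattern, outside the commit, with a toy 4x4x1 microstructure (array values and the output filename are illustrative only):

import sys

import numpy as np

import damask

microstructure = np.ones((4,4,1),dtype=int)                  # toy data, not from the commit
geom = damask.Geom(microstructure,
                   size=np.array([1.0,1.0,0.25]),
                   origin=np.zeros(3),
                   homogenization=1,
                   comments=['example comment'])
damask.util.croak(geom)                                      # report geometry to stderr
sys.stdout.write(str(geom.show()))                           # when piping; otherwise geom.to_file('example.geom')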
@ -0,0 +1,69 @@
#!/usr/bin/env python3

import os
import sys
from optparse import OptionParser

import numpy as np

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])


# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Generate homogeneous geometry.

""", version = scriptID)

parser.add_option('-g','--grid',
dest = 'grid',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c grid of hexahedral box %default')
parser.add_option('-s', '--size',
dest = 'size',
type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
help = 'x,y,z of geometry size')
parser.add_option('-o','--origin',
dest = 'origin',
type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
help = 'x,y,z of geometry origin %default')
parser.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index [%default]')
parser.add_option('-f','--fill',
dest = 'fill',
type = 'float', metavar = 'int',
help = 'microstructure index [%default]')

parser.set_defaults(grid = (16,16,16),
origin = (0.,0.,0.),
homogenization = 1,
fill = 1,
)

(options, filename) = parser.parse_args()


name = None if filename == [] else filename[0]
damask.util.report(scriptName,name)

dtype = float if np.isnan(options.fill) or int(options.fill) != options.fill else int
geom = damask.Geom(microstructure=np.full(options.grid,options.fill,dtype=dtype),
size=options.size,
origin=options.origin,
homogenization=options.homogenization,
comments=scriptID + ' ' + ' '.join(sys.argv[1:]))
damask.util.croak(geom)

if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
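Editor's note: one detail worth calling out in the new generator above is the dtype choice for --fill: an integral fill value keeps the microstructure integer, anything else (including NaN) switches it to float. A small hedged sketch of that rule in isolation (the helper name is mine, not part of the script):

import numpy as np

def fill_dtype(fill):
    # same test as above; `or` short-circuits, so int() is never called on NaN
    return float if np.isnan(fill) or int(fill) != fill else int

assert fill_dtype(1.0) is int
assert fill_dtype(1.5) is float
assert fill_dtype(float('nan')) is float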
@ -1,22 +1,25 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os,sys,math
import numpy as np
import os
import sys
from optparse import OptionParser

import numpy as np

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])


# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [ASCIItable(s)]', description = """
Generate geometry description and material configuration from position, phase, and orientation (or microstructure) data.
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Converts ASCII table. Input can be microstructure or orientation (as quaternion). For the latter,
phase information can be given additionally.

""", version = scriptID)

@ -40,19 +43,13 @@ parser.add_option('--axes',
dest = 'axes',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'orientation coordinate frame in terms of position coordinate frame [+x +y +z]')

parser.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index to be used [%default]')
parser.add_option('--crystallite',
dest = 'crystallite',
type = 'int', metavar = 'int',
help = 'crystallite index to be used [%default]')

parser.set_defaults(homogenization = 1,
crystallite = 1,
pos = 'pos',
)

@ -63,7 +60,9 @@ input = [ options.quaternion is not None,
]

if np.sum(input) != 1:
parser.error('need either microstructure label or exactly one orientation input format.')
parser.error('need either microstructure or quaternion (and optionally phase) as input.')
if options.microstructure is not None and options.phase is not None:
parser.error('need either microstructure or phase (and mandatory quaternion) as input.')
if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x','y','+y','-y','z','+z','-z'])):
parser.error('invalid axes {} {} {}.'.format(*options.axes))

@ -71,20 +70,12 @@ if options.axes is not None and not set(options.axes).issubset(set(['x','+x','-x
(options.microstructure,1,'microstructure'),
][np.where(input)[0][0]] # select input label that was requested

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[-2]+'.geom' if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)

# ------------------------------------------ read head ---------------------------------------

table = damask.ASCIItable(name = name,readonly=True)
table.head_read() # read ASCII header info

# ------------------------------------------ sanity checks ---------------------------------------

@ -101,7 +92,6 @@ for name in filenames:

if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

table.data_readArray([options.pos] \

@ -113,120 +103,56 @@ for name in filenames:
if options.phase is None:
table.data = np.column_stack((table.data,np.ones(len(table.data)))) # add single phase if no phase column given

# --------------- figure out size and grid ---------------------------------------------------------

grid,size = damask.util.coordGridAndSize(table.data[:,0:3])
coords = [np.unique(table.data[:,i]) for i in range(3)]
mincorner = np.array(list(map(min,coords)))
maxcorner = np.array(list(map(max,coords)))
grid = np.array(list(map(len,coords)),'i')
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 set to smallest among other spacings
delta = size/np.maximum(np.ones(3,'d'), grid)
origin = mincorner - 0.5*delta # shift from cell center to corner
origin = mincorner - 0.5*size/grid # shift from cell center to corner

N = grid.prod()

if N != len(table.data):
errors.append('data count {} does not match grid {}.'.format(len(table.data),' x '.join(map(repr,grid))))
if np.any(np.abs(np.log10((coords[0][1:]-coords[0][:-1])/delta[0])) > 0.01) \
or np.any(np.abs(np.log10((coords[1][1:]-coords[1][:-1])/delta[1])) > 0.01) \
or np.any(np.abs(np.log10((coords[2][1:]-coords[2][:-1])/delta[2])) > 0.01):
errors.append('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))

if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

# ------------------------------------------ process data ------------------------------------------

colOri = table.label_index(label)+(3-coordDim) # column(s) of orientation data followed by 3 coordinates
indices = np.lexsort((table.data[:,0],table.data[:,1],table.data[:,2])) # indices of position when sorting x fast, z slow
microstructure = np.empty(grid,dtype = int) # initialize empty microstructure
i = 0

if inputtype == 'microstructure':
for z in range(grid[2]):
for y in range(grid[1]):
for x in range(grid[0]):
microstructure[x,y,z] = table.data[indices[i],3]
i+=1

grain = table.data[:,colOri]
nGrains = len(np.unique(grain))
config_header = []

elif inputtype == 'quaternion':

colPhase = -1 # column of phase data comes last
index = np.lexsort((table.data[:,0],table.data[:,1],table.data[:,2])) # index of position when sorting x fast, z slow
grain = -np.ones(N,dtype = 'int32') # initialize empty microstructure
orientations = [] # orientations
multiplicity = [] # orientation multiplicity (number of group members)
phases = [] # phase info
nGrains = 0 # counter for detected grains
existingGrains = np.arange(nGrains)
myPos = 0 # position (in list) of current grid point

unique,unique_inverse = np.unique(table.data[:,3:8],return_inverse=True,axis=0)

for z in range(grid[2]):
for y in range(grid[1]):
for x in range(grid[0]):
microstructure[x,y,z] = unique_inverse[indices[i]]+1
i+=1

config_header = ['<texture>']
for i,data in enumerate(unique):
ori = damask.Rotation(data[0:4])
config_header += ['[Grain{}]'.format(i+1),
'(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*ori.asEulers(degrees = True)),
]
if options.axes is not None: config_header += ['axes\t{} {} {}'.format(*options.axes)]

myData = table.data[index[myPos]] # read data for current grid point
myPhase = int(myData[colPhase])
config_header += ['<microstructure>']
for i,data in enumerate(unique):
config_header += ['[Grain{}]'.format(i+1),
'crystallite 1',
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(int(data[4]),i+1),
]

o = damask.Rotation(myData[colOri:colOri+4])
header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
+ config_header
geom = damask.Geom(microstructure,size,origin,
homogenization=options.homogenization,comments=header)
damask.util.croak(geom)

grain[myPos] = nGrains # assign new grain to me ...
nGrains += 1 # ... and update counter
orientations.append(o) # store new orientation for future comparison
multiplicity.append(1) # having single occurrence so far
phases.append(myPhase) # store phase info for future reporting
existingGrains = np.arange(nGrains) # update list of existing grains

myPos += 1


grain += 1 # offset from starting index 0 to 1

# --- generate header ----------------------------------------------------------------------------

info = {
'grid': grid,
'size': size,
'origin': origin,
'microstructures': nGrains,
'homogenization': options.homogenization,
}

damask.util.report_geom(info)

# --- write header ---------------------------------------------------------------------------------

formatwidth = 1+int(math.log10(info['microstructures']))

if inputtype == 'microstructure':
config_header = []
if name is None:
sys.stdout.write(str(geom.show()))
else:
config_header = ['<microstructure>']
for i,phase in enumerate(phases):
config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
'crystallite %i'%options.crystallite,
'(constituent)\tphase %i\ttexture %s\tfraction 1.0'%(phase,str(i+1).rjust(formatwidth)),
]

config_header += ['<texture>']
for i,orientation in enumerate(orientations):
config_header += ['[Grain%s]'%(str(i+1).zfill(formatwidth)),
'axes\t%s %s %s'%tuple(options.axes) if options.axes is not None else '',
'(gauss)\tphi1 %g\tPhi %g\tphi2 %g\tscatter 0.0\tfraction 1.0'%tuple(orientation.asEulers(degrees = True)),
]

table.labels_clear()
table.info_clear()
table.info_append([scriptID + ' ' + ' '.join(sys.argv[1:])])
table.head_putGeom(info)
table.info_append(config_header)
table.head_write()

# --- write microstructure information ------------------------------------------------------------

table.data = grain.reshape(info['grid'][1]*info['grid'][2],info['grid'][0])
table.data_writeArray('%{}i'.format(formatwidth),delimiter=' ')

#--- output finalization --------------------------------------------------------------------------

table.close()
geom.to_file(os.path.splitext(name)[0]+'.geom')
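Editor's note: the rewritten quaternion branch above replaces the old per-point grain matching with a single np.unique over the (quaternion, phase) columns. A hedged toy illustration of that grouping (the data rows are invented):

import numpy as np

rows = np.array([[1,0,0,0,1],     # (q0,q1,q2,q3,phase); rows 0 and 2 are identical
                 [0,1,0,0,1],
                 [1,0,0,0,1],
                 [0,0,1,0,2]],dtype=float)

unique,unique_inverse = np.unique(rows,return_inverse=True,axis=0)
microstructure = unique_inverse + 1          # 1-based grain index per row
print(len(unique),microstructure)            # 3 [3 2 3 1]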
@ -1,33 +1,21 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os,sys,math
import numpy as np
import os
import sys
import multiprocessing
from optparse import OptionParser,OptionGroup

import numpy as np
from scipy import spatial

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])


def meshgrid2(*arrs):
"""Code inspired by http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d"""
arrs = tuple(reversed(arrs))
lens = np.array(list(map(len, arrs)))
dim = len(arrs)
ans = []
for i, arr in enumerate(arrs):
slc = np.ones(dim,'i')
slc[i] = lens[i]
arr2 = np.asarray(arr).reshape(slc)
for j, sz in enumerate(lens):
if j != i:
arr2 = arr2.repeat(sz, axis=j)

ans.insert(0,arr2)
return tuple(ans)
def laguerreTessellation(undeformed, coords, weights, grains, periodic = True, cpus = 2):

def findClosestSeed(fargs):
point, seeds, myWeights = fargs

@ -35,9 +23,6 @@ def findClosestSeed(fargs):
dist = np.sum((tmp - seeds)**2,axis=1) -myWeights
return np.argmin(dist) # seed point closest to point


def laguerreTessellation(undeformed, coords, weights, grains, periodic = True, cpus = 2):

copies = \
np.array([
[ -1,-1,-1 ],

@ -98,6 +83,7 @@ def laguerreTessellation(undeformed, coords, weights, grains, periodic = True, c
# closestSeed is modulo number of original seed points (i.e. excluding periodic copies)
return grains[closestSeeds%coords.shape[0]]


# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------

@ -189,10 +175,6 @@ group.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index to be used [%default]')
group.add_option('--crystallite',
dest = 'crystallite',
type = 'int', metavar = 'int',
help = 'crystallite index to be used [%default]')
group.add_option('--phase',
dest = 'phase',
type = 'int', metavar = 'int',

@ -205,7 +187,6 @@ parser.set_defaults(pos = 'pos',
microstructure = 'microstructure',
eulers = 'euler',
homogenization = 1,
crystallite = 1,
phase = 1,
cpus = 2,
laguerre = False,

@ -213,19 +194,18 @@ parser.set_defaults(pos = 'pos',
normalized = True,
config = True,
)

(options,filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try: table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[0]+'.geom' if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)

table = damask.ASCIItable(name = name, readonly = True)


# --- read header ----------------------------------------------------------------------------

table.head_read()

@ -245,10 +225,6 @@ for name in filenames:
hasEulers = table.label_dimension(options.eulers) == 3
hasWeights = table.label_dimension(options.weight) == 1 and options.laguerre

if np.any(np.array(info['grid']) < 1): errors.append('invalid grid a b c.')
if np.any(np.array(info['size']) <= 0.0) \
and np.all(np.array(info['grid']) < 1): errors.append('invalid size x y z.')
else:
for i in range(3):
if info['size'][i] <= 0.0: # any invalid size?
info['size'][i] = float(info['grid'][i])/max(info['grid']) # normalize to grid

@ -281,7 +257,7 @@ for name in filenames:
else table.data[:,table.label_indexrange(options.pos)] - info['origin']
eulers = table.data[:,table.label_indexrange(options.eulers)] if hasEulers \
else np.zeros(3*len(coords))
grains = table.data[:,table.label_indexrange(options.microstructure)].astype('i') if hasGrains \
grains = table.data[:,table.label_indexrange(options.microstructure)].astype(int) if hasGrains \
else 1+np.arange(len(coords))
weights = table.data[:,table.label_indexrange(options.weight)] if hasWeights \
else np.zeros(len(coords))

@ -293,62 +269,40 @@ for name in filenames:
x = (np.arange(info['grid'][0])+0.5)*info['size'][0]/info['grid'][0]
y = (np.arange(info['grid'][1])+0.5)*info['size'][1]/info['grid'][1]
z = (np.arange(info['grid'][2])+0.5)*info['size'][2]/info['grid'][2]
X,Y,Z = np.meshgrid(x, y, z,indexing='ij')
grid = np.stack((X,Y,Z),axis=-1).reshape((np.prod(info['grid']),3),order='F')

damask.util.croak('tessellating...')

grid = np.vstack(meshgrid2(x, y, z)).reshape(3,-1).T
indices = laguerreTessellation(grid, coords, weights, grains, options.periodic, options.cpus)

# --- write header ------------------------------------------------------------------------

usedGrainIDs = np.intersect1d(grainIDs,indices)
info['microstructures'] = len(usedGrainIDs)

if info['homogenization'] == 0: info['homogenization'] = options.homogenization

damask.util.report_geom(info,['grid','size','origin','homogenization',])
damask.util.croak(['microstructures: {}{}'.format(info['microstructures'],
(' out of {}'.format(NgrainIDs) if NgrainIDs != info['microstructures'] else '')),
])

config_header = []
formatwidth = 1+int(math.log10(NgrainIDs))

if options.config:
config_header += ['<microstructure>']
for i,ID in enumerate(grainIDs):
config_header += ['[Grain{}]'.format(str(ID).zfill(formatwidth)),
'crystallite {}'.format(options.crystallite),
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(options.phase,str(ID).rjust(formatwidth)),
]

if hasEulers:
config_header += ['<texture>']
theAxes = [] if options.axes is None else ['axes\t{} {} {}'.format(*options.axes)]
for ID in grainIDs:
eulerID = np.nonzero(grains == ID)[0][0] # find first occurrence of this grain id
config_header += ['[Grain{}]'.format(str(ID).zfill(formatwidth)),
'(gauss)\tphi1 {:g}\tPhi {:g}\tphi2 {:g}\tscatter 0.0\tfraction 1.0'.format(*eulers[eulerID])
] + theAxes
config_header += ['[Grain{}]'.format(ID),
'(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*eulers[eulerID])
]
if options.axes is not None: config_header += ['axes\t{} {} {}'.format(*options.axes)]

config_header += ['<microstructure>']
for ID in grainIDs:
config_header += ['[Grain{}]'.format(ID),
'crystallite 1',
'(constituent)\tphase {}\ttexture {}\tfraction 1.0'.format(options.phase,ID)
]

config_header += ['<!skip>']

table.labels_clear()
table.info_clear()
table.info_append([
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
"size\tx {}\ty {}\tz {}".format(*info['size']),
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
"homogenization\t{}".format(info['homogenization']),
"microstructures\t{}".format(info['microstructures']),
config_header,
])
table.head_write()
header = [scriptID + ' ' + ' '.join(sys.argv[1:])]\
+ config_header
geom = damask.Geom(indices.reshape(info['grid'],order='F'),info['size'],info['origin'],
homogenization=options.homogenization,comments=header)
damask.util.croak(geom)

# --- write microstructure information ------------------------------------------------------------

table.data = indices.reshape(info['grid'][1]*info['grid'][2],info['grid'][0])
table.data_writeArray('%%%ii'%(formatwidth),delimiter=' ')

#--- output finalization --------------------------------------------------------------------------

table.close()
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(os.path.splitext(name)[0]+'.geom')
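Editor's note: the seed selection in findClosestSeed above is a power-diagram (Laguerre) rule, squared Euclidean distance minus the seed weight; plain Voronoi is the zero-weight special case. A hedged, self-contained sketch (seed positions and weights are invented):

import numpy as np

def closest_seed(point,seeds,weights):
    """Laguerre-closest seed: argmin over |point - seed|^2 - weight."""
    return np.argmin(np.sum((point - seeds)**2,axis=1) - weights)

seeds   = np.array([[0.2,0.2,0.5],
                    [0.8,0.8,0.5]])
weights = np.array([0.0,0.3])                                # the weighted seed claims more volume
print(closest_seed(np.array([0.5,0.5,0.5]),seeds,weights))   # -> 1, despite equal Euclidean distance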
@ -1,21 +1,30 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os,sys,math
import numpy as np
import os
import sys
from io import StringIO
from optparse import OptionParser

import numpy as np
from scipy import ndimage

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])


getInterfaceEnergy = lambda A,B: np.float32((A*B != 0)*(A != B)*1.0) # 1.0 if A & B are distinct & nonzero, 0.0 otherwise
struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood


#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [option(s)] [geomfile(s)]', description = """
Smoothens out interface roughness by simulated curvature flow.
parser = OptionParser(option_class=damask.extendableOption, usage='%prog option(s) [geomfile(s)]', description = """
Smoothen interface roughness by simulated curvature flow.
This is achieved by the diffusion of each initially sharply bounded grain volume within the periodic domain
up to a given distance 'd' voxels.
The final geometry is assembled by selecting at each voxel that grain index for which the concentration remains largest.

@ -33,9 +42,6 @@ parser.add_option('-N', '--iterations',
parser.add_option('-i', '--immutable',
action = 'extend', dest = 'immutable', metavar = '<int LIST>',
help = 'list of immutable microstructure indices')
parser.add_option('-r', '--renumber',
dest = 'renumber', action='store_true',
help = 'output consecutive microstructure indices')
parser.add_option('--ndimage',
dest = 'ndimage', action='store_true',
help = 'use ndimage.gaussian_filter in lieu of explicit FFT')

@ -43,7 +49,6 @@ parser.add_option('--ndimage',
parser.set_defaults(d = 1,
N = 1,
immutable = [],
renumber = False,
ndimage = False,
)

@ -51,37 +56,17 @@ parser.set_defaults(d = 1,

options.immutable = list(map(int,options.immutable))

getInterfaceEnergy = lambda A,B: np.float32((A*B != 0)*(A != B)*1.0) # 1.0 if A & B are distinct & nonzero, 0.0 otherwise
struc = ndimage.generate_binary_structure(3,1) # 3D von Neumann neighborhood

# --- loop over input files -----------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)

table.head_read()
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)

errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

# --- read data -----------------------------------------------------------------------------------
microstructure = np.tile(table.microstructure_read(info['grid']).reshape(info['grid'],order='F'),
np.where(info['grid'] == 1, 2,1)) # make one copy along dimensions with grid == 1
grid_original = geom.get_grid()
damask.util.croak(geom)
microstructure = np.tile(geom.microstructure,np.where(grid_original == 1, 2,1)) # make one copy along dimensions with grid == 1
grid = np.array(microstructure.shape)

# --- initialize support data ---------------------------------------------------------------------

@ -94,7 +79,7 @@ for name in filenames:

# Calculates gaussian weights for simulating 3d diffusion
gauss = np.exp(-(X*X + Y*Y + Z*Z)/(2.0*options.d*options.d),dtype=np.float32) \
/np.power(2.0*np.pi*options.d*options.d,(3.0 - np.count_nonzero(info['grid'] == 1))/2.,dtype=np.float32)
/np.power(2.0*np.pi*options.d*options.d,(3.0 - np.count_nonzero(grid_original == 1))/2.,dtype=np.float32)

gauss[:,:,:grid[2]//2:-1] = gauss[:,:,1:(grid[2]+1)//2] # trying to cope with uneven (odd) grid size
gauss[:,:grid[1]//2:-1,:] = gauss[:,1:(grid[1]+1)//2,:]

@ -184,49 +169,10 @@ for name in filenames:
# undo any changes involving immutable microstructures
microstructure = np.where(immutable, microstructure_original,microstructure)

# --- renumber to sequence 1...Ngrains if requested -----------------------------------------------
# http://stackoverflow.com/questions/10741346/np-frequency-counts-for-unique-values-in-an-array

if options.renumber:
newID = 0
for microstructureID,count in enumerate(np.bincount(microstructure.flatten())):
if count != 0:
newID += 1
microstructure = np.where(microstructure == microstructureID, newID, microstructure)

newInfo = {'microstructures': len(np.unique(microstructure)),}

# --- report --------------------------------------------------------------------------------------

remarks = []
if newInfo['microstructures'] != info['microstructures']:
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))
if remarks != []: damask.util.croak(remarks)

# --- write header --------------------------------------------------------------------------------

table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()

# --- write microstructure information ------------------------------------------------------------

formatwidth = int(math.floor(math.log10(np.nanmax(microstructure))+1))
table.data = microstructure[::1 if info['grid'][0]>1 else 2,
::1 if info['grid'][1]>1 else 2,
::1 if info['grid'][2]>1 else 2,].\
reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray('%{}i'.format(formatwidth),delimiter = ' ')

# --- output finalization --------------------------------------------------------------------------

table.close()
damask.util.croak(geom.update(microstructure[0:grid_original[0],0:grid_original[1],0:grid_original[2]]))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
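Editor's note: the curvature-flow script keeps its FFT/ndimage diffusion machinery; only the geometry I/O changes in this commit. As a much-simplified, hedged sketch of the underlying idea (not the script's actual interface-energy formulation): blur each grain's indicator function with a Gaussian of width d and let every voxel adopt the grain whose smoothed indicator is largest.

import numpy as np
from scipy import ndimage

def smoothing_step(microstructure,d=1.0):
    """One diffusion step: Gaussian-blur each grain indicator (periodically), keep the winner."""
    ids = np.unique(microstructure)
    blurred = np.stack([ndimage.gaussian_filter((microstructure == i).astype(np.float32),
                                                sigma=d,mode='wrap')
                        for i in ids])
    return ids[np.argmax(blurred,axis=0)]

micro = np.ones((16,16,16),dtype=int)
micro[:8,:,:] = 2                                 # two grains separated by a flat interface
print(np.unique(smoothing_step(micro,d=1.0)))     # [1 2]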
@ -1,120 +1,72 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os,sys,math
import numpy as np
import damask
import os
import sys
from io import StringIO
from optparse import OptionParser

import numpy as np

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])


#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

validDirections = ['x','y','z']
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Mirrors spectral geometry description along given directions.
Mirror along given directions.

""", version=scriptID)

validDirections = ['x','y','z']

parser.add_option('-d','--direction',
dest = 'directions',
action = 'extend', metavar = '<string LIST>',
help = "directions in which to mirror {'x','y','z'}")
parser.add_option('--float',
dest = 'float',
help = "directions in which to mirror {{{}}}".format(','.join(validDirections)))
parser.add_option( '--reflect',
dest = 'reflect',
action = 'store_true',
help = 'use float input')
help = 'reflect (include) outermost layers')

parser.set_defaults(float = False,
)
parser.set_defaults(reflect = False)

(options, filenames) = parser.parse_args()

if options.directions is None:
parser.error('no direction given.')

if not set(options.directions).issubset(validDirections):
invalidDirections = [str(e) for e in set(options.directions).difference(validDirections)]
parser.error('invalid directions {}. '.format(*invalidDirections))

datatype = 'f' if options.float else 'i'
limits = [None,None] if options.reflect else [-2,0]

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------

table.head_read()
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)

errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

# --- read data ------------------------------------------------------------------------------------

microstructure = table.microstructure_read(info['grid'],datatype).reshape(info['grid'],order='F') # read microstructure
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)

microstructure = geom.get_microstructure()
if 'z' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[:,:,::-1]],2)
microstructure = np.concatenate([microstructure,microstructure[:,:,limits[0]:limits[1]:-1]],2)
if 'y' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[:,::-1,:]],1)
microstructure = np.concatenate([microstructure,microstructure[:,limits[0]:limits[1]:-1,:]],1)
if 'x' in options.directions:
microstructure = np.concatenate([microstructure,microstructure[::-1,:,:]],0)
microstructure = np.concatenate([microstructure,microstructure[limits[0]:limits[1]:-1,:,:]],0)

# --- do work ------------------------------------------------------------------------------------
damask.util.croak(geom.update(microstructure,rescale=True))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

newInfo = {
'size': microstructure.shape*info['size']/info['grid'],
'grid': microstructure.shape,
}


# --- report ---------------------------------------------------------------------------------------

remarks = []
if (any(newInfo['grid'] != info['grid'])):
remarks.append('--> grid a b c: %s'%(' x '.join(map(str,newInfo['grid']))))
if (any(newInfo['size'] != info['size'])):
remarks.append('--> size x y z: %s'%(' x '.join(map(str,newInfo['size']))))
if remarks != []: damask.util.croak(remarks)

# --- write header ---------------------------------------------------------------------------------

table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.head_write()

# --- write microstructure information ------------------------------------------------------------

formatwidth = int(math.floor(math.log10(np.nanmax(microstructure))+1))
table.data = microstructure.reshape((newInfo['grid'][0],np.prod(newInfo['grid'][1:])),order='F').transpose()
table.data_writeArray('%{}i'.format(formatwidth),delimiter = ' ')

# --- output finalization --------------------------------------------------------------------------

table.close() # close ASCII table
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
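Editor's note: the new --reflect switch above only changes the slice limits used when mirroring. A one-dimensional illustration of the two cases, outside the commit:

import numpy as np

a = np.array([1,2,3,4])

limits = [-2,0]                                          # default: outermost layer not repeated
print(np.concatenate([a,a[limits[0]:limits[1]:-1]]))     # [1 2 3 4 3 2]

limits = [None,None]                                     # --reflect: outermost layer included
print(np.concatenate([a,a[limits[0]:limits[1]:-1]]))     # [1 2 3 4 4 3 2 1]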
@ -1,80 +1,49 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os,sys
import numpy as np
import os
import sys
from io import StringIO
from optparse import OptionParser

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])


#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
compress geometry files with ranges "a to b" and/or multiples "n of x".
Pack ranges to "a to b" and/or multiples to "n of x".

""", version = scriptID)

(options, filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False, labeled = False)
except: continue
damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)

table.head_read()
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)
damask.util.croak(geom)
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

# --- write header ---------------------------------------------------------------------------------

table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
])
table.head_write()

# --- write packed microstructure information -----------------------------------------------------

compressType = ''
compressType = None
former = start = -1
reps = 0

outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
items = table.data
if len(items) > 2:
if items[1].lower() == 'of': items = [int(items[2])]*int(items[0])
elif items[1].lower() == 'to': items = range(int(items[0]),1+int(items[2]))
else: items = map(int,items)
else: items = map(int,items)
if name is None:
f = sys.stdout
else:
f = open(name,'w')

for current in items:
for current in geom.microstructure.flatten('F'):
if abs(current - former) == 1 and (start - current) == reps*(former - current):
compressType = 'to'
reps += 1

@ -82,16 +51,14 @@ for name in filenames:
compressType = 'of'
reps += 1
else:
if compressType == '':
table.data = []
if compressType is None:
f.write('\n'.join(geom.get_header())+'\n')
elif compressType == '.':
table.data = [former]
f.write('{}\n'.format(former))
elif compressType == 'to':
table.data = [start,'to',former]
f.write('{} to {}\n'.format(start,former))
elif compressType == 'of':
table.data = [reps,'of',former]

outputAlive = table.data_write(delimiter = ' ') # output processed line
f.write('{} of {}\n'.format(reps,former))

compressType = '.'
start = current

@ -99,14 +66,9 @@ for name in filenames:

former = current

table.data = {
'.' : [former],
'to': [start,'to',former],
'of': [reps,'of',former],
}[compressType]

outputAlive = table.data_write(delimiter = ' ') # output processed line

# --- output finalization --------------------------------------------------------------------------

table.close() # close ASCII table
if compressType == '.':
f.write('{}\n'.format(former))
elif compressType == 'to':
f.write('{} to {}\n'.format(start,former))
elif compressType == 'of':
f.write('{} of {}\n'.format(reps,former))
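Editor's note: the packed notation written above is the same one the old table reader parsed: "n of x" repeats an index, "a to b" is an inclusive range. A small sketch of the expansion rule, mirroring the parsing branch this commit removes:

def unpack(line):
    """Expand one packed geom data line ('n of x', 'a to b', or plain indices)."""
    items = line.split()
    if len(items) > 2 and items[1].lower() == 'of':
        return [int(items[2])]*int(items[0])
    if len(items) > 2 and items[1].lower() == 'to':
        return list(range(int(items[0]),1+int(items[2])))
    return [int(i) for i in items]

print(unpack('4 of 1'),unpack('2 to 5'),unpack('7'))   # [1, 1, 1, 1] [2, 3, 4, 5] [7]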
@ -1,96 +1,46 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os,sys,math
import numpy as np
import damask
import os
import sys
from io import StringIO
from optparse import OptionParser

import numpy as np

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])


#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [file[s]]', description = """
renumber sorted microstructure indices to 1,...,N.
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
Renumber sorted microstructure indices to 1,...,N.

""", version=scriptID)

(options, filenames) = parser.parse_args()

# --- loop over input files ----------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)

# --- interpret header ---------------------------------------------------------------------------
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)

table.head_read()
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)
renumbered = np.empty(geom.get_grid(),dtype=geom.microstructure.dtype)
for i, oldID in enumerate(np.unique(geom.microstructure)):
renumbered = np.where(geom.microstructure == oldID, i+1, renumbered)

errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
damask.util.croak(geom.update(renumbered))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

# --- read data ----------------------------------------------------------------------------------

microstructure = table.microstructure_read(info['grid']) # read microstructure

# --- do work ------------------------------------------------------------------------------------

newInfo = {
'origin': np.zeros(3,'d'),
'microstructures': 0,
}

grainIDs = np.unique(microstructure)
renumbered = np.copy(microstructure)

for i, oldID in enumerate(grainIDs):
renumbered = np.where(microstructure == oldID, i+1, renumbered)

newInfo['microstructures'] = len(grainIDs)

# --- report -------------------------------------------------------------------------------------

remarks = []
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: %i'%newInfo['microstructures'])
if remarks != []: damask.util.croak(remarks)

# --- write header -------------------------------------------------------------------------------

table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()

# --- write microstructure information -----------------------------------------------------------

format = '%{}i'.format(int(math.floor(math.log10(np.nanmax(renumbered))+1)))
table.data = renumbered.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray(format,delimiter = ' ')

# --- output finalization ------------------------------------------------------------------------

table.close() # close ASCII table
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
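Editor's note: the renumbering loop above maps the sorted set of existing indices onto 1...N. A toy example of the same loop, outside the commit:

import numpy as np

micro = np.array([[4,4,9],
                  [9,2,2]])
renumbered = np.empty_like(micro)
for i,oldID in enumerate(np.unique(micro)):   # unique IDs come back sorted: 2,4,9
    renumbered[micro == oldID] = i+1
print(renumbered)                             # [[2 2 3]
                                              #  [3 1 1]]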
@ -1,20 +1,26 @@
#!/usr/bin/env python3
# -*- coding: UTF-8 no BOM -*-

import os,sys,math
import os
import sys
import numpy as np

from io import StringIO
from optparse import OptionParser
from scipy import ndimage

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])


#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Scales a geometry description independently in x, y, and z direction in terms of grid and/or size.
Scales independently in x, y, and z direction in terms of grid and/or size.
Either absolute values or relative factors (like "0.25x") can be used.

""", version = scriptID)

@ -22,142 +28,47 @@ Either absolute values or relative factors (like "0.25x") can be used.
parser.add_option('-g', '--grid',
dest = 'grid',
type = 'string', nargs = 3, metavar = 'string string string',
help = 'a,b,c grid of hexahedral box [unchanged]')
help = 'a,b,c grid of hexahedral box')
parser.add_option('-s', '--size',
dest = 'size',
type = 'string', nargs = 3, metavar = 'string string string',
help = 'x,y,z size of hexahedral box [unchanged]')
parser.add_option('-r', '--renumber',
dest = 'renumber',
action = 'store_true',
help = 'renumber microstructure indices from 1..N [%default]')
parser.add_option('--float',
dest = 'float',
action = 'store_true',
help = 'use float input')

parser.set_defaults(renumber = False,
grid = ['0','0','0'],
size = ['0.0','0.0','0.0'],
float = False,
)
help = 'x,y,z size of hexahedral box')

(options, filenames) = parser.parse_args()

datatype = 'f' if options.float else 'i'

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
try:
table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)

# --- interpret header ----------------------------------------------------------------------------
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)

table.head_read()
info,extra_header = table.head_getGeom()
damask.util.report_geom(info)
grid = geom.get_grid()
size = geom.get_size()

errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
new_grid = grid if options.grid is None else \
np.array([int(o*float(n.lower().replace('x',''))) if n.lower().endswith('x') \
else int(n) for o,n in zip(grid,options.grid)],dtype=int)

# --- read data ------------------------------------------------------------------------------------
new_size = size if options.size is None else \
np.array([o*float(n.lower().replace('x','')) if n.lower().endswith('x') \
else float(n) for o,n in zip(size,options.size)],dtype=float)

microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure
damask.util.croak(geom.update(microstructure =
ndimage.interpolation.zoom(
geom.microstructure,
new_grid/grid,
output=geom.microstructure.dtype,
order=0,
mode='nearest',
prefilter=False,
) if np.any(new_grid != grid) \
else None,
size = new_size))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

# --- do work ------------------------------------------------------------------------------------

newInfo = {
'grid': np.zeros(3,'i'),
'origin': np.zeros(3,'d'),
'microstructures': 0,
}

newInfo['grid'] = np.array([{True: round(o*float(n.lower().replace('x',''))),
False: round(float(n.lower().replace('x','')))}[n[-1].lower() == 'x']
for o,n in zip(info['grid'],options.grid)],'i')
newInfo['size'] = np.array([{True: o*float(n.lower().replace('x','')),
False: float(n.lower().replace('x',''))}[n[-1].lower() == 'x']
for o,n in zip(info['size'],options.size)],'d')
newInfo['grid'] = np.where(newInfo['grid'] <= 0 , info['grid'],newInfo['grid'])
newInfo['size'] = np.where(newInfo['size'] <= 0.0, info['size'],newInfo['size'])

multiplicity = []
for j in range(3):
multiplicity.append([])
last = 0
for i in range(info['grid'][j]):
this = int((i+1)*float(newInfo['grid'][j])/info['grid'][j])
multiplicity[j].append(this-last)
last = this

microstructure = microstructure.reshape(info['grid'],order='F')
microstructure = np.repeat(np.repeat(np.repeat(microstructure,
multiplicity[0], axis=0),multiplicity[1], axis=1),multiplicity[2], axis=2)
# --- renumber to sequence 1...Ngrains if requested ------------------------------------------------
# http://stackoverflow.com/questions/10741346/np-frequency-counts-for-unique-values-in-an-array

if options.renumber:
newID = 0
for microstructureID,count in enumerate(np.bincount(microstructure.reshape(newInfo['grid'].prod()))):
if count != 0:
newID += 1
microstructure = np.where(microstructure == microstructureID, newID,microstructure).reshape(microstructure.shape)

newInfo['microstructures'] = len(np.unique(microstructure))

# --- report ---------------------------------------------------------------------------------------

remarks = []
errors = []

if (any(newInfo['grid'] != info['grid'])):
remarks.append('--> grid a b c: %s'%(' x '.join(map(str,newInfo['grid']))))
if (any(newInfo['size'] != info['size'])):
remarks.append('--> size x y z: %s'%(' x '.join(map(str,newInfo['size']))))
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: %i'%newInfo['microstructures'])

if np.any(newInfo['grid'] < 1): errors.append('invalid new grid a b c.')
if np.any(newInfo['size'] <= 0.0): errors.append('invalid new size x y z.')

if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue

# --- write header ---------------------------------------------------------------------------------

table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.labels_clear()
table.head_write()

# --- write microstructure information ------------------------------------------------------------

format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(microstructure))+1)))
table.data = microstructure.reshape((newInfo['grid'][0],newInfo['grid'][1]*newInfo['grid'][2]),order='F').transpose()
table.data_writeArray(format,delimiter=' ')

# --- output finalization --------------------------------------------------------------------------

table.close() # close ASCII table
if name is None:
sys.stdout.write(str(geom.show()))
else:
geom.to_file(name)
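Editor's note: the rescale script now accepts either absolute targets or relative factors ending in "x" and resamples with zeroth-order ndimage.zoom. A hedged sketch of the factor handling in isolation (the helper name is mine; the script does this inline):

import numpy as np

def resolve(old,new,cast):
    """'2x' scales the current value, a plain number is absolute; cast is int (grid) or float (size)."""
    return np.array([cast(o*float(n.lower().replace('x',''))) if n.lower().endswith('x')
                     else cast(n) for o,n in zip(old,new)])

print(resolve((16,16,16),('2x','32','0.5x'),int))        # grid -> [32 32 8]
print(resolve((1.0,1.0,1.0),('0.25x','1','2x'),float))   # size -> [0.25 1. 2.]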
@ -1,28 +1,33 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,math
|
||||
import numpy as np
|
||||
import damask
|
||||
from scipy import ndimage
|
||||
import os
|
||||
import sys
|
||||
from io import StringIO
|
||||
from optparse import OptionParser
|
||||
|
||||
from scipy import ndimage
|
||||
import numpy as np
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
# MAIN
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
|
||||
Rotates spectral geometry description.
|
||||
Rotates original microstructure and embeds it into buffer material.
|
||||
|
||||
""", version=scriptID)
|
||||
|
||||
parser.add_option('-r', '--rotation',
|
||||
dest='rotation',
|
||||
type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
|
||||
help = 'rotation given as angle and axis')
|
||||
help = 'rotation given as axis and angle')
|
||||
parser.add_option('-e', '--eulers',
|
||||
dest = 'eulers',
|
||||
type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
|
||||
|
@ -30,7 +35,7 @@ parser.add_option('-e', '--eulers',
|
|||
parser.add_option('-d', '--degrees',
|
||||
dest = 'degrees',
|
||||
action = 'store_true',
|
||||
help = 'Euler angles are given in degrees [%default]')
|
||||
help = 'Euler angles/axis angle are given in degrees')
|
||||
parser.add_option('-m', '--matrix',
|
||||
dest = 'matrix',
|
||||
type = 'float', nargs = 9, metavar = ' '.join(['float']*9),
|
||||
|
@ -41,108 +46,56 @@ parser.add_option('-q', '--quaternion',
|
|||
help = 'rotation given as quaternion')
|
||||
parser.add_option('-f', '--fill',
|
||||
dest = 'fill',
|
||||
type = 'int', metavar = 'int',
|
||||
help = 'background grain index. "0" selects maximum microstructure index + 1 [%default]')
|
||||
parser.add_option('--float',
|
||||
dest = 'float',
|
||||
action = 'store_true',
|
||||
help = 'use float input')
|
||||
type = 'float', metavar = 'int',
|
||||
help = 'background microstructure index, defaults to max microstructure index + 1')
|
||||
|
||||
parser.set_defaults(degrees = False,
|
||||
fill = 0,
|
||||
float = False,
|
||||
)
|
||||
parser.set_defaults(degrees = False)
|
||||
|
||||
(options, filenames) = parser.parse_args()
|
||||
|
||||
if sum(x is not None for x in [options.rotation,options.eulers,options.matrix,options.quaternion]) != 1:
|
||||
parser.error('not exactly one rotation specified...')
|
||||
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) < 3:
|
||||
parser.error('more than one rotation specified.')
|
||||
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) > 3:
|
||||
parser.error('no rotation specified.')
|
||||
|
||||
if options.quaternion is not None:
|
||||
eulers = damask.Rotation.fromQuaternion(np.array(options.quaternion)).asEulers(degrees=True)
|
||||
rot = damask.Rotation.fromQuaternion(np.array(options.quaternion)) # we might need P=+1 here, too...
|
||||
if options.rotation is not None:
|
||||
eulers = damask.Rotation.fromAxisAngle(np.array(options.rotation,degrees=True)).asEulers(degrees=True)
|
||||
rot = damask.Rotation.fromAxisAngle(np.array(options.rotation),degrees=options.degrees,normalise=True,P=+1)
|
||||
if options.matrix is not None:
|
||||
eulers = damask.Rotation.fromMatrix(np.array(options.Matrix)).asEulers(degrees=True)
|
||||
rot = damask.Rotation.fromMatrix(np.array(options.matrix))
|
||||
if options.eulers is not None:
|
||||
eulers = damask.Rotation.fromEulers(np.array(options.eulers),degrees=True).asEulers(degrees=True)
|
||||
rot = damask.Rotation.fromEulers(np.array(options.eulers),degrees=options.degrees)
|
||||
|
||||
datatype = 'f' if options.float else 'i'
|
||||
eulers = rot.asEulers(degrees=True)
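Whichever option is given, the result is a single damask.Rotation whose Bunge Euler angles drive the grid rotation below; a short sketch with made-up axis/angle input (API as used above):

import numpy as np
import damask

rot    = damask.Rotation.fromAxisAngle(np.array([0.,0.,1.,90.]), degrees=True, normalise=True, P=+1)
eulers = rot.asEulers(degrees=True)            # Bunge (phi1, Phi, phi2) equivalent to 90 deg about z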
|
||||
|
||||
# --- loop over input files -------------------------------------------------------------------------
|
||||
|
||||
if filenames == []: filenames = [None]
|
||||
|
||||
for name in filenames:
|
||||
try:
|
||||
table = damask.ASCIItable(name = name,
|
||||
buffered = False,
|
||||
labeled = False)
|
||||
except: continue
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
# --- interpret header ----------------------------------------------------------------------------
|
||||
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
|
||||
size = geom.get_size()
|
||||
grid = geom.get_grid()
|
||||
origin = geom.get_origin()
|
||||
microstructure = geom.get_microstructure()
|
||||
fill = np.nanmax(microstructure)+1 if options.fill is None else options.fill
|
||||
dtype = float if np.isnan(fill) or int(fill) != fill or microstructure.dtype==np.float else int
|
||||
|
||||
table.head_read()
|
||||
info,extra_header = table.head_getGeom()
|
||||
damask.util.report_geom(info)
|
||||
# These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
|
||||
# this seems to be ok, see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf
|
||||
microstructure = ndimage.rotate(microstructure,eulers[2],(0,1),order=0,
|
||||
prefilter=False,output=dtype,cval=fill) # rotation around z
|
||||
microstructure = ndimage.rotate(microstructure,eulers[1],(1,2),order=0,
|
||||
prefilter=False,output=dtype,cval=fill) # rotation around x
|
||||
microstructure = ndimage.rotate(microstructure,eulers[0],(0,1),order=0,
|
||||
prefilter=False,output=dtype,cval=fill) # rotation around z
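Each Euler angle is applied with nearest-neighbour interpolation so the voxel data stay integer-valued; the canvas grows to hold the rotated box and the gaps are filled with the buffer index. A self-contained sketch with a hypothetical 4x4x4 geometry:

import numpy as np
from scipy import ndimage

m    = np.zeros((4,4,4), dtype=int); m[1:3,1:3,1:3] = 1   # hypothetical microstructure
fill = m.max() + 1                                        # buffer material index
eulers = (30., 20., 10.)                                  # assumed Bunge angles in degrees

m = ndimage.rotate(m, eulers[2], (0,1), order=0, prefilter=False, output=int, cval=fill)  # about z
m = ndimage.rotate(m, eulers[1], (1,2), order=0, prefilter=False, output=int, cval=fill)  # about x
m = ndimage.rotate(m, eulers[0], (0,1), order=0, prefilter=False, output=int, cval=fill)  # about z
print(m.shape)                                            # larger than (4,4,4): canvas grew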
|
||||
|
||||
errors = []
|
||||
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
|
||||
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
damask.util.croak(geom.update(microstructure,origin=origin-(np.asarray(microstructure.shape)-grid)/2*size/grid,rescale=True))
|
||||
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
|
||||
|
||||
# --- read data ------------------------------------------------------------------------------------
|
||||
|
||||
microstructure = table.microstructure_read(info['grid'],datatype).reshape(info['grid'],order='F') # read microstructure
|
||||
|
||||
newGrainID = options.fill if options.fill != 0 else np.nanmax(microstructure)+1
|
||||
microstructure = ndimage.rotate(microstructure,eulers[2],(0,1),order=0,prefilter=False,output=int,cval=newGrainID) # rotation around Z
|
||||
microstructure = ndimage.rotate(microstructure,eulers[1],(1,2),order=0,prefilter=False,output=int,cval=newGrainID) # rotation around X
|
||||
microstructure = ndimage.rotate(microstructure,eulers[0],(0,1),order=0,prefilter=False,output=int,cval=newGrainID) # rotation around Z
|
||||
|
||||
# --- do work ------------------------------------------------------------------------------------
|
||||
|
||||
newInfo = {
|
||||
'size': microstructure.shape*info['size']/info['grid'],
|
||||
'grid': microstructure.shape,
|
||||
'microstructures': len(np.unique(microstructure)),
|
||||
}
|
||||
|
||||
# --- report ---------------------------------------------------------------------------------------
|
||||
|
||||
remarks = []
|
||||
if (any(newInfo['grid'] != info['grid'])):
|
||||
remarks.append('--> grid a b c: {}'.format(' x '.join(map(str,newInfo['grid']))))
|
||||
if (any(newInfo['size'] != info['size'])):
|
||||
remarks.append('--> size x y z: {}'.format(' x '.join(map(str,newInfo['size']))))
|
||||
if ( newInfo['microstructures'] != info['microstructures']):
|
||||
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))
|
||||
if remarks != []: damask.util.croak(remarks)
|
||||
|
||||
# --- write header ---------------------------------------------------------------------------------
|
||||
|
||||
table.labels_clear()
|
||||
table.info_clear()
|
||||
table.info_append(extra_header+[
|
||||
scriptID + ' ' + ' '.join(sys.argv[1:]),
|
||||
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
|
||||
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
|
||||
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
|
||||
"homogenization\t{homog}".format(homog=info['homogenization']),
|
||||
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
|
||||
])
|
||||
table.head_write()
|
||||
|
||||
# --- write microstructure information ------------------------------------------------------------
|
||||
|
||||
format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(microstructure))+1)))
|
||||
table.data = microstructure.reshape((newInfo['grid'][0],np.prod(newInfo['grid'][1:])),order='F').transpose()
|
||||
table.data_writeArray(format,delimiter=' ')
|
||||
|
||||
# --- output finalization --------------------------------------------------------------------------
|
||||
|
||||
table.close() # close ASCII table
|
||||
if name is None:
|
||||
sys.stdout.write(str(geom.show()))
|
||||
else:
|
||||
geom.to_file(name)
|
||||
|
|
|
@ -1,11 +1,15 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys
|
||||
import numpy as np
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
from io import StringIO
|
||||
|
||||
import numpy as np
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
@ -19,70 +23,39 @@ Translate geom description into ASCIItable containing position and microstructur
|
|||
|
||||
""", version = scriptID)
|
||||
|
||||
parser.add_option('--float',
|
||||
dest = 'float',
|
||||
action = 'store_true',
|
||||
help = 'use float input')
|
||||
|
||||
parser.set_defaults(float = False,
|
||||
)
|
||||
(options, filenames) = parser.parse_args()
|
||||
|
||||
datatype = 'f' if options.float else 'i'
|
||||
|
||||
# --- loop over input files -------------------------------------------------------------------------
|
||||
|
||||
if filenames == []: filenames = [None]
|
||||
|
||||
for name in filenames:
|
||||
try: table = damask.ASCIItable(name = name,
|
||||
outname = os.path.splitext(name)[0]+'.txt' if name else name,
|
||||
buffered = False,
|
||||
labeled = False,
|
||||
)
|
||||
except: continue
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
# --- interpret header ----------------------------------------------------------------------------
|
||||
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
|
||||
|
||||
table.head_read()
|
||||
info,extra_header = table.head_getGeom()
|
||||
damask.util.report_geom(info)
|
||||
|
||||
errors = []
|
||||
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
|
||||
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
|
||||
# --- read data ------------------------------------------------------------------------------------
|
||||
|
||||
microstructure = table.microstructure_read(info['grid'],datatype)
|
||||
|
||||
# ------------------------------------------ assemble header ---------------------------------------
|
||||
|
||||
table.info_clear()
|
||||
table.info_append(extra_header + [scriptID + '\t' + ' '.join(sys.argv[1:])])
|
||||
table.labels_clear()
|
||||
table.labels_append(['{}_{}'.format(1+i,'pos') for i in range(3)]+['microstructure'])
|
||||
table.head_write()
|
||||
table.output_flush()
|
||||
damask.util.croak(geom)
|
||||
|
||||
# --- generate grid --------------------------------------------------------------------------------
|
||||
|
||||
x = (0.5 + np.arange(info['grid'][0],dtype=float))/info['grid'][0]*info['size'][0]+info['origin'][0]
|
||||
y = (0.5 + np.arange(info['grid'][1],dtype=float))/info['grid'][1]*info['size'][1]+info['origin'][1]
|
||||
z = (0.5 + np.arange(info['grid'][2],dtype=float))/info['grid'][2]*info['size'][2]+info['origin'][2]
|
||||
grid = geom.get_grid()
|
||||
size = geom.get_size()
|
||||
origin = geom.get_origin()
|
||||
|
||||
xx = np.tile( x, info['grid'][1]* info['grid'][2])
|
||||
yy = np.tile(np.repeat(y,info['grid'][0] ),info['grid'][2])
|
||||
zz = np.repeat(z,info['grid'][0]*info['grid'][1])
|
||||
x = (0.5 + np.arange(grid[0],dtype=float))/grid[0]*size[0]+origin[0]
|
||||
y = (0.5 + np.arange(grid[1],dtype=float))/grid[1]*size[1]+origin[1]
|
||||
z = (0.5 + np.arange(grid[2],dtype=float))/grid[2]*size[2]+origin[2]
|
||||
|
||||
table.data = np.squeeze(np.dstack((xx,yy,zz,microstructure)),axis=0)
|
||||
xx = np.tile( x, grid[1]* grid[2])
|
||||
yy = np.tile(np.repeat(y,grid[0] ),grid[2])
|
||||
zz = np.repeat(z,grid[0]*grid[1])
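The tiling lays out cell-center coordinates in the same Fortran order in which the microstructure is flattened; a tiny sketch for an assumed 2 x 2 x 1 grid of unit size:

import numpy as np

grid, size, origin = np.array([2,2,1]), np.array([1.,1.,1.]), np.zeros(3)   # assumed values

x = (0.5 + np.arange(grid[0]))/grid[0]*size[0] + origin[0]   # cell centers along x: 0.25, 0.75
y = (0.5 + np.arange(grid[1]))/grid[1]*size[1] + origin[1]
z = (0.5 + np.arange(grid[2]))/grid[2]*size[2] + origin[2]

xx = np.tile(x, grid[1]*grid[2])              # x varies fastest ...
yy = np.tile(np.repeat(y, grid[0]), grid[2])  # ... then y ...
zz = np.repeat(z, grid[0]*grid[1])            # ... then z, matching flatten('F')
print(np.column_stack((xx, yy, zz)))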
|
||||
|
||||
# --- create ASCII table --------------------------------------------------------------------------
|
||||
|
||||
table = damask.ASCIItable(outname = os.path.splitext(name)[0]+'.txt' if name else name)
|
||||
table.info_append(geom.get_comments() + [scriptID + '\t' + ' '.join(sys.argv[1:])])
|
||||
table.labels_append(['{}_{}'.format(1+i,'pos') for i in range(3)]+['microstructure'])
|
||||
table.head_write()
|
||||
table.output_flush()
|
||||
table.data = np.squeeze(np.dstack((xx,yy,zz,geom.microstructure.flatten('F'))),axis=0)
|
||||
table.data_writeArray()
|
||||
|
||||
# ------------------------------------------ finalize output ---------------------------------------
|
||||
|
||||
table.close()
|
||||
|
|
|
@ -1,20 +1,23 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,math
|
||||
import numpy as np
|
||||
import damask
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
from io import StringIO
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
# MAIN
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
||||
translate microstructure indices (shift or substitute) and/or geometry origin.
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
|
||||
Translate microstructure indices (shift or substitute) and/or geometry origin.
|
||||
|
||||
""", version=scriptID)
|
||||
|
||||
|
@ -25,103 +28,37 @@ parser.add_option('-o', '--origin',
|
|||
parser.add_option('-m', '--microstructure',
|
||||
dest = 'microstructure',
|
||||
type = 'int', metavar = 'int',
|
||||
help = 'offset from old to new microstructure indices')
|
||||
help = 'offset from old to new microstructure indices (after substitution)')
|
||||
parser.add_option('-s', '--substitute',
|
||||
dest = 'substitute',
|
||||
action = 'extend', metavar = '<string LIST>',
|
||||
help = 'substitutions of microstructure indices from,to,from,to,...')
|
||||
parser.add_option('--float',
|
||||
dest = 'float',
|
||||
action = 'store_true',
|
||||
help = 'use float input')
|
||||
|
||||
parser.set_defaults(origin = (0.0,0.0,0.0),
|
||||
microstructure = 0,
|
||||
substitute = [],
|
||||
float = False,
|
||||
substitute = []
|
||||
)
|
||||
|
||||
(options, filenames) = parser.parse_args()
|
||||
|
||||
datatype = 'f' if options.float else 'i'
|
||||
sub = list(map(int,options.substitute))
|
||||
|
||||
sub = {}
|
||||
for i in range(len(options.substitute)//2): # split substitution list into "from" -> "to"
|
||||
sub[int(options.substitute[i*2])] = int(options.substitute[i*2+1])
|
||||
|
||||
# --- loop over input files ----------------------------------------------------------------------
|
||||
|
||||
if filenames == []: filenames = [None]
|
||||
|
||||
for name in filenames:
|
||||
try: table = damask.ASCIItable(name = name,
|
||||
buffered = False,
|
||||
labeled = False)
|
||||
except: continue
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
# --- interpret header ---------------------------------------------------------------------------
|
||||
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
|
||||
|
||||
table.head_read()
|
||||
info,extra_header = table.head_getGeom()
|
||||
damask.util.report_geom(info)
|
||||
substituted = geom.get_microstructure()
|
||||
for old,new in zip(sub[0::2],sub[1::2]): substituted[substituted==old] = new # substitute microstructure indices
|
||||
substituted += options.microstructure # constant shift
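Substitution is pairwise (from,to,from,to,...) and the constant shift is applied afterwards; a minimal sketch with made-up indices:

import numpy as np

microstructure = np.array([1, 2, 2, 3])       # hypothetical indices
sub            = [2, 5, 3, 7]                 # --substitute 2,5,3,7  -> 2->5, 3->7

substituted = microstructure.copy()
for old, new in zip(sub[0::2], sub[1::2]):
    substituted[substituted == old] = new
substituted += 10                             # --microstructure 10 (constant shift)
print(substituted)                            # [11 15 15 17]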
|
||||
|
||||
errors = []
|
||||
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
|
||||
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
damask.util.croak(geom.update(substituted,origin=geom.get_origin()+options.origin))
|
||||
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
|
||||
|
||||
# --- read data ----------------------------------------------------------------------------------
|
||||
|
||||
microstructure = table.microstructure_read(info['grid'],datatype) # read microstructure
|
||||
|
||||
# --- do work ------------------------------------------------------------------------------------
|
||||
|
||||
newInfo = {
|
||||
'origin': np.zeros(3,'d'),
|
||||
'microstructures': 0,
|
||||
}
|
||||
|
||||
substituted = np.copy(microstructure)
|
||||
for k, v in sub.items(): substituted[microstructure==k] = v # substitute microstructure indices
|
||||
|
||||
substituted += options.microstructure # shift microstructure indices
|
||||
|
||||
newInfo['origin'] = info['origin'] + options.origin
|
||||
newInfo['microstructures'] = len(np.unique(substituted))
|
||||
|
||||
# --- report -------------------------------------------------------------------------------------
|
||||
|
||||
remarks = []
|
||||
if (any(newInfo['origin'] != info['origin'])):
|
||||
remarks.append('--> origin x y z: {}'.format(' : '.join(map(str,newInfo['origin']))))
|
||||
if ( newInfo['microstructures'] != info['microstructures']):
|
||||
remarks.append('--> microstructures: {}'.format(newInfo['microstructures']))
|
||||
if remarks != []: damask.util.croak(remarks)
|
||||
|
||||
# --- write header -------------------------------------------------------------------------------
|
||||
|
||||
table.labels_clear()
|
||||
table.info_clear()
|
||||
table.info_append(extra_header+[
|
||||
scriptID + ' ' + ' '.join(sys.argv[1:]),
|
||||
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
|
||||
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
|
||||
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=newInfo['origin']),
|
||||
"homogenization\t{homog}".format(homog=info['homogenization']),
|
||||
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
|
||||
])
|
||||
table.head_write()
|
||||
|
||||
# --- write microstructure information -----------------------------------------------------------
|
||||
|
||||
format = '%g' if options.float else '%{}i'.format(int(math.floor(math.log10(np.nanmax(substituted))+1)))
|
||||
table.data = substituted.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
|
||||
table.data_writeArray(format,delimiter = ' ')
|
||||
|
||||
# --- output finalization ------------------------------------------------------------------------
|
||||
|
||||
table.close() # close ASCII table
|
||||
if name is None:
|
||||
sys.stdout.write(str(geom.show()))
|
||||
else:
|
||||
geom.to_file(name)
|
||||
|
|
|
@ -1,80 +1,40 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,math
|
||||
import numpy as np
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
from io import StringIO
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
# MAIN
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
||||
Unpack geometry files containing ranges "a to b" and/or "n of x" multiples (exclusively in one line).
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
|
||||
Unpack ranges "a to b" and/or "n of x" multiples (exclusively in one line).
|
||||
|
||||
""", version = scriptID)
|
||||
|
||||
parser.add_option('-1', '--onedimensional',
|
||||
dest = 'oneD',
|
||||
action = 'store_true',
|
||||
help = 'output geom file with one-dimensional data arrangement')
|
||||
|
||||
parser.set_defaults(oneD = False,
|
||||
)
|
||||
|
||||
(options, filenames) = parser.parse_args()
|
||||
|
||||
# --- loop over input files -------------------------------------------------------------------------
|
||||
|
||||
if filenames == []: filenames = [None]
|
||||
|
||||
for name in filenames:
|
||||
try:
|
||||
table = damask.ASCIItable(name = name,
|
||||
buffered = False, labeled = False)
|
||||
except: continue
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
# --- interpret header ----------------------------------------------------------------------------
|
||||
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
|
||||
|
||||
table.head_read()
|
||||
info,extra_header = table.head_getGeom()
|
||||
damask.util.report_geom(info)
|
||||
damask.util.croak(geom)
|
||||
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
|
||||
|
||||
errors = []
|
||||
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
|
||||
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
|
||||
# --- write header ---------------------------------------------------------------------------------
|
||||
|
||||
table.labels_clear()
|
||||
table.info_clear()
|
||||
table.info_append(extra_header+[
|
||||
scriptID + ' ' + ' '.join(sys.argv[1:]),
|
||||
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
|
||||
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
|
||||
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
|
||||
"homogenization\t{homog}".format(homog=info['homogenization']),
|
||||
"microstructures\t{microstructures}".format(microstructures=info['microstructures']),
|
||||
])
|
||||
table.head_write()
|
||||
|
||||
# --- write microstructure information ------------------------------------------------------------
|
||||
|
||||
microstructure = table.microstructure_read(info['grid']) # read microstructure
|
||||
formatwidth = int(math.floor(math.log10(microstructure.max())+1)) # efficient number printing format
|
||||
table.data = microstructure if options.oneD else \
|
||||
microstructure.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
|
||||
table.data_writeArray('%%%ii'%(formatwidth),delimiter = ' ')
|
||||
|
||||
#--- output finalization --------------------------------------------------------------------------
|
||||
|
||||
table.close() # close ASCII table
|
||||
if name is None:
|
||||
sys.stdout.write(str(geom.show()))
|
||||
else:
|
||||
geom.to_file(name)
|
||||
|
|
|
@ -1,15 +1,20 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,sys,math
|
||||
import numpy as np
|
||||
from scipy import ndimage
|
||||
import os
|
||||
import sys
|
||||
from io import StringIO
|
||||
from optparse import OptionParser
|
||||
|
||||
from scipy import ndimage
|
||||
import numpy as np
|
||||
|
||||
import damask
|
||||
|
||||
|
||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||
scriptID = ' '.join([scriptName,damask.version])
|
||||
|
||||
|
||||
def taintedNeighborhood(stencil,trigger=[],size=1):
|
||||
|
||||
me = stencil[stencil.shape[0]//2]
|
||||
|
@ -21,11 +26,12 @@ def taintedNeighborhood(stencil,trigger=[],size=1):
|
|||
trigger = list(trigger)
|
||||
return np.any(np.in1d(stencil,np.array(trigger)))
|
||||
|
||||
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
# MAIN
|
||||
#--------------------------------------------------------------------------------------------------
|
||||
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
|
||||
Offset microstructure index for points which see a microstructure different from themselves
|
||||
(or listed as triggers) within a given (cubic) vicinity, i.e. within the region close to a grain/phase boundary.
|
||||
|
||||
|
@ -35,13 +41,13 @@ parser.add_option('-v', '--vicinity',
|
|||
dest = 'vicinity',
|
||||
type = 'int', metavar = 'int',
|
||||
help = 'voxel distance checked for presence of other microstructure [%default]')
|
||||
parser.add_option('-m', '--microstructureoffset',
|
||||
parser.add_option('-o', '--offset',
|
||||
dest='offset',
|
||||
type = 'int', metavar = 'int',
|
||||
help = 'offset (positive or negative) for tagged microstructure indices. '+
|
||||
'"0" selects maximum microstructure index [%default]')
|
||||
help='offset (positive or negative) to tag microstructure indices, defaults to max microstructure index')
|
||||
parser.add_option('-t', '--trigger',
|
||||
action = 'extend', dest = 'trigger', metavar = '<int LIST>',
|
||||
dest = 'trigger',
|
||||
action = 'extend', metavar = '<int LIST>',
|
||||
help = 'list of microstructure indices triggering a change')
|
||||
parser.add_option('-n', '--nonperiodic',
|
||||
dest = 'mode',
|
||||
|
@ -49,86 +55,34 @@ parser.add_option('-n', '--nonperiodic',
|
|||
help = 'assume geometry to be non-periodic')
|
||||
|
||||
parser.set_defaults(vicinity = 1,
|
||||
offset = 0,
|
||||
trigger = [],
|
||||
mode = 'wrap',
|
||||
)
|
||||
|
||||
(options, filenames) = parser.parse_args()
|
||||
|
||||
options.trigger = np.array(options.trigger, dtype=int)
|
||||
|
||||
|
||||
# --- loop over input files -------------------------------------------------------------------------
|
||||
|
||||
if filenames == []: filenames = [None]
|
||||
|
||||
for name in filenames:
|
||||
try:
|
||||
table = damask.ASCIItable(name = name,
|
||||
buffered = False, labeled = False)
|
||||
except: continue
|
||||
damask.util.report(scriptName,name)
|
||||
|
||||
# --- interpret header ----------------------------------------------------------------------------
|
||||
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
|
||||
|
||||
table.head_read()
|
||||
info,extra_header = table.head_getGeom()
|
||||
damask.util.report_geom(info)
|
||||
offset = np.nanmax(geom.microstructure) if options.offset is None else options.offset
|
||||
|
||||
errors = []
|
||||
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
|
||||
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
|
||||
if errors != []:
|
||||
damask.util.croak(errors)
|
||||
table.close(dismiss = True)
|
||||
continue
|
||||
|
||||
# --- read data ------------------------------------------------------------------------------------
|
||||
|
||||
microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F') # read microstructure
|
||||
|
||||
# --- do work ------------------------------------------------------------------------------------
|
||||
|
||||
newInfo = {
|
||||
'microstructures': 0,
|
||||
}
|
||||
|
||||
if options.offset == 0: options.offset = microstructure.max()
|
||||
|
||||
microstructure = np.where(ndimage.filters.generic_filter(microstructure,
|
||||
damask.util.croak(geom.update(np.where(ndimage.filters.generic_filter(
|
||||
geom.microstructure,
|
||||
taintedNeighborhood,
|
||||
size=1+2*options.vicinity,mode=options.mode,
|
||||
extra_arguments=(),
|
||||
extra_keywords={"trigger":options.trigger,"size":1+2*options.vicinity}),
|
||||
microstructure + options.offset,microstructure)
|
||||
geom.microstructure + offset,geom.microstructure)))
|
||||
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
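The generic_filter pass tags every voxel whose cubic neighbourhood contains a different (or triggering) microstructure and offsets only those; a reduced sketch of the same mechanism with a hypothetical bicrystal:

import numpy as np
from scipy import ndimage

def tainted(stencil, trigger=[]):                     # simplified taintedNeighborhood
    me = stencil[stencil.size//2]
    if len(trigger) == 0:
        return np.any(stencil != me)                  # neighbourhood not uniform
    return np.any(np.in1d(stencil, np.array(trigger)))

m      = np.ones((1,6,6), dtype=int); m[:,:,3:] = 2   # hypothetical bicrystal
offset = m.max()                                      # default: max microstructure index
tagged = ndimage.generic_filter(m, tainted, size=3, mode='wrap',
                                extra_keywords={'trigger': []}).astype(bool)
print(np.where(tagged, m + offset, m)[0])             # boundary voxels shifted by offset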
|
||||
|
||||
newInfo['microstructures'] = len(np.unique(microstructure))
|
||||
|
||||
# --- report ---------------------------------------------------------------------------------------
|
||||
|
||||
if (newInfo['microstructures'] != info['microstructures']):
|
||||
damask.util.croak('--> microstructures: %i'%newInfo['microstructures'])
|
||||
|
||||
# --- write header ---------------------------------------------------------------------------------
|
||||
|
||||
table.labels_clear()
|
||||
table.info_clear()
|
||||
table.info_append(extra_header+[
|
||||
scriptID + ' ' + ' '.join(sys.argv[1:]),
|
||||
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
|
||||
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
|
||||
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
|
||||
"homogenization\t{homog}".format(homog=info['homogenization']),
|
||||
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
|
||||
])
|
||||
table.head_write()
|
||||
|
||||
# --- write microstructure information ------------------------------------------------------------
|
||||
|
||||
formatwidth = int(math.floor(math.log10(np.nanmax(microstructure))+1))
|
||||
table.data = microstructure.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
|
||||
table.data_writeArray('%{}i'.format(formatwidth),delimiter = ' ')
|
||||
|
||||
# --- output finalization --------------------------------------------------------------------------
|
||||
|
||||
table.close() # close ASCII table
|
||||
if name is None:
|
||||
sys.stdout.write(str(geom.show()))
|
||||
else:
|
||||
geom.to_file(name)
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
####################################################################################################
|
||||
# Code below available according to the following conditions on https://github.com/MarDiehl/3Drotations
|
||||
####################################################################################################
|
||||
|
|
|
@ -17,8 +17,7 @@ from .orientation import Symmetry, Lattice, Rotation, Orientation # noqa
|
|||
from .dadf5 import DADF5 # noqa
|
||||
|
||||
#from .block import Block # only one class
|
||||
from .result import Result # noqa
|
||||
from .geometry import Geometry # noqa
|
||||
from .geom import Geom # noqa
|
||||
from .solver import Solver # noqa
|
||||
from .test import Test # noqa
|
||||
from .util import extendableOption # noqa
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import shlex
|
||||
from collections import Iterable
|
||||
|
||||
import os, sys
|
||||
import numpy as np
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
|
@ -80,8 +83,6 @@ class ASCIItable():
|
|||
def _quote(self,
|
||||
what):
|
||||
"""Quote empty or white space-containing output"""
|
||||
import re
|
||||
|
||||
return '{quote}{content}{quote}'.format(
|
||||
quote = ('"' if str(what)=='' or re.search(r"\s",str(what)) else ''),
|
||||
content = what)
|
||||
|
@ -147,8 +148,6 @@ class ASCIItable():
|
|||
by either reading the first row or,
|
||||
if keyword "head[*]" is present, the last line of the header
|
||||
"""
|
||||
import re,shlex
|
||||
|
||||
try:
|
||||
self.__IO__['in'].seek(0)
|
||||
except IOError:
|
||||
|
@ -244,17 +243,6 @@ class ASCIItable():
|
|||
return info,extra_header
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
def head_putGeom(self,info):
|
||||
"""Translate geometry description to header"""
|
||||
self.info_append([
|
||||
"grid\ta {}\tb {}\tc {}".format(*info['grid']),
|
||||
"size\tx {}\ty {}\tz {}".format(*info['size']),
|
||||
"origin\tx {}\ty {}\tz {}".format(*info['origin']),
|
||||
"homogenization\t{}".format(info['homogenization']),
|
||||
"microstructures\t{}".format(info['microstructures']),
|
||||
])
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
def labels_append(self,
|
||||
what,
|
||||
|
@ -287,8 +275,6 @@ class ASCIItable():
|
|||
"x" for "1_x","2_x",... unless raw output is requested.
|
||||
operates on object tags or given list.
|
||||
"""
|
||||
from collections import Iterable
|
||||
|
||||
if tags is None: tags = self.tags
|
||||
|
||||
if isinstance(tags, Iterable) and not raw: # check whether list of tags is requested
|
||||
|
@ -324,8 +310,6 @@ class ASCIItable():
|
|||
return numpy array if asked for list of labels.
|
||||
transparently deals with label positions implicitly given as numbers or their headings given as strings.
|
||||
"""
|
||||
from collections import Iterable
|
||||
|
||||
if isinstance(labels, Iterable) and not isinstance(labels, str): # check whether list of labels is requested
|
||||
idx = []
|
||||
for label in labels:
|
||||
|
@ -365,8 +349,6 @@ class ASCIItable():
|
|||
return numpy array if asked for list of labels.
|
||||
transparently deals with label positions implicitly given as numbers or their headings given as strings.
|
||||
"""
|
||||
from collections import Iterable
|
||||
|
||||
listOfLabels = isinstance(labels, Iterable) and not isinstance(labels, str) # check whether list of labels is requested
|
||||
if not listOfLabels: labels = [labels]
|
||||
|
||||
|
@ -400,8 +382,6 @@ class ASCIItable():
|
|||
return numpy array if asked for list of labels.
|
||||
transparently deals with label positions implicitly given as numbers or their headings given as strings.
|
||||
"""
|
||||
from collections import Iterable
|
||||
|
||||
start = self.label_index(labels)
|
||||
dim = self.label_dimension(labels)
|
||||
|
||||
|
@ -447,8 +427,6 @@ class ASCIItable():
|
|||
advance = True,
|
||||
respectLabels = True):
|
||||
"""Read next line (possibly buffered) and parse it into data array"""
|
||||
import shlex
|
||||
|
||||
self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \
|
||||
else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file
|
||||
|
||||
|
@ -469,8 +447,6 @@ class ASCIItable():
|
|||
def data_readArray(self,
|
||||
labels = []):
|
||||
"""Read whole data of all (given) labels as numpy array"""
|
||||
from collections import Iterable
|
||||
|
||||
try: self.data_rewind() # try to wind back to start of data
|
||||
except: pass # assume/hope we are at data start already...
|
||||
|
||||
|
|
|
@ -1,8 +1,6 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
import math
|
||||
|
||||
import math,numpy as np
|
||||
|
||||
### --- COLOR CLASS --------------------------------------------------
|
||||
import numpy as np
|
||||
|
||||
class Color():
|
||||
"""
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import os,re
|
||||
import os
|
||||
import re
|
||||
|
||||
class Environment():
|
||||
__slots__ = [ \
|
||||
|
|
|
@ -0,0 +1,269 @@
|
|||
import os
|
||||
from io import StringIO
|
||||
|
||||
import numpy as np
|
||||
import vtk
|
||||
from vtk.util import numpy_support
|
||||
|
||||
from . import util
|
||||
from . import version
|
||||
|
||||
|
||||
class Geom():
|
||||
"""Geometry definition for grid solvers"""
|
||||
|
||||
def __init__(self,microstructure,size,origin=[0.0,0.0,0.0],homogenization=1,comments=[]):
|
||||
"""New geometry definition from array of microstructures and size"""
|
||||
self.set_microstructure(microstructure)
|
||||
self.set_size(size)
|
||||
self.set_origin(origin)
|
||||
self.set_homogenization(homogenization)
|
||||
self.set_comments(comments)
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
"""Basic information on geometry definition"""
|
||||
return util.srepr([
|
||||
'grid a b c: {}'.format(' x '.join(map(str,self.get_grid ()))),
|
||||
'size x y z: {}'.format(' x '.join(map(str,self.get_size ()))),
|
||||
'origin x y z: {}'.format(' '.join(map(str,self.get_origin()))),
|
||||
'homogenization: {}'.format(self.get_homogenization()),
|
||||
'# microstructures: {}'.format(len(np.unique(self.microstructure))),
|
||||
'max microstructure: {}'.format(np.nanmax(self.microstructure)),
|
||||
])
|
||||
|
||||
def update(self,microstructure=None,size=None,origin=None,rescale=False):
|
||||
"""Updates microstructure and size"""
|
||||
grid_old = self.get_grid()
|
||||
size_old = self.get_size()
|
||||
origin_old = self.get_origin()
|
||||
unique_old = len(np.unique(self.microstructure))
|
||||
max_old = np.nanmax(self.microstructure)
|
||||
|
||||
if size is not None and rescale:
|
||||
raise ValueError('Either set size explicitly or rescale automatically')
|
||||
|
||||
self.set_microstructure(microstructure)
|
||||
self.set_size(self.get_grid()/grid_old*self.size if rescale else size)
|
||||
self.set_origin(origin)
|
||||
|
||||
message = ['grid a b c: {}'.format(' x '.join(map(str,grid_old)))]
|
||||
if np.any(grid_old != self.get_grid()):
|
||||
message[-1] = util.delete(message[-1])
|
||||
message.append('grid a b c: {}'.format(' x '.join(map(str,self.get_grid()))))
|
||||
|
||||
message.append('size x y z: {}'.format(' x '.join(map(str,size_old))))
|
||||
if np.any(size_old != self.get_size()):
|
||||
message[-1] = util.delete(message[-1])
|
||||
message.append('size x y z: {}'.format(' x '.join(map(str,self.get_size()))))
|
||||
|
||||
message.append('origin x y z: {}'.format(' '.join(map(str,origin_old))))
|
||||
if np.any(origin_old != self.get_origin()):
|
||||
message[-1] = util.delete(message[-1])
|
||||
message.append('origin x y z: {}'.format(' '.join(map(str,self.get_origin()))))
|
||||
|
||||
message.append('homogenization: {}'.format(self.get_homogenization()))
|
||||
|
||||
message.append('# microstructures: {}'.format(unique_old))
|
||||
if unique_old != len(np.unique(self.microstructure)):
|
||||
message[-1] = util.delete(message[-1])
|
||||
message.append('# microstructures: {}'.format(len(np.unique(self.microstructure))))
|
||||
|
||||
message.append('max microstructure: {}'.format(max_old))
|
||||
if max_old != np.nanmax(self.microstructure):
|
||||
message[-1] = util.delete(message[-1])
|
||||
message.append('max microstructure: {}'.format(np.nanmax(self.microstructure)))
|
||||
|
||||
return util.return_message(message)
|
||||
|
||||
def set_comments(self,comments):
|
||||
self.comments = []
|
||||
self.add_comments(comments)
|
||||
|
||||
def add_comments(self,comments):
|
||||
self.comments += [str(c) for c in comments] if isinstance(comments,list) else [str(comments)]
|
||||
|
||||
def set_microstructure(self,microstructure):
|
||||
if microstructure is not None:
|
||||
if len(microstructure.shape) != 3:
|
||||
raise ValueError('Invalid microstructure shape {}'.format(microstructure.shape))
|
||||
elif microstructure.dtype not in np.sctypes['float'] + np.sctypes['int']:
|
||||
raise TypeError('Invalid data type {} for microstructure'.format(microstructure.dtype))
|
||||
else:
|
||||
self.microstructure = np.copy(microstructure)
|
||||
|
||||
def set_size(self,size):
|
||||
if size is None:
|
||||
grid = np.asarray(self.microstructure.shape)
|
||||
self.size = grid/np.max(grid)
|
||||
else:
|
||||
if len(size) != 3 or any(np.array(size)<=0):
|
||||
raise ValueError('Invalid size {}'.format(size))
|
||||
else:
|
||||
self.size = np.array(size)
|
||||
|
||||
def set_origin(self,origin):
|
||||
if origin is not None:
|
||||
if len(origin) != 3:
|
||||
raise ValueError('Invalid origin {}'.format(origin))
|
||||
else:
|
||||
self.origin = np.array(origin)
|
||||
|
||||
def set_homogenization(self,homogenization):
|
||||
if homogenization is not None:
|
||||
if not isinstance(homogenization,int) or homogenization < 1:
|
||||
raise TypeError('Invalid homogenization {}'.format(homogenization))
|
||||
else:
|
||||
self.homogenization = homogenization
|
||||
|
||||
|
||||
def get_microstructure(self):
|
||||
return np.copy(self.microstructure)
|
||||
|
||||
def get_size(self):
|
||||
return np.copy(self.size)
|
||||
|
||||
def get_origin(self):
|
||||
return np.copy(self.origin)
|
||||
|
||||
def get_grid(self):
|
||||
return np.array(self.microstructure.shape)
|
||||
|
||||
def get_homogenization(self):
|
||||
return self.homogenization
|
||||
|
||||
def get_comments(self):
|
||||
return self.comments[:]
|
||||
|
||||
def get_header(self):
|
||||
header = ['{} header'.format(len(self.comments)+4)] + self.comments
|
||||
header.append('grid a {} b {} c {}'.format(*self.get_grid()))
|
||||
header.append('size x {} y {} z {}'.format(*self.get_size()))
|
||||
header.append('origin x {} y {} z {}'.format(*self.get_origin()))
|
||||
header.append('homogenization {}'.format(self.get_homogenization()))
|
||||
return header
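For orientation, with an assumed 2 x 2 x 1 grid, unit size, default origin and no comments, get_header() would return (sketch):

# ['4 header',
#  'grid a 2 b 2 c 1',
#  'size x 1.0 y 1.0 z 1.0',
#  'origin x 0.0 y 0.0 z 0.0',
#  'homogenization 1']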
|
||||
|
||||
@classmethod
|
||||
def from_file(cls,fname):
|
||||
"""Reads a geom file"""
|
||||
with (open(fname) if isinstance(fname,str) else fname) as f:
|
||||
f.seek(0)
|
||||
header_length,keyword = f.readline().split()[:2]
|
||||
header_length = int(header_length)
|
||||
content = f.readlines()
|
||||
|
||||
if not keyword.startswith('head') or header_length < 3:
|
||||
raise TypeError('Header length information missing or invalid')
|
||||
|
||||
comments = []
|
||||
for i,line in enumerate(content[:header_length]):
|
||||
items = line.lower().strip().split()
|
||||
key = items[0] if len(items) > 0 else ''
|
||||
if key == 'grid':
|
||||
grid = np.array([ int(dict(zip(items[1::2],items[2::2]))[i]) for i in ['a','b','c']])
|
||||
elif key == 'size':
|
||||
size = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
|
||||
elif key == 'origin':
|
||||
origin = np.array([float(dict(zip(items[1::2],items[2::2]))[i]) for i in ['x','y','z']])
|
||||
elif key == 'homogenization':
|
||||
homogenization = int(items[1])
|
||||
else:
|
||||
comments.append(line.strip())
|
||||
|
||||
microstructure = np.empty(grid.prod()) # initialize as flat array
|
||||
i = 0
|
||||
for line in content[header_length:]:
|
||||
items = line.split()
|
||||
if len(items) == 3:
|
||||
if items[1].lower() == 'of':
|
||||
items = np.ones(int(items[0]))*float(items[2])
|
||||
elif items[1].lower() == 'to':
|
||||
items = np.linspace(int(items[0]),int(items[2]),
|
||||
abs(int(items[2])-int(items[0]))+1,dtype=float)
|
||||
else: items = list(map(float,items))
|
||||
else: items = list(map(float,items))
|
||||
|
||||
microstructure[i:i+len(items)] = items
|
||||
i += len(items)
|
||||
|
||||
if i != grid.prod():
|
||||
raise TypeError('Invalid file: expected {} entries, found {}'.format(grid.prod(),i))
|
||||
|
||||
microstructure = microstructure.reshape(grid,order='F')
|
||||
if not np.any(np.mod(microstructure.flatten(),1) != 0.0): # no float present
|
||||
microstructure = microstructure.astype('int')
|
||||
|
||||
return cls(microstructure.reshape(grid),size,origin,homogenization,comments)
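The loop above expands the compressed geom notation on the fly: "n of x" repeats a value, "a to b" spells out an inclusive range. A small standalone sketch of that branch with made-up lines:

import numpy as np

for line in ['3 of 2', '5 to 8', '1 1 4']:            # hypothetical data lines
    items = line.split()
    if len(items) == 3 and items[1].lower() == 'of':
        items = np.ones(int(items[0]))*float(items[2])
    elif len(items) == 3 and items[1].lower() == 'to':
        items = np.linspace(int(items[0]), int(items[2]),
                            abs(int(items[2])-int(items[0]))+1, dtype=float)
    else:
        items = list(map(float, items))
    print(list(items))   # [2.0, 2.0, 2.0] / [5.0, 6.0, 7.0, 8.0] / [1.0, 1.0, 4.0]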
|
||||
|
||||
def to_file(self,fname):
|
||||
"""Writes to file"""
|
||||
header = self.get_header()
|
||||
grid = self.get_grid()
|
||||
format_string = '%{}i'.format(1+int(np.floor(np.log10(np.nanmax(self.microstructure))))) if self.microstructure.dtype == int \
|
||||
else '%g'
|
||||
np.savetxt(fname,
|
||||
self.microstructure.reshape([grid[0],np.prod(grid[1:])],order='F').T,
|
||||
header='\n'.join(header), fmt=format_string, comments='')
|
||||
|
||||
|
||||
|
||||
def to_vtk(self,fname=None):
|
||||
"""Generates vtk file. If file name is given, stored in file otherwise returned as string"""
|
||||
grid = self.get_grid() + np.ones(3,dtype=int)
|
||||
size = self.get_size()
|
||||
origin = self.get_origin()
|
||||
|
||||
coords = [
|
||||
np.linspace(0,size[0],grid[0]) + origin[0],
|
||||
np.linspace(0,size[1],grid[1]) + origin[1],
|
||||
np.linspace(0,size[2],grid[2]) + origin[2]
|
||||
]
|
||||
|
||||
rGrid = vtk.vtkRectilinearGrid()
|
||||
coordArray = [vtk.vtkDoubleArray(),vtk.vtkDoubleArray(),vtk.vtkDoubleArray()]
|
||||
|
||||
rGrid.SetDimensions(*grid)
|
||||
for d,coord in enumerate(coords):
|
||||
for c in coord:
|
||||
coordArray[d].InsertNextValue(c)
|
||||
|
||||
rGrid.SetXCoordinates(coordArray[0])
|
||||
rGrid.SetYCoordinates(coordArray[1])
|
||||
rGrid.SetZCoordinates(coordArray[2])
|
||||
|
||||
ms = numpy_support.numpy_to_vtk(num_array=self.microstructure.flatten(order='F'),
|
||||
array_type=vtk.VTK_INT if self.microstructure.dtype == int else vtk.VTK_FLOAT)
|
||||
ms.SetName('microstructure')
|
||||
rGrid.GetCellData().AddArray(ms)
|
||||
|
||||
|
||||
if fname is None:
|
||||
writer = vtk.vtkDataSetWriter()
|
||||
writer.SetHeader('damask.Geom '+version)
|
||||
writer.WriteToOutputStringOn()
|
||||
else:
|
||||
writer = vtk.vtkXMLRectilinearGridWriter()
|
||||
writer.SetCompressorTypeToZLib()
|
||||
writer.SetDataModeToBinary()
|
||||
|
||||
ext = os.path.splitext(fname)[1]
|
||||
if ext == '':
|
||||
name = fname + '.' + writer.GetDefaultFileExtension()
|
||||
elif ext == writer.GetDefaultFileExtension():
|
||||
name = fname
|
||||
else:
|
||||
raise ValueError("unknown extension {}".format(ext))
|
||||
writer.SetFileName(name)
|
||||
|
||||
writer.SetInputData(rGrid)
|
||||
writer.Write()
|
||||
|
||||
if fname is None: return writer.GetOutputString()
|
||||
|
||||
|
||||
def show(self):
|
||||
"""Show raw content (as in file)"""
|
||||
f=StringIO()
|
||||
self.to_file(f)
|
||||
f.seek(0)
|
||||
return ''.join(f.readlines())
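A compact usage sketch of the Geom class defined above (all names and values made up); it exercises the constructor, update, the geom writer and the vtk export:

import numpy as np
import damask

m    = np.ones((4,4,4), dtype=int); m[2:,2:,2:] = 2     # hypothetical two-grain box
geom = damask.Geom(m, size=[1.,1.,1.], comments='demo')

damask.util.croak(geom)                                 # grid/size/origin summary via __repr__
damask.util.croak(geom.update(np.ones((8,8,8), dtype=int), rescale=True))  # finer grid, size rescaled

geom.to_file('demo.geom')                               # ASCII geom file with header
geom.to_vtk('demo')                                     # demo.vtr (rectilinear grid)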
|
|
@ -1,7 +0,0 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
"""Aggregator for geometry handling"""
|
||||
|
||||
from .geometry import Geometry # noqa
|
||||
from .spectral import Spectral # noqa
|
||||
from .marc import Marc # noqa
|
|
@ -1,21 +0,0 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
|
||||
import damask.geometry
|
||||
|
||||
class Geometry():
|
||||
"""
|
||||
General class for geometry parsing.
|
||||
|
||||
Sub-classed by the individual solvers.
|
||||
"""
|
||||
|
||||
def __init__(self,solver=''):
|
||||
solverClass = {
|
||||
'spectral': damask.geometry.Spectral,
|
||||
'marc': damask.geometry.Marc,
|
||||
}
|
||||
if solver.lower() in list(solverClass.keys()):
|
||||
self.__class__=solverClass[solver.lower()]
|
||||
self.__init__()
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
|
||||
from .geometry import Geometry
|
||||
|
||||
class Marc(Geometry):
|
||||
|
||||
def __init__(self):
|
||||
self.solver='Marc'
|
|
@ -1,9 +0,0 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
|
||||
from .geometry import Geometry
|
||||
|
||||
class Spectral(Geometry):
|
||||
|
||||
def __init__(self):
|
||||
self.solver='Spectral'
|
|
@ -1,7 +1,7 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import math
|
||||
|
||||
import numpy as np
|
||||
|
||||
from . import Lambert
|
||||
from .quaternion import Quaternion
|
||||
from .quaternion import P
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import numpy as np
|
||||
|
||||
P = -1 # convention (see DOI:10.1088/0965-0393/23/8/083501)
|
||||
|
|
|
@ -1,41 +0,0 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
|
||||
import numpy as np
|
||||
|
||||
try:
|
||||
import h5py
|
||||
except (ImportError) as e:
|
||||
pass # sys.stderr.write('\nREMARK: h5py module not available \n\n')
|
||||
|
||||
class Result():
|
||||
"""
|
||||
General class for result parsing.
|
||||
|
||||
Needs h5py to be installed
|
||||
"""
|
||||
|
||||
def __init__(self,resultsFile):
|
||||
self.data=h5py.File(resultsFile,"r")
|
||||
self.Npoints=self.data.attrs['Number of Materialpoints']
|
||||
print("Opened {} with {} points".format(resultsFile,self.Npoints))
|
||||
|
||||
def getCrystallite(self,labels,inc,constituent=1,points=None):
|
||||
if points is None: points = np.array(np.array(range(self.Npoints)))
|
||||
results = {}
|
||||
mapping=self.data['mapping/crystallite']
|
||||
for instance in self.data['increments/%s/crystallite/'%inc]:
|
||||
dsets = list(self.data['increments/%s/crystallite/%s'%(inc,instance)].keys())
|
||||
for label in labels:
|
||||
if label in dsets and label not in results:
|
||||
shape = np.shape(self.data['increments/%s/crystallite/%s/%s'%(inc,instance,label)])[1:]
|
||||
results[label] = np.nan*np.ones(np.array((self.Npoints,)+shape))
|
||||
for myPoint in range(len(points)):
|
||||
matPoint = points[myPoint]
|
||||
pos = mapping[matPoint,constituent-1]
|
||||
if pos[0] != 0:
|
||||
try:
|
||||
for label in labels:
|
||||
results[label][matPoint,...] = self.data['increments/%s/crystallite/%s/%s'%(inc,pos[0],label)][pos[1],...]
|
||||
except:
|
||||
pass
|
||||
return results
|
|
@ -1,11 +1,15 @@
|
|||
# -*- coding: UTF-8 no BOM -*-
|
||||
import sys,time,os,subprocess,shlex
|
||||
import numpy as np
|
||||
import sys
|
||||
import time
|
||||
import os
|
||||
import subprocess
|
||||
import shlex
|
||||
from optparse import Option
|
||||
from queue import Queue
|
||||
from threading import Thread
|
||||
|
||||
|
||||
import numpy as np
|
||||
|
||||
class bcolors:
|
||||
"""
|
||||
ASCII Colors (Blender code)
|
||||
|
@ -23,6 +27,7 @@ class bcolors:
|
|||
BOLD = '\033[1m'
|
||||
DIM = '\033[2m'
|
||||
UNDERLINE = '\033[4m'
|
||||
CROSSOUT = '\033[9m'
|
||||
|
||||
def disable(self):
|
||||
self.HEADER = ''
|
||||
|
@ -33,6 +38,7 @@ class bcolors:
|
|||
self.ENDC = ''
|
||||
self.BOLD = ''
|
||||
self.UNDERLINE = ''
|
||||
self.CROSSOUT = ''
|
||||
|
||||
|
||||
# -----------------------------
|
||||
|
@ -47,6 +53,7 @@ def srepr(arg,glue = '\n'):
|
|||
# -----------------------------
|
||||
def croak(what, newline = True):
|
||||
"""Writes formated to stderr"""
|
||||
if what is not None:
|
||||
sys.stderr.write(srepr(what,glue = '\n') + ('\n' if newline else ''))
|
||||
sys.stderr.flush()
|
||||
|
||||
|
@ -54,7 +61,7 @@ def croak(what, newline = True):
|
|||
def report(who = None,
|
||||
what = None):
|
||||
"""Reports script and file name"""
|
||||
croak( (emph(who)+': ' if who is not None else '') + (what if what is not None else '') )
|
||||
croak( (emph(who)+': ' if who is not None else '') + (what if what is not None else '') + '\n' )
|
||||
|
||||
|
||||
# -----------------------------
|
||||
|
@ -85,6 +92,11 @@ def delete(what):
|
|||
"""Dims string"""
|
||||
return bcolors.DIM+srepr(what)+bcolors.ENDC
|
||||
|
||||
# -----------------------------
|
||||
def strikeout(what):
|
||||
"""Dims string"""
|
||||
return bcolors.CROSSOUT+srepr(what)+bcolors.ENDC
|
||||
|
||||
# -----------------------------
|
||||
def execute(cmd,
|
||||
streamIn = None,
|
||||
|
@ -113,6 +125,19 @@ def coordGridAndSize(coordinates):
|
|||
grid = np.array(list(map(len,coords)),'i')
|
||||
size = grid/np.maximum(np.ones(dim,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
|
||||
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other ones
|
||||
delta = size/grid
|
||||
|
||||
N = grid.prod()
|
||||
|
||||
if N != len(coordinates):
|
||||
raise ValueError('Data count {} does not match grid {}.'.format(len(coordinates),' x '.join(map(repr,grid))))
|
||||
|
||||
if np.any(np.abs(np.log10((coords[0][1:]-coords[0][:-1])/delta[0])) > 0.01) \
|
||||
or np.any(np.abs(np.log10((coords[1][1:]-coords[1][:-1])/delta[1])) > 0.01):
|
||||
raise ValueError('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
|
||||
if dim==3 and np.any(np.abs(np.log10((coords[2][1:]-coords[2][:-1])/delta[2])) > 0.01):
|
||||
raise ValueError('regular grid spacing {} violated.'.format(' x '.join(map(repr,delta))))
|
||||
|
||||
return grid,size
|
||||
|
||||
# -----------------------------
|
||||
|
@ -173,6 +198,17 @@ def progressBar(iteration, total, prefix='', bar_length=50):
|
|||
sys.stderr.flush()
|
||||
|
||||
|
||||
class return_message():
|
||||
"""Object with formatted return message"""
|
||||
|
||||
def __init__(self,message):
|
||||
self.message = message
|
||||
|
||||
def __repr__(self):
|
||||
"""Return message suitable for interactive shells"""
|
||||
return srepr(self.message)
|
||||
|
||||
|
||||
def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0,
|
||||
col_deriv=0, ftol=1.49012e-8, xtol=1.49012e-8,
|
||||
gtol=0.0, maxfev=0, epsfcn=None, factor=100, diag=None):
|
||||
|
|
|
@ -491,7 +491,7 @@ module lattice
|
|||
end enum
|
||||
|
||||
integer(kind(LATTICE_undefined_ID)), dimension(:), allocatable, public, protected :: &
|
||||
lattice_structure, trans_lattice_structure
|
||||
lattice_structure
|
||||
|
||||
|
||||
interface lattice_forestProjection ! DEPRECATED, use lattice_forestProjection_edge
|
||||
|
@ -557,7 +557,6 @@ subroutine lattice_init
|
|||
Nphases = size(config_phase)
|
||||
|
||||
allocate(lattice_structure(Nphases),source = LATTICE_undefined_ID)
|
||||
allocate(trans_lattice_structure(Nphases),source = LATTICE_undefined_ID)
|
||||
allocate(lattice_C66(6,6,Nphases), source=0.0_pReal)
|
||||
allocate(lattice_C3333(3,3,3,3,Nphases), source=0.0_pReal)
|
||||
|
||||
|
@ -595,14 +594,6 @@ subroutine lattice_init
|
|||
lattice_structure(p) = LATTICE_ort_ID
|
||||
end select
|
||||
|
||||
tag = 'undefined'
|
||||
tag = config_phase(p)%getString('trans_lattice_structure',defaultVal=tag)
|
||||
select case(trim(tag))
|
||||
case('bcc')
|
||||
trans_lattice_structure(p) = LATTICE_bcc_ID
|
||||
case('hex','hexagonal')
|
||||
trans_lattice_structure(p) = LATTICE_hex_ID
|
||||
end select
|
||||
|
||||
lattice_C66(1,1,p) = config_phase(p)%getFloat('c11',defaultVal=0.0_pReal)
|
||||
lattice_C66(1,2,p) = config_phase(p)%getFloat('c12',defaultVal=0.0_pReal)
|
||||
|
|