Merge branch 'development' of magit1.mpie.de:damask/DAMASK into miscImprovements

Martin Diehl 2016-04-10 15:40:52 +02:00
commit b1d714581c
12 changed files with 288 additions and 119 deletions

View File

@@ -2,7 +2,7 @@
# usage: source DAMASK_env.sh
if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == 'linux' ]; then
DAMASK_ROOT=$(readlink -f "`dirname $BASH_SOURCE`")
DAMASK_ROOT=$(python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "`dirname $BASH_SOURCE`")
else
[[ "${BASH_SOURCE::1}" == "/" ]] && BASE="" || BASE="`pwd`/"
STAT=$(stat "`dirname $BASE$BASH_SOURCE`")
@@ -18,11 +18,11 @@ fi
SOLVER=`which DAMASK_spectral 2>/dev/null`
if [ "x$SOLVER" == "x" ]; then
export SOLVER='Not found!'
SOLVER='Not found!'
fi
PROCESSING=`which postResults 2>/dev/null`
if [ "x$PROCESSING" == "x" ]; then
export PROCESSING='Not found!'
PROCESSING='Not found!'
fi
# according to http://software.intel.com/en-us/forums/topic/501500
@@ -55,7 +55,8 @@ if [ ! -z "$PS1" ]; then
echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
if [ "x$PETSC_DIR" != "x" ]; then
echo "PETSc location $PETSC_DIR"
[[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
[[ `python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "$PETSC_DIR"` == $PETSC_DIR ]] \
|| echo " ~~> "`python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "$PETSC_DIR"`
fi
[[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
echo "MSC.Marc/Mentat $MSC_ROOT"

View File

@@ -13,7 +13,8 @@ program DAMASK_spectral
pInt, &
pLongInt, &
pReal, &
tol_math_check
tol_math_check, &
dNeq
use DAMASK_interface, only: &
DAMASK_interface_init, &
loadCaseFile, &
@@ -147,7 +148,9 @@ program DAMASK_spectral
MPI_file_seek, &
MPI_file_get_position, &
MPI_file_write, &
MPI_allreduce
MPI_abort, &
MPI_allreduce, &
PETScFinalize
!--------------------------------------------------------------------------------------------------
! init DAMASK (all modules)
@@ -339,7 +342,7 @@ program DAMASK_spectral
reshape(spread(tol_math_check,1,9),[ 3,3]))&
.or. abs(math_det33(loadCases(currentLoadCase)%rotation)) > &
1.0_pReal + tol_math_check) errorID = 846_pInt ! given rotation matrix contains strain
if (any(loadCases(currentLoadCase)%rotation /= math_I3)) &
if (any(dNeq(loadCases(currentLoadCase)%rotation, math_I3))) &
write(6,'(2x,a,/,3(3(3x,f12.7,1x)/))',advance='no') 'rotation of loadframe:',&
math_transpose33(loadCases(currentLoadCase)%rotation)
if (loadCases(currentLoadCase)%time < 0.0_pReal) errorID = 834_pInt ! negative time increment
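
The direct /= comparison of the rotation matrix against math_I3 is replaced by dNeq, the tolerance-aware inequality exported from the prec module later in this commit. The Python sketch below only illustrates the idea of comparing reals with a relative tolerance; the function name and the exact tolerance rule are assumptions for illustration, not the Fortran implementation.

    import numpy as np

    # Tolerance-aware inequality for reals (elementwise on arrays): two values count
    # as equal when their difference is small relative to their magnitudes.
    def dNeq(a, b, tol=1.0e-8):
        a, b = np.asarray(a, dtype=float), np.asarray(b, dtype=float)
        return np.abs(a - b) > tol * np.maximum(np.abs(a), np.abs(b))

    # report a load-frame rotation only if it really differs from the identity
    rotation = np.eye(3)
    if np.any(dNeq(rotation, np.eye(3))):
        print('rotation of loadframe:\n', rotation.T)
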
@@ -423,17 +426,21 @@ program DAMASK_spectral
!--------------------------------------------------------------------------------------------------
! prepare MPI parallel out (including opening of file)
allocate(outputSize(worldsize), source = 0_MPI_OFFSET_KIND)
outputSize(worldrank+1) = int(size(materialpoint_results)*pReal,MPI_OFFSET_KIND)
outputSize(worldrank+1) = size(materialpoint_results,kind=MPI_OFFSET_KIND)*int(pReal,MPI_OFFSET_KIND)
call MPI_allreduce(MPI_IN_PLACE,outputSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get total output size over each process
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_allreduce')
call MPI_file_open(PETSC_COMM_WORLD, &
trim(getSolverWorkingDirectoryName())//trim(getSolverJobName())//'.spectralOut', &
MPI_MODE_WRONLY + MPI_MODE_APPEND, &
MPI_INFO_NULL, &
resUnit, &
ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_open')
call MPI_file_get_position(resUnit,fileOffset,ierr) ! get offset from header
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_get_position')
fileOffset = fileOffset + sum(outputSize(1:worldrank)) ! offset of my process in file (header + processes before me)
call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
if (.not. appendToOutFile) then ! if not restarting, write 0th increment
do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
@@ -443,6 +450,7 @@ program DAMASK_spectral
[(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
enddo
fileOffset = fileOffset + sum(outputSize) ! forward to current file position
if (worldrank == 0) &
@@ -643,6 +651,7 @@ program DAMASK_spectral
write(6,'(1/,a)') ' ... writing results to file ......................................'
call materialpoint_postResults()
call MPI_file_seek (resUnit,fileOffset,MPI_SEEK_SET,ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_seek')
do i=1, size(materialpoint_results,3)/(maxByteOut/(materialpoint_sizeResults*pReal))+1 ! slice the output of my process in chunks not exceeding the limit for one output
outputIndex=int([(i-1_pInt)*((maxByteOut/pReal)/materialpoint_sizeResults)+1_pInt, &
min(i*((maxByteOut/pReal)/materialpoint_sizeResults),size(materialpoint_results,3))],pLongInt)
@@ -650,6 +659,7 @@ program DAMASK_spectral
[(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults]), &
(outputIndex(2)-outputIndex(1)+1)*materialpoint_sizeResults,&
MPI_DOUBLE, MPI_STATUS_IGNORE, ierr)
if(ierr /=0_pInt) call IO_error(894_pInt, ext_msg='MPI_file_write')
enddo
fileOffset = fileOffset + sum(outputSize) ! forward to current file position
endif
@@ -698,7 +708,7 @@ program DAMASK_spectral
enddo
call utilities_destroy()
call PetscFinalize(ierr); CHKERRQ(ierr)
call PETScFinalize(ierr); CHKERRQ(ierr)
if (notConvergedCounter > 0_pInt) call quit(3_pInt) ! error if some are not converged
call quit(0_pInt) ! no complains ;)
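
Most of the additions in this file are return-code checks (IO_error 894) after each MPI call plus the per-rank offset bookkeeping for the parallel .spectralOut file: every rank seeks to the header offset plus the sizes of all lower-ranked processes and writes its slice in chunks that stay below maxByteOut. The pure-Python sketch below reproduces only that arithmetic; write_plan and all numbers are illustrative, no MPI involved.

    # Per-rank output bookkeeping: start offset = header bytes + bytes of all ranks
    # before me; the data are then written in chunks that never exceed a byte limit.
    def write_plan(header_bytes, bytes_per_rank, my_rank, record_bytes, max_bytes_out):
        start = header_bytes + sum(bytes_per_rank[:my_rank])
        my_records = bytes_per_rank[my_rank] // record_bytes
        per_chunk = max(1, max_bytes_out // record_bytes)
        chunks = []
        done = 0
        while done < my_records:
            chunks.append(min(per_chunk, my_records - done))
            done += chunks[-1]
        return start, chunks

    # example: 3 ranks, 8-byte records, at most 32 bytes per write call
    print(write_plan(header_bytes=100, bytes_per_rank=[80, 64, 96],
                     my_rank=1, record_bytes=8, max_bytes_out=32))   # -> (180, [4, 4])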

View File

@@ -1669,6 +1669,8 @@ subroutine IO_error(error_ID,el,ip,g,ext_msg)
msg = 'unknown filter type selected'
case (893_pInt)
msg = 'PETSc: SNES_DIVERGED_FNORM_NAN'
case (894_pInt)
msg = 'MPI error'
!-------------------------------------------------------------------------------------------------
! error messages related to parsing of Abaqus input file

View File

@@ -1280,7 +1280,7 @@ subroutine material_populateGrains
integer(pInt) :: t,e,i,g,j,m,c,r,homog,micro,sgn,hme, myDebug, &
phaseID,textureID,dGrains,myNgrains,myNorientations,myNconstituents, &
grain,constituentGrain,ipGrain,symExtension, ip
real(pReal) :: extreme,rnd
real(pReal) :: deviation,extreme,rnd
integer(pInt), dimension (:,:), allocatable :: Nelems ! counts number of elements in homog, micro array
type(p_intvec), dimension (:,:), allocatable :: elemsOfHomogMicro ! lists element number in homog, micro array
@@ -1407,8 +1407,11 @@ subroutine material_populateGrains
extreme = 0.0_pReal
t = 0_pInt
do i = 1_pInt,myNconstituents ! find largest deviator
if (real(sgn,pReal)*log(NgrainsOfConstituent(i)/myNgrains/microstructure_fraction(i,micro)) > extreme) then
extreme = real(sgn,pReal)*log(NgrainsOfConstituent(i)/myNgrains/microstructure_fraction(i,micro))
deviation = real(sgn,pReal)*log( microstructure_fraction(i,micro) / &
!-------------------------------- &
(real(NgrainsOfConstituent(i),pReal)/real(myNgrains,pReal) ) )
if (deviation > extreme) then
extreme = deviation
t = i
endif
enddo
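
The rewritten loop gives the deviation a name and measures it as a signed log ratio of target microstructure fraction to realized grain fraction, keeping the constituent with the largest value. A compact Python sketch of that selection (function name and example numbers are illustrative):

    import math

    # Find the constituent whose realized grain fraction deviates most, in the
    # direction given by sgn, from its target fraction; returns its 0-based index
    # (or None if nothing deviates in that direction).
    def largest_deviator(target_fraction, grains_of_constituent, total_grains, sgn):
        extreme, which = 0.0, None
        for i, (target, n) in enumerate(zip(target_fraction, grains_of_constituent)):
            deviation = sgn * math.log(target / (n / float(total_grains)))
            if deviation > extreme:
                extreme, which = deviation, i
        return which

    # constituent 1 is under-represented, so sgn = +1 singles it out
    print(largest_deviator([0.5, 0.3, 0.2], [55, 25, 20], 100, sgn=+1))   # -> 1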

View File

@@ -113,7 +113,9 @@ module prec
public :: &
prec_init, &
prec_isNaN
prec_isNaN, &
dEq, &
dNeq
contains

View File

@@ -40,12 +40,12 @@ class ASCIItable():
self.__IO__['in'] = name
try:
self.__IO__['out'] = (open(outname,'w') if (not os.path.isfile(outname) \
or os.access( outname, os.W_OK) \
) \
and (not self.__IO__['inPlace'] \
or not os.path.isfile(name) \
or os.access( name, os.W_OK) \
self.__IO__['out'] = (open(outname,'w') if (not os.path.isfile(outname) or
os.access( outname, os.W_OK)
) and
(not self.__IO__['inPlace'] or
not os.path.isfile(name) or
os.access( name, os.W_OK)
) else None) if outname else sys.stdout
except TypeError:
self.__IO__['out'] = outname
@@ -272,7 +272,7 @@ class ASCIItable():
for label in labels:
if label is not None:
try:
idx.append(int(label)) # column given as integer number?
idx.append(int(label)-1) # column given as integer number?
except ValueError:
try:
idx.append(self.labels.index(label)) # locate string in label list
@@ -283,7 +283,7 @@ class ASCIItable():
idx.append(-1) # not found...
else:
try:
idx = int(labels)
idx = int(labels)-1 # offset for python array indexing
except ValueError:
try:
idx = self.labels.index(labels)
@@ -293,7 +293,7 @@ class ASCIItable():
except ValueError:
idx = None if labels is None else -1
return np.array(idx) if isinstance(idx,list) else idx
return np.array(idx) if isinstance(idx,Iterable) else idx
# ------------------------------------------------------------------
def label_dimension(self,
@@ -312,7 +312,7 @@ class ASCIItable():
if label is not None:
myDim = -1
try: # column given as number?
idx = int(label)
idx = int(label)-1
myDim = 1 # if found has at least dimension 1
if self.labels[idx].startswith('1_'): # column has multidim indicator?
while idx+myDim < len(self.labels) and self.labels[idx+myDim].startswith("%i_"%(myDim+1)):
@@ -331,7 +331,7 @@ class ASCIItable():
dim = -1 # assume invalid label
idx = -1
try: # column given as number?
idx = int(labels)
idx = int(labels)-1
dim = 1 # if found has at least dimension 1
if self.labels[idx].startswith('1_'): # column has multidim indicator?
while idx+dim < len(self.labels) and self.labels[idx+dim].startswith("%i_"%(dim+1)):
@@ -345,7 +345,7 @@ class ASCIItable():
while idx+dim < len(self.labels) and self.labels[idx+dim].startswith("%i_"%(dim+1)):
dim += 1 # keep adding while going through object
return np.array(dim) if isinstance(dim,list) else dim
return np.array(dim) if isinstance(dim,Iterable) else dim
# ------------------------------------------------------------------
def label_indexrange(self,
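
The asciitable changes above shift integer column addressing to be 1-based on the user side (matching the printed header) while staying 0-based internally, and make the array-vs-scalar return depend on whether an Iterable was passed. A simplified, standalone sketch of the new resolution rule (not the actual ASCIItable.label_index method):

    # Resolve a column request: an integer is a 1-based column number and is converted
    # to the 0-based index used internally; a string is looked up in the label list;
    # -1 signals "not found", as in the class above.
    def label_index(labels, label):
        try:
            return int(label) - 1                 # column given as (1-based) number?
        except ValueError:
            try:
                return labels.index(label)        # locate string in label list
            except ValueError:
                return -1                         # not found...

    header = ['1_pos', '2_pos', '3_pos', 'phase']
    print(label_index(header, 'phase'))    # -> 3
    print(label_index(header, '1'))        # -> 0   (first column)
    print(label_index(header, 'grainID'))  # -> -1  (not present)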

Binary file changed (image): 80 KiB before, 34 KiB after; contents not shown.

View File

@@ -37,9 +37,13 @@ if options.label is None:
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
outname = options.label+'_averaged_'+name if name else name,
damask.util.croak(name)
try: table = damask.ASCIItable(name = name,
outname = os.path.join(
os.path.split(name)[0],
options.label+'_averaged_'+os.path.split(name)[1]
) if name else name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
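
The output file is now written next to the input file, with the '<label>_averaged_' prefix applied to the file name only rather than to the whole path. A small sketch of that naming rule (helper name and example path are made up):

    import os

    # Keep the output in the input file's directory and prefix only the file name.
    def averaged_name(name, label):
        head, tail = os.path.split(name)
        return os.path.join(head, label + '_averaged_' + tail)

    print(averaged_name('simulation/run1/results.txt', 'stress'))
    # -> simulation/run1/stress_averaged_results.txt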

processing/post/histogram.py (new executable file, 142 lines added)
View File

@@ -0,0 +1,142 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys
import numpy as np
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Generate histogram of N bins in given data range.
""", version = scriptID)
parser.add_option('-d','--data',
dest = 'data',
type = 'string', metavar = 'string',
help = 'column heading for data')
parser.add_option('-w','--weights',
dest = 'weights',
type = 'string', metavar = 'string',
help = 'column heading for weights')
parser.add_option('--range',
dest = 'range',
type = 'float', nargs = 2, metavar = 'float float',
help = 'data range of histogram [min - max]')
parser.add_option('-N',
dest = 'N',
type = 'int', metavar = 'int',
help = 'number of bins')
parser.add_option('--density',
dest = 'density',
action = 'store_true',
help = 'report probability density')
parser.add_option('--logarithmic',
dest = 'log',
action = 'store_true',
help = 'logarithmically spaced bins')
parser.set_defaults(data = None,
weights = None,
range = None,
N = None,
density = False,
log = False,
)
(options,filenames) = parser.parse_args()
if not options.data: parser.error('no data specified.')
if not options.N: parser.error('no bin number specified.')
if options.log:
def forward(x):
return np.log(x)
def reverse(x):
return np.exp(x)
else:
def forward(x):
return x
def reverse(x):
return x
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
readonly = True)
except: continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table.head_read()
# ------------------------------------------ sanity checks ----------------------------------------
errors = []
remarks = []
if table.label_dimension(options.data) != 1: errors.append('data {} are not scalar.'.format(options.data))
if options.weights and \
table.label_dimension(options.data) != 1: errors.append('weights {} are not scalar.'.format(options.weights))
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --------------- read data ----------------------------------------------------------------
table.data_readArray([options.data,options.weights])
# --------------- auto range ---------------------------------------------------------------
if options.range is None:
rangeMin,rangeMax = min(table.data[:,0]),max(table.data[:,0])
else:
rangeMin,rangeMax = min(options.range),max(options.range)
# --------------- bin data ----------------------------------------------------------------
count,edges = np.histogram(table.data[:,0],
bins = reverse(forward(rangeMin) + np.arange(options.N+1) *
(forward(rangeMax)-forward(rangeMin))/options.N),
range = (rangeMin,rangeMax),
weights = None if options.weights is None else table.data[:,1],
density = options.density,
)
bincenter = reverse(forward(rangeMin) + (0.5+np.arange(options.N)) *
(forward(rangeMax)-forward(rangeMin))/options.N) # determine center of bins
# ------------------------------------------ assemble header ---------------------------------------
table.info_clear()
table.info_append([scriptID + '\t' + ' '.join(sys.argv[1:]),
scriptID + ':\t' +
'data range {} -- {}'.format(rangeMin,rangeMax) +
(' (log)' if options.log else ''),
])
table.labels_clear()
table.labels_append(['bincenter','count'])
table.head_write()
# ------------------------------------------ output result -----------------------------------------
table.data = np.squeeze(np.dstack((bincenter,count)))
table.data_writeArray()
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
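
The interesting part of the new histogram.py is the binning scheme: the data range is mapped through a forward transform (identity or log), N+1 equally spaced edges are placed there, and the edges are mapped back, so linear and logarithmic binning share one code path. A self-contained sketch of that scheme (function name and the test data are illustrative):

    import numpy as np

    # Build bin edges and centers that are equally spaced in transformed coordinates:
    # identity transform gives linear bins, log/exp gives logarithmically spaced bins.
    def bin_edges_and_centers(lo, hi, N, log=False):
        fwd, rev = (np.log, np.exp) if log else ((lambda x: x), (lambda x: x))
        edges   = rev(fwd(lo) + np.arange(N + 1) * (fwd(hi) - fwd(lo)) / N)
        centers = rev(fwd(lo) + (0.5 + np.arange(N)) * (fwd(hi) - fwd(lo)) / N)
        return edges, centers

    data = np.random.lognormal(mean=0.0, sigma=1.0, size=1000)
    edges, centers = bin_edges_and_centers(data.min(), data.max(), N=10, log=True)
    count, _ = np.histogram(data, bins=edges)
    print(np.vstack((centers, count)).T)   # two columns: bincenter, count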

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-
import os,sys
import os,sys,re
import damask
from optparse import OptionParser
@@ -32,13 +32,16 @@ parser.set_defaults(label = [],
(options,filenames) = parser.parse_args()
pattern = [re.compile('^()(.+)$'), # label pattern for scalar
re.compile('^(\d+_)?(.+)$'), # label pattern for multidimension
]
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
try: table = damask.ASCIItable(name = name,
buffered = False)
except: continue
damask.util.report(scriptName,name)
@@ -63,8 +66,9 @@ for name in filenames:
for i,index in enumerate(indices):
if index == -1: remarks.append('label {} not present...'.format(options.label[i]))
else:
m = pattern[dimensions[i]>1].match(table.labels[index]) # isolate label name
for j in xrange(dimensions[i]):
table.labels[index+j] = table.labels[index+j].replace(options.label[i],options.substitute[i])
table.labels[index+j] = table.labels[index+j].replace(m.group(2),options.substitute[i]) # replace name with substitute
if remarks != []: damask.util.croak(remarks)
if errors != []:
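
The two compiled patterns let the script substitute only the base name of a label while preserving the 'i_' prefix that marks components of multidimensional columns ('1_pos', '2_pos', ...). A short sketch of how the match is used (the rename helper is illustrative, not part of the script):

    import re

    pattern = [re.compile(r'^()(.+)$'),        # scalar label: the whole label is the name
               re.compile(r'^(\d+_)?(.+)$'),   # multidimensional label: optional 'i_' prefix
               ]

    # Swap the base name of a label, keeping any component prefix intact.
    def rename(label, new, multidimensional=True):
        m = pattern[multidimensional].match(label)   # bool indexes the pattern list (0 or 1)
        return label.replace(m.group(2), new)

    print(rename('2_pos', 'coords'))                            # -> 2_coords
    print(rename('phase', 'grainID', multidimensional=False))   # -> grainID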

View File

@@ -4,7 +4,7 @@
import os,sys,math
import numpy as np
import multiprocessing
from optparse import OptionParser
from optparse import OptionParser,OptionGroup
from scipy import spatial
import damask
@@ -109,71 +109,83 @@ Generate geometry description and material configuration by standard Voronoi tes
""", version = scriptID)
parser.add_option('-g', '--grid',
dest = 'grid',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c grid of hexahedral box [auto]')
parser.add_option('-s', '--size',
dest = 'size',
type = 'float', nargs = 3, metavar=' '.join(['float']*3),
help = 'x,y,z size of hexahedral box [auto]')
parser.add_option('-o', '--origin',
dest = 'origin',
type = 'float', nargs = 3, metavar=' '.join(['float']*3),
help = 'offset from old to new origin of grid')
parser.add_option('-p', '--position',
dest = 'position',
type = 'string', metavar = 'string',
help = 'column label for seed positions [%default]')
parser.add_option('-w', '--weight',
dest = 'weight',
type = 'string', metavar = 'string',
help = 'column label for seed weights [%default]')
parser.add_option('-m', '--microstructure',
dest = 'microstructure',
type = 'string', metavar = 'string',
help = 'column label for seed microstructures [%default]')
parser.add_option('-e', '--eulers',
dest = 'eulers',
type = 'string', metavar = 'string',
help = 'column label for seed Euler angles [%default]')
parser.add_option('--axes',
dest = 'axes',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'orientation coordinate frame in terms of position coordinate frame')
parser.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index to be used [%default]')
parser.add_option('--crystallite',
dest = 'crystallite',
type = 'int', metavar = 'int',
help = 'crystallite index to be used [%default]')
parser.add_option('--phase',
dest = 'phase',
type = 'int', metavar = 'int',
help = 'phase index to be used [%default]')
parser.add_option('-r', '--rnd',
dest = 'randomSeed',
type = 'int', metavar='int',
help = 'seed of random number generator for second phase distribution [%default]')
parser.add_option('--secondphase',
dest = 'secondphase',
type = 'float', metavar= 'float',
help = 'volume fraction of randomly distribute second phase [%default]')
parser.add_option('-l', '--laguerre',
group = OptionGroup(parser, "Tessellation","")
group.add_option('-l', '--laguerre',
dest = 'laguerre',
action = 'store_true',
help = 'use Laguerre (weighted Voronoi) tessellation')
parser.add_option('--cpus',
group.add_option('--cpus',
dest = 'cpus',
type = 'int', metavar = 'int',
help = 'number of parallel processes to use for Laguerre tessellation [%default]')
parser.add_option('--nonperiodic',
group.add_option('--nonperiodic',
dest = 'nonperiodic',
action = 'store_true',
help = 'use nonperiodic tessellation')
parser.add_option_group(group)
group = OptionGroup(parser, "Geometry","")
group.add_option('-g', '--grid',
dest = 'grid',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c grid of hexahedral box [auto]')
group.add_option('-s', '--size',
dest = 'size',
type = 'float', nargs = 3, metavar=' '.join(['float']*3),
help = 'x,y,z size of hexahedral box [auto]')
group.add_option('-o', '--origin',
dest = 'origin',
type = 'float', nargs = 3, metavar=' '.join(['float']*3),
help = 'origin of grid')
parser.add_option_group(group)
group = OptionGroup(parser, "Seeds","")
group.add_option('-p', '--position',
dest = 'position',
type = 'string', metavar = 'string',
help = 'column label for seed positions [%default]')
group.add_option('-w', '--weight',
dest = 'weight',
type = 'string', metavar = 'string',
help = 'column label for seed weights [%default]')
group.add_option('-m', '--microstructure',
dest = 'microstructure',
type = 'string', metavar = 'string',
help = 'column label for seed microstructures [%default]')
group.add_option('-e', '--eulers',
dest = 'eulers',
type = 'string', metavar = 'string',
help = 'column label for seed Euler angles [%default]')
group.add_option('--axes',
dest = 'axes',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'orientation coordinate frame in terms of position coordinate frame')
parser.add_option_group(group)
group = OptionGroup(parser, "Configuration","")
group.add_option('--homogenization',
dest = 'homogenization',
type = 'int', metavar = 'int',
help = 'homogenization index to be used [%default]')
group.add_option('--crystallite',
dest = 'crystallite',
type = 'int', metavar = 'int',
help = 'crystallite index to be used [%default]')
group.add_option('--phase',
dest = 'phase',
type = 'int', metavar = 'int',
help = 'phase index to be used [%default]')
parser.add_option_group(group)
parser.set_defaults(position = 'pos',
weight = 'weight',
microstructure = 'microstructure',
@@ -181,24 +193,18 @@ parser.set_defaults(position = 'pos',
homogenization = 1,
crystallite = 1,
phase = 1,
secondphase = 0.0,
cpus = 2,
laguerre = False,
nonperiodic = False,
randomSeed = None,
)
(options,filenames) = parser.parse_args()
if options.secondphase > 1.0 or options.secondphase < 0.0:
parser.error('volume fraction of second phase ({}) out of bounds.'.format(options.secondphase))
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name,
try: table = damask.ASCIItable(name = name,
outname = os.path.splitext(name)[-2]+'.geom' if name else name,
buffered = False)
except: continue
@@ -294,20 +300,11 @@ for name in filenames:
config_header = []
formatwidth = 1+int(math.log10(NgrainIDs))
phase = options.phase * np.ones(NgrainIDs,'i')
if int(options.secondphase*info['microstructures']) > 0:
phase[0:int(options.secondphase*info['microstructures'])] += 1 # alter fraction 'options.secondphase' of used grainIDs
randomSeed = options.randomSeed if options.randomSeed \
else int(os.urandom(4).encode('hex'), 16) # random seed for second phase
np.random.seed(randomSeed)
np.random.shuffle(phase)
config_header += ['# random seed (phase shuffling): {}'.format(randomSeed)]
config_header += ['<microstructure>']
for i,ID in enumerate(grainIDs):
config_header += ['[Grain%s]'%(str(ID).zfill(formatwidth)),
'crystallite %i'%options.crystallite,
'(constituent)\tphase %i\ttexture %s\tfraction 1.0'%(phase[i],str(ID).rjust(formatwidth)),
'(constituent)\tphase %i\ttexture %s\tfraction 1.0'%(options.phase,str(ID).rjust(formatwidth)),
]
if hasEulers:
config_header += ['<texture>']
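
The option list of geom_fromVoronoiTessellation is reorganized into named OptionGroups (Tessellation, Geometry, Seeds, Configuration), so --help shows related flags together instead of one flat list; the second-phase shuffling options are dropped. A trimmed sketch of the grouping pattern with a couple of representative options (not the full option set):

    from optparse import OptionParser, OptionGroup

    parser = OptionParser(usage='%prog options [file[s]]')

    group = OptionGroup(parser, "Geometry", "")
    group.add_option('-g', '--grid', dest='grid', type='int', nargs=3,
                     metavar='int int int', help='a,b,c grid of hexahedral box [auto]')
    group.add_option('-s', '--size', dest='size', type='float', nargs=3,
                     metavar='float float float', help='x,y,z size of hexahedral box [auto]')
    parser.add_option_group(group)

    group = OptionGroup(parser, "Configuration", "")
    group.add_option('--homogenization', dest='homogenization', type='int',
                     metavar='int', help='homogenization index to be used [%default]')
    parser.add_option_group(group)

    parser.set_defaults(homogenization=1)
    parser.print_help()   # options now appear under their group headings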

View File

@@ -48,8 +48,11 @@ parser.add_option('-m', '--microstructure',
parser.add_option('-r', '--rnd',
dest = 'randomSeed', type = 'int', metavar = 'int',
help = 'seed of random number generator [%default]')
parser.add_option('--format',
dest = 'format', type = 'string', metavar = 'string',
help = 'number format of output [auto]')
group = OptionGroup(parser, "Laguerre Tessellation Options",
group = OptionGroup(parser, "Laguerre Tessellation",
"Parameters determining shape of weight distribution of seed points"
)
group.add_option('-w', '--weights',
@@ -70,8 +73,8 @@ group.add_option('--sigma',
help='standard deviation of normal distribution for weights [%default]')
parser.add_option_group(group)
group = OptionGroup(parser, "Selective Seeding Options",
"More uniform distribution of seed points using Mitchell\'s Best Candidate Algorithm"
group = OptionGroup(parser, "Selective Seeding",
"More uniform distribution of seed points using Mitchell's Best Candidate Algorithm"
)
group.add_option('-s','--selective',
action = 'store_true',
@@ -103,6 +106,7 @@ parser.set_defaults(randomSeed = None,
force = False,
distance = 0.2,
numCandidates = 10,
format = None,
)
(options,filenames) = parser.parse_args()
@@ -215,7 +219,7 @@ for name in filenames:
# --- write seeds information ------------------------------------------------------------
table.data = seeds
table.data_writeArray()
table.data_writeArray(fmt = options.format)
# --- output finalization --------------------------------------------------------------------------
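
The new --format option is passed on to data_writeArray as the number format for the written seed table. The sketch below shows the effect with numpy.savetxt, which is assumed to be what data_writeArray uses underneath; the write_array helper and the example data are illustrative.

    import sys
    import numpy as np

    # Write a 2-D array with a caller-chosen number format ('%g'-style default).
    def write_array(data, fmt=None, out=sys.stdout):
        np.savetxt(out, np.asarray(data), fmt=fmt if fmt else '%g', delimiter='\t')

    seeds = np.array([[0.125, 0.250, 0.375, 1.0],
                      [0.625, 0.750, 0.875, 2.0]])
    write_array(seeds)              # default formatting
    write_array(seeds, fmt='%.6e')  # what e.g. --format '%.6e' would request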