Merge remote branch 'origin/development' into cmake
commit 672c8d0606
@@ -0,0 +1,8 @@
+# from https://help.github.com/articles/dealing-with-line-endings/
+#
+# always use LF, even if the files are edited on windows, they need to be compiled/used on unix
+* text eol=lf
+
+# Denote all files that are truly binary and should not be modified.
+*.png binary
+*.jpg binary
@@ -5,10 +5,10 @@ set LOCATION=%~dp0
 set DAMASK_ROOT=%LOCATION%\DAMASK
 set DAMASK_NUM_THREADS=2
 chcp 1252
 Title Düsseldorf Advanced Materials Simulation Kit - DAMASK, MPIE Düsseldorf
 echo.
 echo Düsseldorf Advanced Materials Simulation Kit - DAMASK
 echo Max-Planck-Institut für Eisenforschung, Düsseldorf
 echo http://damask.mpie.de
 echo.
 echo Preparing environment ...
@@ -2,7 +2,7 @@
 # usage: source DAMASK_env.sh
 
 if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == 'linux' ]; then
-DAMASK_ROOT=$(readlink -f "`dirname $BASH_SOURCE`")
+DAMASK_ROOT=$(python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "`dirname $BASH_SOURCE`")
 else
 [[ "${BASH_SOURCE::1}" == "/" ]] && BASE="" || BASE="`pwd`/"
 STAT=$(stat "`dirname $BASE$BASH_SOURCE`")
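The hunk above swaps the GNU-specific "readlink -f" for a Python one-liner, presumably for portability (an inference; the commit gives no rationale), since BSD/macOS readlink lacks the -f flag. The one-liner is equivalent to this small script, assuming any Python interpreter is on PATH:

    # portable stand-in for GNU "readlink -f": resolve ~, symlinks, and relative parts
    import os, sys
    print(os.path.realpath(os.path.expanduser(sys.argv[1])))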
@@ -18,11 +18,11 @@ fi
 
 SOLVER=`which DAMASK_spectral 2>/dev/null`
 if [ "x$SOLVER" == "x" ]; then
-export SOLVER='Not found!'
+SOLVER='Not found!'
 fi
 PROCESSING=`which postResults 2>/dev/null`
 if [ "x$PROCESSING" == "x" ]; then
-export PROCESSING='Not found!'
+PROCESSING='Not found!'
 fi
 
 # according to http://software.intel.com/en-us/forums/topic/501500
@@ -53,7 +53,11 @@ if [ ! -z "$PS1" ]; then
 [[ "x$SOLVER" != "x" ]] && echo "Spectral Solver $SOLVER"
 [[ "x$PROCESSING" != "x" ]] && echo "Post Processing $PROCESSING"
 echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
-[[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR"
+if [ "x$PETSC_DIR" != "x" ]; then
+echo "PETSc location $PETSC_DIR"
+[[ `python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "$PETSC_DIR"` == $PETSC_DIR ]] \
+|| echo " ~~> "`python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "$PETSC_DIR"`
+fi
 [[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
 echo "MSC.Marc/Mentat $MSC_ROOT"
 echo
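The added PETSC_DIR block prints the configured location and, when that value is not already a fully resolved path, also the location it actually points to. A hypothetical Python rendering of the same logic (helper name invented for illustration):

    import os

    def report_petsc_dir(petsc_dir):
        # print the configured location, then the resolved one if they differ
        print("PETSc location %s" % petsc_dir)
        resolved = os.path.realpath(os.path.expanduser(petsc_dir))
        if resolved != petsc_dir:
            print(" ~~> %s" % resolved)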
@@ -51,7 +51,10 @@ if [ ! -z "$PS1" ]; then
 [[ "x$SOLVER" != "x" ]] && echo "Spectral Solver $SOLVER"
 [[ "x$PROCESSING" != "x" ]] && echo "Post Processing $PROCESSING"
 echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
-[[ "x$PETSC_DIR" != "x" ]] && echo "PETSc location $PETSC_DIR"
+if [ "x$PETSC_DIR" != "x" ]; then
+echo "PETSc location $PETSC_DIR"
+[[ `readlink -f $PETSC_DIR` == $PETSC_DIR ]] || echo " ~~> "`readlink -f $PETSC_DIR`
+fi
 [[ "x$PETSC_ARCH" != "x" ]] && echo "PETSc architecture $PETSC_ARCH"
 echo "MSC.Marc/Mentat $MSC_ROOT"
 echo
LICENSE (2 lines changed)
@@ -1,4 +1,4 @@
-Copyright 2011-15 Max-Planck-Institut für Eisenforschung GmbH
+Copyright 2011-16 Max-Planck-Institut für Eisenforschung GmbH
 
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
README (4 lines changed)
@@ -2,9 +2,9 @@ visit damask.mpie.de for installation and usage instructions
 
 CONTACT INFORMATION
 
 Max-Planck-Institut für Eisenforschung GmbH
 Max-Planck-Str. 1
 40237 Düsseldorf
 Germany
 
 Email: DAMASK@mpie.de
@@ -0,0 +1,3 @@
+DAMASK_marc*.f90
+quit__genmod.f90
+*.marc
@@ -1,37 +0,0 @@
-### $Id$ ###
-
-[Tungsten]
-elasticity hooke
-plasticity dislokmc
-
-### Material parameters ###
-lattice_structure bcc
-C11 523.0e9 # From Marinica et al. Journal of Physics: Condensed Matter(2013)
-C12 202.0e9
-C44 161.0e9
-
-grainsize 2.0e-5 # Average grain size [m] 2.0e-5
-SolidSolutionStrength 0.0 # Strength due to elements in solid solution
-
-### Dislocation glide parameters ###
-#per family
-Nslip 12 0
-slipburgers 2.72e-10 # Burgers vector of slip system [m]
-rhoedge0 1.0e12 # Initial edge dislocation density [m/m**3]
-rhoedgedip0 1.0 # Initial edged dipole dislocation density [m/m**3]
-Qedge 2.725e-19 # Activation energy for dislocation glide [J]
-v0 3560.3 # Initial glide velocity [m/s](kmC)
-p_slip 0.16 # p-exponent in glide velocity
-q_slip 1.00 # q-exponent in glide velocity
-u_slip 2.47 # u-exponent of stress pre-factor (kmC)
-s_slip 0.97 # self hardening in glide velocity (kmC)
-tau_peierls 2.03e9 # peierls stress [Pa]
-
-#hardening
-dipoleformationfactor 0 # to have hardening due to dipole formation off
-CLambdaSlip 10.0 # Adj. parameter controlling dislocation mean free path
-D0 4.0e-5 # Vacancy diffusion prefactor [m**2/s]
-Qsd 4.5e-19 # Activation energy for climb [J]
-Catomicvolume 1.0 # Adj. parameter controlling the atomic volume [in b]
-Cedgedipmindistance 1.0 # Adj. parameter controlling the minimum dipole distance [in b]
-interaction_slipslip 0.2 0.11 0.19 0.15 0.11 0.17
@@ -54,12 +54,12 @@ discrepancyPower_RGC 5.0
 fixed_seed 0 # put any number larger than zero, integer, if you want to have a pseudo random distribution
 
 ## spectral parameters ##
-err_div_tolAbs 1.0e-3 # relative tolerance for fulfillment of stress equilibrium
+err_div_tolAbs 1.0e-3 # absolute tolerance for fulfillment of stress equilibrium
-err_div_tolRel 5.0e-4 # absolute tolerance for fulfillment of stress equilibrium
+err_div_tolRel 5.0e-4 # relative tolerance for fulfillment of stress equilibrium
-err_curl_tolAbs 1.0e-12 # relative tolerance for fulfillment of strain compatibility
+err_curl_tolAbs 1.0e-12 # absolute tolerance for fulfillment of strain compatibility
-err_curl_tolRel 5.0e-4 # absolute tolerance for fulfillment of strain compatibility
+err_curl_tolRel 5.0e-4 # relative tolerance for fulfillment of strain compatibility
-err_stress_tolrel 0.01 # relative tolerance for fulfillment of stress BC
+err_stress_tolAbs 1.0e3 # absolute tolerance for fulfillment of stress BC
-err_stress_tolabs 1.0e3 # absolute tolerance for fulfillment of stress BC
+err_stress_tolRel 0.01 # relative tolerance for fulfillment of stress BC
 fftw_timelimit -1.0 # timelimit of plan creation for FFTW, see manual on www.fftw.org, Default -1.0: disable timelimit
 rotation_tol 1.0e-12 # tolerance of rotation specified in loadcase, Default 1.0e-12: first guess
 fftw_plan_mode FFTW_PATIENT # reads the planing-rigor flag, see manual on www.fftw.org, Default FFTW_PATIENT: use patient planner flag
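Two fixes in the numerics hunk above: the tolAbs/tolRel comments had been swapped (each absolute tolerance was described as relative and vice versa), and the err_stress keys are renamed to the consistent tolAbs/tolRel casing with each value attached to the right key. As a generic illustration of how an absolute and a relative tolerance combine in a convergence test (a sketch, not DAMASK's actual solver code):

    def converged(residual, reference, tolAbs=1.0e-3, tolRel=5.0e-4):
        # accept if the residual is small absolutely OR small relative to a reference magnitude
        return residual <= max(tolAbs, tolRel*abs(reference))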
@@ -0,0 +1,2 @@
+core.so
+corientation.so
@@ -1,27 +1,27 @@
 # -*- coding: UTF-8 no BOM -*-
 
-# $Id$
+"""Main aggregator"""
 import sys, os
 
 with open(os.path.join(os.path.dirname(__file__),'../../VERSION')) as f:
 version = f.readline()[:-1]
 
-from .environment import Environment # only one class
+from .environment import Environment # noqa
-from .asciitable import ASCIItable # only one class
+from .asciitable import ASCIItable # noqa
-from .config import Material # will be extended to debug and numerics
+from .config import Material # noqa
-from .colormaps import Colormap, Color
+from .colormaps import Colormap, Color # noqa
-from .orientation import Quaternion, Rodrigues, Symmetry, Orientation
+from .orientation import Quaternion, Rodrigues, Symmetry, Orientation # noqa
 # try:
 # from .corientation import Quaternion, Rodrigues, Symmetry, Orientation
 # print "Import Cython version of Orientation module"
 # except:
 # from .orientation import Quaternion, Rodrigues, Symmetry, Orientation
 #from .block import Block # only one class
-from .result import Result # only one class
+from .result import Result # noqa
-from .geometry import Geometry # one class with subclasses
+from .geometry import Geometry # noqa
-from .solver import Solver # one class with subclasses
+from .solver import Solver # noqa
-from .test import Test
+from .test import Test # noqa
-from .util import extendableOption
+from .util import extendableOption # noqa
 
 try:
 from . import core
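The "# noqa" markers replace the older descriptive comments; they tell linters such as flake8/pyflakes to skip these lines, which would otherwise be flagged as "imported but unused" because the imports exist only to re-export the names at package level. Minimal illustration:

    # without the marker a linter flags this re-export as an unused import
    from .solver import Solver # noqa (re-exported as damask.Solver)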
@@ -4,12 +4,9 @@
 
 import os,sys
 import numpy as np
-import util
 
 class ASCIItable():
-'''
-There should be a doc string here :)
-'''
+"""Read and write to ASCII tables"""
 
 __slots__ = ['__IO__',
 'info',
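This hunk sets the pattern for the rest of the file: the unused util import goes away, and placeholder ''' docstrings collapse into one-line """ docstrings in the style PEP 257 recommends. Schematically:

    # before
    def head_write(self):
        '''
        write current header information (info + labels)
        '''

    # after: one-line summary, double-quote delimiters
    def head_write(self):
        """write current header information (info + labels)"""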
@@ -28,7 +25,7 @@ class ASCIItable():
 readonly = False, # no reading from file
 ):
 self.__IO__ = {'output': [],
 'buffered': buffered,
 'labeled': labeled, # header contains labels
 'labels': [], # labels according to file info
 'readBuffer': [], # buffer to hold non-advancing reads
@@ -38,18 +35,18 @@ class ASCIItable():
 self.__IO__['inPlace'] = not outname and name and not readonly
 if self.__IO__['inPlace']: outname = name + self.tmpext # transparently create tmp file
 try:
 self.__IO__['in'] = (open( name,'r') if os.access( name, os.R_OK) else None) if name else sys.stdin
 except TypeError:
 self.__IO__['in'] = name
 
 try:
-self.__IO__['out'] = (open(outname,'w') if (not os.path.isfile(outname) \
+self.__IO__['out'] = (open(outname,'w') if (not os.path.isfile(outname) or
-or os.access( outname, os.W_OK) \
+os.access( outname, os.W_OK)
-) \
+) and
-and (not self.__IO__['inPlace'] \
+(not self.__IO__['inPlace'] or
-or not os.path.isfile(name) \
+not os.path.isfile(name) or
-or os.access( name, os.W_OK) \
+os.access( name, os.W_OK)
 ) else None) if outname else sys.stdout
 except TypeError:
 self.__IO__['out'] = outname
 
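The rewrite above drops backslash line continuations in favor of leaving the boolean operators at the end of each line, relying on the open parentheses to continue the expression (the style PEP 8 prefers). In miniature:

    import os
    outname = 'out.txt'
    # backslash continuation (old style)
    writable = not os.path.isfile(outname) \
               or os.access(outname, os.W_OK)
    # implied continuation inside parentheses (new style)
    writable = (not os.path.isfile(outname) or
                os.access(outname, os.W_OK))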
@@ -58,8 +55,8 @@ class ASCIItable():
 self.data = []
 self.line = ''
 
-if self.__IO__['in'] == None \
+if self.__IO__['in'] is None \
-or self.__IO__['out'] == None: raise IOError # complain if any required file access not possible
+or self.__IO__['out'] is None: raise IOError # complain if any required file access not possible
 
 # ------------------------------------------------------------------
 def _transliterateToFloat(self,
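"== None" invokes the left operand's __eq__, which objects such as numpy arrays overload (yielding an elementwise result rather than a plain truth value); "is None" is an identity test and is the idiomatic, always-unambiguous form. For example:

    import numpy as np
    a = np.zeros(3)
    # a == None -> elementwise comparison (an array, not a bool)
    # a is None -> False, unambiguously
    print(a is None)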
@@ -86,9 +83,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
 def output_write(self,
 what):
-'''
-aggregate a single row (string) or list of (possibly containing further lists of) rows into output
-'''
+"""aggregate a single row (string) or list of (possibly containing further lists of) rows into output"""
 if not isinstance(what, (str, unicode)):
 try:
 for item in what: self.output_write(item)
@@ -104,7 +99,7 @@ class ASCIItable():
 clear = True):
 try:
 self.__IO__['output'] == [] or self.__IO__['out'].write('\n'.join(self.__IO__['output']) + '\n')
-except IOError as e:
+except IOError:
 return False
 if clear: self.output_clear()
 return True
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
def head_read(self):
|
def head_read(self):
|
||||||
'''
|
"""
|
||||||
get column labels by either reading
|
get column labels by either reading
|
||||||
the first row or, if keyword "head[*]" is present,
|
|
||||||
the last line of the header
|
the first row or, if keyword "head[*]" is present,
|
||||||
'''
|
the last line of the header
|
||||||
|
"""
|
||||||
import re
|
import re
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
@@ -180,10 +176,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
 def head_write(self,
 header = True):
-'''
-write current header information (info + labels)
-'''
+"""write current header information (info + labels)"""
 
 head = ['{}\theader'.format(len(self.info)+self.__IO__['labeled'])] if header else []
 head.append(self.info)
 if self.__IO__['labeled']: head.append('\t'.join(self.labels))
@@ -192,9 +185,7 @@ class ASCIItable():
 
 # ------------------------------------------------------------------
 def head_getGeom(self):
-'''
-interpret geom header
-'''
+"""interpret geom header"""
 identifiers = {
 'grid': ['a','b','c'],
 'size': ['x','y','z'],
@@ -234,9 +225,7 @@ class ASCIItable():
 
 # ------------------------------------------------------------------
 def head_putGeom(self,info):
-'''
-translate geometry description to header
-'''
+"""translate geometry description to header"""
 self.info_append([
 "grid\ta {}\tb {}\tc {}".format(*info['grid']),
 "size\tx {}\ty {}\tz {}".format(*info['size']),
@@ -249,9 +238,7 @@ class ASCIItable():
 def labels_append(self,
 what,
 reset = False):
-'''
-add item or list to existing set of labels (and switch on labeling)
-'''
+"""add item or list to existing set of labels (and switch on labeling)"""
 if not isinstance(what, (str, unicode)):
 try:
 for item in what: self.labels_append(item)
@@ -265,28 +252,27 @@ class ASCIItable():
 
 # ------------------------------------------------------------------
 def labels_clear(self):
-'''
-delete existing labels and switch to no labeling
-'''
+"""delete existing labels and switch to no labeling"""
 self.labels = []
 self.__IO__['labeled'] = False
 
 # ------------------------------------------------------------------
 def label_index(self,
 labels):
-'''
+"""
 tell index of column label(s).
-return numpy array if asked for list of labels.
-transparently deals with label positions implicitly given as numbers or their headings given as strings.
-'''
+
+return numpy array if asked for list of labels.
+transparently deals with label positions implicitly given as numbers or their headings given as strings.
+"""
 from collections import Iterable
 
 if isinstance(labels, Iterable) and not isinstance(labels, str): # check whether list of labels is requested
 idx = []
 for label in labels:
-if label != None:
+if label is not None:
 try:
-idx.append(int(label)) # column given as integer number?
+idx.append(int(label)-1) # column given as integer number?
 except ValueError:
 try:
 idx.append(self.labels.index(label)) # locate string in label list
@@ -297,7 +283,7 @@ class ASCIItable():
 idx.append(-1) # not found...
 else:
 try:
-idx = int(labels)
+idx = int(labels)-1 # offset for python array indexing
 except ValueError:
 try:
 idx = self.labels.index(labels)
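Both branches of label_index (list of labels above, single label below) now subtract 1 from numeric column references, mapping the user-facing 1-based column numbers of the ASCII-table format onto Python's 0-based indexing. In short:

    labels = ['1_pos','2_pos','3_pos','texture']
    idx = int('1') - 1 # user asks for "column 1"
    print(labels[idx]) # '1_pos', the first column as intended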
@@ -305,28 +291,28 @@ class ASCIItable():
 try:
 idx = self.labels.index('1_'+labels) # locate '1_'+string in label list
 except ValueError:
-idx = None if labels == None else -1
+idx = None if labels is None else -1
 
-return np.array(idx) if isinstance(idx,list) else idx
+return np.array(idx) if isinstance(idx,Iterable) else idx
 
 # ------------------------------------------------------------------
 def label_dimension(self,
 labels):
-'''
+"""
 tell dimension (length) of column label(s).
-return numpy array if asked for list of labels.
-transparently deals with label positions implicitly given as numbers or their headings given as strings.
-'''
-
+
+return numpy array if asked for list of labels.
+transparently deals with label positions implicitly given as numbers or their headings given as strings.
+"""
 from collections import Iterable
 
 if isinstance(labels, Iterable) and not isinstance(labels, str): # check whether list of labels is requested
 dim = []
 for label in labels:
-if label != None:
+if label is not None:
 myDim = -1
 try: # column given as number?
-idx = int(label)
+idx = int(label)-1
 myDim = 1 # if found has at least dimension 1
 if self.labels[idx].startswith('1_'): # column has multidim indicator?
 while idx+myDim < len(self.labels) and self.labels[idx+myDim].startswith("%i_"%(myDim+1)):
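label_dimension leans on the table convention that a vector or tensor column x is stored as consecutive headings 1_x, 2_x, 3_x, and so on; starting from a hit it keeps counting while the numbered prefixes continue. A standalone sketch of that walk:

    def dimension(labels, idx):
        # count consecutive '1_x','2_x',... headings starting at position idx
        dim = 1
        if labels[idx].startswith('1_'):
            while idx+dim < len(labels) and labels[idx+dim].startswith('%i_'%(dim+1)):
                dim += 1
        return dim

    print(dimension(['1_f','2_f','3_f','p'], 0)) # 3 (vector column)
    print(dimension(['1_f','2_f','3_f','p'], 3)) # 1 (scalar column)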
@@ -345,7 +331,7 @@ class ASCIItable():
 dim = -1 # assume invalid label
 idx = -1
 try: # column given as number?
-idx = int(labels)
+idx = int(labels)-1
 dim = 1 # if found has at least dimension 1
 if self.labels[idx].startswith('1_'): # column has multidim indicator?
 while idx+dim < len(self.labels) and self.labels[idx+dim].startswith("%i_"%(dim+1)):
@@ -359,17 +345,17 @@ class ASCIItable():
 while idx+dim < len(self.labels) and self.labels[idx+dim].startswith("%i_"%(dim+1)):
 dim += 1 # keep adding while going through object
 
-return np.array(dim) if isinstance(dim,list) else dim
+return np.array(dim) if isinstance(dim,Iterable) else dim
 
 # ------------------------------------------------------------------
 def label_indexrange(self,
 labels):
-'''
+"""
 tell index range for given label(s).
-return numpy array if asked for list of labels.
-transparently deals with label positions implicitly given as numbers or their headings given as strings.
-'''
-
+
+return numpy array if asked for list of labels.
+transparently deals with label positions implicitly given as numbers or their headings given as strings.
+"""
 from collections import Iterable
 
 start = self.label_index(labels)
@@ -381,9 +367,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
 def info_append(self,
 what):
-'''
-add item or list to existing set of infos
-'''
+"""add item or list to existing set of infos"""
 if not isinstance(what, (str, unicode)):
 try:
 for item in what: self.info_append(item)
@@ -394,9 +378,7 @@ class ASCIItable():
 
 # ------------------------------------------------------------------
 def info_clear(self):
-'''
-delete any info block
-'''
+"""delete any info block"""
 self.info = []
 
 # ------------------------------------------------------------------
|
||||||
# ------------------------------------------------------------------
|
# ------------------------------------------------------------------
|
||||||
def data_skipLines(self,
|
def data_skipLines(self,
|
||||||
count):
|
count):
|
||||||
'''
|
"""wind forward by count number of lines"""
|
||||||
wind forward by count number of lines
|
|
||||||
'''
|
|
||||||
for i in xrange(count):
|
for i in xrange(count):
|
||||||
alive = self.data_read()
|
alive = self.data_read()
|
||||||
|
|
||||||
|
@ -421,9 +401,7 @@ class ASCIItable():
|
||||||
def data_read(self,
|
def data_read(self,
|
||||||
advance = True,
|
advance = True,
|
||||||
respectLabels = True):
|
respectLabels = True):
|
||||||
'''
|
"""read next line (possibly buffered) and parse it into data array"""
|
||||||
read next line (possibly buffered) and parse it into data array
|
|
||||||
'''
|
|
||||||
self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \
|
self.line = self.__IO__['readBuffer'].pop(0) if len(self.__IO__['readBuffer']) > 0 \
|
||||||
else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file
|
else self.__IO__['in'].readline().strip() # take buffered content or get next data row from file
|
||||||
|
|
||||||
|
@@ -434,7 +412,7 @@ class ASCIItable():
 
 if self.__IO__['labeled'] and respectLabels: # if table has labels
 items = self.line.split()[:len(self.__IO__['labels'])] # use up to label count (from original file info)
-self.data = items if len(items) == len(self.__IO__['labels']) else [] # take entries if correct number, i.e. not too few compared to label count
+self.data = items if len(items) == len(self.__IO__['labels']) else [] # take entries if label count matches
 else:
 self.data = self.line.split() # otherwise take all
 
@@ -443,9 +421,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
 def data_readArray(self,
 labels = []):
-'''
-read whole data of all (given) labels as numpy array
-'''
+"""read whole data of all (given) labels as numpy array"""
 from collections import Iterable
 
 try:
@@ -453,7 +429,7 @@ class ASCIItable():
 except:
 pass # assume/hope we are at data start already...
 
-if labels == None or labels == []:
+if labels is None or labels == []:
 use = None # use all columns (and keep labels intact)
 labels_missing = []
 else:
@@ -467,9 +443,10 @@ class ASCIItable():
 
 columns = []
 for i,(c,d) in enumerate(zip(indices[present],dimensions[present])): # for all valid labels ...
+# ... transparently add all components unless column referenced by number or with explicit dimension
 columns += range(c,c + \
 (d if str(c) != str(labels[present[i]]) else \
-1)) # ... transparently add all components unless column referenced by number or with explicit dimension
+1))
 use = np.array(columns)
 
 self.labels = list(np.array(self.labels)[use]) # update labels with valid subset
@@ -481,9 +458,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
 def data_write(self,
 delimiter = '\t'):
-'''
-write current data array and report alive output back
-'''
+"""write current data array and report alive output back"""
 if len(self.data) == 0: return True
 
 if isinstance(self.data[0],list):
@@ -495,9 +470,7 @@ class ASCIItable():
 def data_writeArray(self,
 fmt = None,
 delimiter = '\t'):
-'''
-write whole numpy array data
-'''
+"""write whole numpy array data"""
 for row in self.data:
 try:
 output = [fmt % value for value in row] if fmt else map(repr,row)
@@ -520,9 +493,7 @@ class ASCIItable():
 # ------------------------------------------------------------------
 def data_set(self,
 what, where):
-'''
-update data entry in column "where". grows data array if needed.
-'''
+"""update data entry in column "where". grows data array if needed."""
 idx = -1
 try:
 idx = self.label_index(where)
@@ -546,25 +517,27 @@ class ASCIItable():
 
 # ------------------------------------------------------------------
 def microstructure_read(self,
-grid):
-'''
-read microstructure data (from .geom format)
-'''
+grid,
+type = 'i',
+strict = False):
+"""read microstructure data (from .geom format)"""
+def datatype(item):
+return int(item) if type.lower() == 'i' else float(item)
 
 N = grid.prod() # expected number of microstructure indices in data
-microstructure = np.zeros(N,'i') # initialize as flat array
+microstructure = np.zeros(N,type) # initialize as flat array
 
 i = 0
 while i < N and self.data_read():
 items = self.data
 if len(items) > 2:
-if items[1].lower() == 'of': items = [int(items[2])]*int(items[0])
+if items[1].lower() == 'of': items = np.ones(datatype(items[0]))*datatype(items[2])
-elif items[1].lower() == 'to': items = range(int(items[0]),1+int(items[2]))
+elif items[1].lower() == 'to': items = np.arange(datatype(items[0]),1+datatype(items[2]))
-else: items = map(int,items)
+else: items = map(datatype,items)
-else: items = map(int,items)
+else: items = map(datatype,items)
 
 s = min(len(items), N-i) # prevent overflow of microstructure array
 microstructure[i:i+s] = items[:s]
-i += s
+i += len(items)
 
-return microstructure
+return (microstructure, i == N and not self.data_read()) if strict else microstructure # check for proper point count and end of file
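The reworked reader still expands the compact run-length notations of the .geom format ("n of m" repeats value m n times, "a to b" yields the consecutive range) but routes every conversion through the new datatype helper so the same code can fill integer or float arrays; strict=True additionally reports whether exactly grid.prod() values were consumed. A standalone sketch of the expansion rules:

    import numpy as np

    def expand(tokens, dtype=int):
        # 'n of m' -> m repeated n times; 'a to b' -> a, a+1, ..., b
        if len(tokens) > 2 and tokens[1].lower() == 'of':
            return np.ones(dtype(tokens[0]))*dtype(tokens[2])
        if len(tokens) > 2 and tokens[1].lower() == 'to':
            return np.arange(dtype(tokens[0]), 1+dtype(tokens[2]))
        return list(map(dtype, tokens))

    print(expand(['4','of','2'])) # [ 2.  2.  2.  2.]
    print(expand(['1','to','3'])) # [1 2 3]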
@@ -5,11 +5,12 @@ import math,numpy as np
 ### --- COLOR CLASS --------------------------------------------------
 
 class Color():
-'''
-Conversion of colors between different color-spaces. Colors should be given in the form
-Color('model',[vector]).To convert and copy color from one space to other, use the methods
-convertTo('model') and expressAs('model')spectively
-'''
+"""Conversion of colors between different color-spaces.
+
+Colors should be given in the form
+Color('model',[vector]).To convert and copy color from one space to other, use the methods
+convertTo('model') and expressAs('model')spectively
+"""
 
 __slots__ = [
 'model',
@@ -17,7 +18,7 @@ class Color():
 ]
 
 
 # ------------------------------------------------------------------
 def __init__(self,
 model = 'RGB',
 color = np.zeros(3,'d')):
@@ -32,30 +33,32 @@ class Color():
 
 model = model.upper()
 if model not in self.__transforms__.keys(): model = 'RGB'
 if model == 'RGB' and max(color) > 1.0: # are we RGB255 ?
 for i in range(3):
 color[i] /= 255.0 # rescale to RGB
 
 if model == 'HSL': # are we HSL ?
 if abs(color[0]) > 1.0: color[0] /= 360.0 # with angular hue?
 while color[0] >= 1.0: color[0] -= 1.0 # rewind to proper range
 while color[0] < 0.0: color[0] += 1.0 # rewind to proper range
 
 self.model = model
 self.color = np.array(color,'d')
 
 
 # ------------------------------------------------------------------
 def __repr__(self):
+"""Color model and values"""
 return 'Model: %s Color: %s'%(self.model,str(self.color))
 
 
 # ------------------------------------------------------------------
 def __str__(self):
+"""Color model and values"""
 return self.__repr__()
 
 
 # ------------------------------------------------------------------
 def convertTo(self,toModel = 'RGB'):
 toModel = toModel.upper()
 if toModel not in self.__transforms__.keys(): return
@@ -73,17 +76,19 @@ class Color():
 return self
 
 
 # ------------------------------------------------------------------
 def expressAs(self,asModel = 'RGB'):
 return self.__class__(self.model,self.color).convertTo(asModel)
 
 
-# ------------------------------------------------------------------
-# convert H(ue) S(aturation) L(uminance) to R(red) G(reen) B(lue)
-# with S,L,H,R,G,B running from 0 to 1
-# from http://en.wikipedia.org/wiki/HSL_and_HSV
-def _HSL2RGB(self):
 
+def _HSL2RGB(self):
+"""
+convert H(ue) S(aturation) L(uminance) to R(red) G(reen) B(lue)
+
+with S,L,H,R,G,B running from 0 to 1
+from http://en.wikipedia.org/wiki/HSL_and_HSV
+"""
 if self.model != 'HSL': return
 
 sextant = self.color[0]*6.0
@@ -103,12 +108,13 @@ class Color():
 self.color = converted.color
 
 
-# ------------------------------------------------------------------
-# convert R(ed) G(reen) B(lue) to H(ue) S(aturation) L(uminance)
-# with S,L,H,R,G,B running from 0 to 1
-# from http://130.113.54.154/~monger/hsl-rgb.html
 def _RGB2HSL(self):
+"""
+convert R(ed) G(reen) B(lue) to H(ue) S(aturation) L(uminance)
+
+with S,L,H,R,G,B running from 0 to 1
+from http://130.113.54.154/~monger/hsl-rgb.html
+"""
 if self.model != 'RGB': return
 
 HSL = np.zeros(3,'d')
@@ -129,7 +135,7 @@ class Color():
 HSL[0] = 2.0 + (self.color[2] - self.color[0])/(maxcolor - mincolor)
 elif (maxcolor == self.color[2]):
 HSL[0] = 4.0 + (self.color[0] - self.color[1])/(maxcolor - mincolor)
-HSL[0] = HSL[0]*60.0 # is it necessary to scale to 360 hue values? might be dangerous for small values <1..!
+HSL[0] = HSL[0]*60.0 # scaling to 360 might be dangerous for small values
 if (HSL[0] < 0.0):
 HSL[0] = HSL[0] + 360.0
 for i in xrange(2):
@@ -141,12 +147,14 @@ class Color():
 self.color = converted.color
 
 
-# ------------------------------------------------------------------
-# convert R(ed) G(reen) B(lue) to CIE XYZ
-# with all values in the range of 0 to 1
-# from http://www.cs.rit.edu/~ncs/color/t_convert.html
-def _RGB2XYZ(self):
 
+def _RGB2XYZ(self):
+"""
+convert R(ed) G(reen) B(lue) to CIE XYZ
+
+with all values in the range of 0 to 1
+from http://www.cs.rit.edu/~ncs/color/t_convert.html
+"""
 if self.model != 'RGB': return
 
 XYZ = np.zeros(3,'d')
@@ -168,12 +176,14 @@ class Color():
 self.color = converted.color
 
 
-# ------------------------------------------------------------------
-# convert CIE XYZ to R(ed) G(reen) B(lue)
-# with all values in the range of 0 to 1
-# from http://www.cs.rit.edu/~ncs/color/t_convert.html
-def _XYZ2RGB(self):
 
+def _XYZ2RGB(self):
+"""
+convert CIE XYZ to R(ed) G(reen) B(lue)
+
+with all values in the range of 0 to 1
+from http://www.cs.rit.edu/~ncs/color/t_convert.html
+"""
 if self.model != 'XYZ': return
 
 convert = np.array([[ 3.240479,-1.537150,-0.498535],
@@ -189,7 +199,7 @@ class Color():
 RGB[i] = min(RGB[i],1.0)
 RGB[i] = max(RGB[i],0.0)
 
 maxVal = max(RGB) # clipping colors according to the display gamut
 if (maxVal > 1.0): RGB /= maxVal
 
 converted = Color('RGB', RGB)
@@ -197,15 +207,17 @@ class Color():
 self.color = converted.color
 
 
-# ------------------------------------------------------------------
-# convert CIE Lab to CIE XYZ
-# with XYZ in the range of 0 to 1
-# from http://www.easyrgb.com/index.php?X=MATH&H=07#text7
-def _CIELAB2XYZ(self):
 
+def _CIELAB2XYZ(self):
+"""
+convert CIE Lab to CIE XYZ
+
+with XYZ in the range of 0 to 1
+from http://www.easyrgb.com/index.php?X=MATH&H=07#text7
+"""
 if self.model != 'CIELAB': return
 
 ref_white = np.array([.95047, 1.00000, 1.08883],'d') # Observer = 2, Illuminant = D65
 XYZ = np.zeros(3,'d')
 
 XYZ[1] = (self.color[0] + 16.0 ) / 116.0
@@ -220,16 +232,16 @@ class Color():
 self.model = converted.model
 self.color = converted.color
 
 
-# ------------------------------------------------------------------
-# convert CIE XYZ to CIE Lab
-# with XYZ in the range of 0 to 1
-# from http://en.wikipedia.org/wiki/Lab_color_space, http://www.cs.rit.edu/~ncs/color/t_convert.html
 def _XYZ2CIELAB(self):
+"""
+convert CIE XYZ to CIE Lab
+
+with XYZ in the range of 0 to 1
+from http://en.wikipedia.org/wiki/Lab_color_space, http://www.cs.rit.edu/~ncs/color/t_convert.html
+"""
 if self.model != 'XYZ': return
 
 ref_white = np.array([.95047, 1.00000, 1.08883],'d') # Observer = 2, Illuminant = D65
 XYZ = self.color/ref_white
 
 for i in xrange(len(XYZ)):
@@ -243,11 +255,12 @@ class Color():
 self.color = converted.color
 
 
-# ------------------------------------------------------------------
-# convert CIE Lab to Msh colorspace
-# from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
 def _CIELAB2MSH(self):
+"""
+convert CIE Lab to Msh colorspace
+
+from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
+"""
 if self.model != 'CIELAB': return
 
 Msh = np.zeros(3,'d')
@@ -262,12 +275,13 @@ class Color():
 self.color = converted.color
 
 
-# ------------------------------------------------------------------
-# convert Msh colorspace to CIE Lab
-# s,h in radians
-# from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
 def _MSH2CIELAB(self):
+"""
+convert Msh colorspace to CIE Lab
+
+s,h in radians
+from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
+"""
 if self.model != 'MSH': return
 
 Lab = np.zeros(3,'d')
@@ -280,13 +294,8 @@ class Color():
 self.color = converted.color
 
 
-
-### --- COLORMAP CLASS -----------------------------------------------
-
 class Colormap():
-'''
-perceptually uniform diverging or sequential colormaps.
-'''
+"""perceptually uniform diverging or sequential colormaps."""
 
 __slots__ = [
 'left',
@@ -294,20 +303,40 @@ class Colormap():
 'interpolate',
 ]
 __predefined__ = {
-'gray': {'left': Color('HSL',[0,1,1]), 'right': Color('HSL',[0,0,0.15]), 'interpolate': 'perceptualuniform'},
-'grey': {'left': Color('HSL',[0,1,1]), 'right': Color('HSL',[0,0,0.15]), 'interpolate': 'perceptualuniform'},
-'red': {'left': Color('HSL',[0,1,0.14]), 'right': Color('HSL',[0,0.35,0.91]), 'interpolate': 'perceptualuniform'},
-'green': {'left': Color('HSL',[0.33333,1,0.14]), 'right': Color('HSL',[0.33333,0.35,0.91]), 'interpolate': 'perceptualuniform'},
-'blue': {'left': Color('HSL',[0.66,1,0.14]), 'right': Color('HSL',[0.66,0.35,0.91]), 'interpolate': 'perceptualuniform'},
-'seaweed': {'left': Color('HSL',[0.78,1.0,0.1]), 'right': Color('HSL',[0.40000,0.1,0.9]), 'interpolate': 'perceptualuniform'},
-'bluebrown': {'left': Color('HSL',[0.65,0.53,0.49]), 'right': Color('HSL',[0.11,0.75,0.38]), 'interpolate': 'perceptualuniform'},
-'redgreen': {'left': Color('HSL',[0.97,0.96,0.36]), 'right': Color('HSL',[0.33333,1.0,0.14]), 'interpolate': 'perceptualuniform'},
-'bluered': {'left': Color('HSL',[0.65,0.53,0.49]), 'right': Color('HSL',[0.97,0.96,0.36]), 'interpolate': 'perceptualuniform'},
-'blueredrainbow':{'left': Color('HSL',[2.0/3.0,1,0.5]), 'right': Color('HSL',[0,1,0.5]), 'interpolate': 'linear' },
+'gray': {'left': Color('HSL',[0,1,1]),
+ 'right': Color('HSL',[0,0,0.15]),
+ 'interpolate': 'perceptualuniform'},
+'grey': {'left': Color('HSL',[0,1,1]),
+ 'right': Color('HSL',[0,0,0.15]),
+ 'interpolate': 'perceptualuniform'},
+'red': {'left': Color('HSL',[0,1,0.14]),
+ 'right': Color('HSL',[0,0.35,0.91]),
+ 'interpolate': 'perceptualuniform'},
+'green': {'left': Color('HSL',[0.33333,1,0.14]),
+ 'right': Color('HSL',[0.33333,0.35,0.91]),
+ 'interpolate': 'perceptualuniform'},
+'blue': {'left': Color('HSL',[0.66,1,0.14]),
+ 'right': Color('HSL',[0.66,0.35,0.91]),
+ 'interpolate': 'perceptualuniform'},
+'seaweed': {'left': Color('HSL',[0.78,1.0,0.1]),
+ 'right': Color('HSL',[0.40000,0.1,0.9]),
+ 'interpolate': 'perceptualuniform'},
+'bluebrown': {'left': Color('HSL',[0.65,0.53,0.49]),
+ 'right': Color('HSL',[0.11,0.75,0.38]),
+ 'interpolate': 'perceptualuniform'},
+'redgreen': {'left': Color('HSL',[0.97,0.96,0.36]),
+ 'right': Color('HSL',[0.33333,1.0,0.14]),
+ 'interpolate': 'perceptualuniform'},
+'bluered': {'left': Color('HSL',[0.65,0.53,0.49]),
+ 'right': Color('HSL',[0.97,0.96,0.36]),
+ 'interpolate': 'perceptualuniform'},
+'blueredrainbow':{'left': Color('HSL',[2.0/3.0,1,0.5]),
+ 'right': Color('HSL',[0,1,0.5]),
+ 'interpolate': 'linear' },
 }
 
 
 # ------------------------------------------------------------------
 def __init__(self,
 left = Color('RGB',[1,1,1]),
 right = Color('RGB',[0,0,0]),
@@ -330,26 +359,27 @@ class Colormap():
 self.interpolate = interpolate
 
 
 # ------------------------------------------------------------------
 def __repr__(self):
+"""left and right value of colormap"""
 return 'Left: %s Right: %s'%(self.left,self.right)
 
 
 # ------------------------------------------------------------------
 def invert(self):
 (self.left, self.right) = (self.right, self.left)
 return self
 
 
 # ------------------------------------------------------------------
 def color(self,fraction = 0.5):
 
 def interpolate_Msh(lo, hi, frac):
 
 def rad_diff(a,b):
 return abs(a[2]-b[2])
 
-def adjust_hue(Msh_sat, Msh_unsat): # if saturation of one of the two colors is too less than the other, hue of the less
+# if saturation of one of the two colors is too less than the other, hue of the less
+def adjust_hue(Msh_sat, Msh_unsat):
 if Msh_sat[0] >= Msh_unsat[0]:
 return Msh_sat[2]
 else:
|
||||||
return Color('MSH',Msh)
|
return Color('MSH',Msh)
|
||||||
|
|
||||||
def interpolate_linear(lo, hi, frac):
|
def interpolate_linear(lo, hi, frac):
|
||||||
'''
|
"""
|
||||||
linearly interpolate color at given fraction between lower and higher color in model of lower color
|
linearly interpolate color at given fraction between lower and
|
||||||
'''
|
|
||||||
|
|
||||||
|
higher color in model of lower color
|
||||||
|
"""
|
||||||
interpolation = (1.0 - frac) * np.array(lo.color[:]) \
|
interpolation = (1.0 - frac) * np.array(lo.color[:]) \
|
||||||
+ frac * np.array(hi.expressAs(lo.model).color[:])
|
+ frac * np.array(hi.expressAs(lo.model).color[:])
|
||||||
|
|
||||||
|
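interpolate_linear blends the two endpoint colors componentwise, c(f) = (1-f)*lo + f*hi, in the color model of the lower color. A standalone sketch:

    import numpy as np

    def interpolate_linear(lo, hi, frac):
        # componentwise blend between two color vectors
        return (1.0 - frac)*np.array(lo) + frac*np.array(hi)

    print(interpolate_linear([1.0,1.0,1.0], [0.0,0.0,0.0], 0.25)) # [ 0.75  0.75  0.75]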
@@ -393,23 +424,23 @@ class Colormap():
 else:
 raise NameError('unknown color interpolation method')
 
 # ------------------------------------------------------------------
 def export(self,name = 'uniformPerceptualColorMap',\
 format = 'paraview',\
 steps = 2,\
 crop = [-1.0,1.0],
 model = 'RGB'):
-'''
+"""
 [RGB] colormap for use in paraview or gmsh, or as raw string, or array.
 
 arguments: name, format, steps, crop.
 format is one of (paraview, gmsh, raw, list).
 crop selects a (sub)range in [-1.0,1.0].
 generates sequential map if one limiting color is either white or black,
 diverging map otherwise.
-'''
+"""
 format = format.lower() # consistent comparison basis
 frac = 0.5*(np.array(crop) + 1.0) # rescale crop range to fractions
 colors = [self.color(float(i)/(steps-1)*(frac[1]-frac[0])+frac[0]).expressAs(model).color for i in xrange(steps)]
 
 if format == 'paraview':
@@ -1,5 +1,5 @@
 # -*- coding: UTF-8 no BOM -*-
 
-# $Id$
+"""Aggregator for configuration file handling"""
 
-from .material import Material
+from .material import Material # noqa
@@ -100,20 +100,19 @@ class Texture(Section):
 
 
 class Material():
+"""Reads, manipulates and writes material.config files"""
 
-'''
-Reads, manipulates and writes material.config files
-'''
 __slots__ = ['data']
 
 def __init__(self,verbose=True):
+"""generates ordered list of parts"""
 self.parts = [
 'homogenization',
 'microstructure',
 'crystallite',
 'phase',
 'texture',
-] # ordered (!) list of parts
+]
 self.data = {\
 'homogenization': {'__order__': []},
 'microstructure': {'__order__': []},
@ -124,6 +123,7 @@ class Material():
|
||||||
self.verbose = verbose
|
self.verbose = verbose
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
|
"""returns current configuration to be used as material.config"""
|
||||||
me = []
|
me = []
|
||||||
for part in self.parts:
|
for part in self.parts:
|
||||||
if self.verbose: print('doing '+part)
|
if self.verbose: print('doing '+part)
|
||||||
|
@ -144,7 +144,6 @@ class Material():
|
||||||
re_sec = re.compile(r'^\[(.+)\]$') # pattern for section
|
re_sec = re.compile(r'^\[(.+)\]$') # pattern for section
|
||||||
|
|
||||||
name_section = ''
|
name_section = ''
|
||||||
idx_section = 0
|
|
||||||
active = False
|
active = False
|
||||||
|
|
||||||
for line in content:
|
for line in content:
|
||||||
|
@ -197,8 +196,7 @@ class Material():
|
||||||
return saveFile
|
return saveFile
|
||||||
|
|
||||||
def add_section(self, part=None, section=None, initialData=None, merge = False):
|
def add_section(self, part=None, section=None, initialData=None, merge = False):
|
||||||
'''adding/updating'''
|
"""adding/updating"""
|
||||||
|
|
||||||
part = part.lower()
|
part = part.lower()
|
||||||
section = section.lower()
|
section = section.lower()
|
||||||
if part not in self.parts: raise Exception('invalid part %s'%part)
|
if part not in self.parts: raise Exception('invalid part %s'%part)
|
||||||
|
@ -227,10 +225,10 @@ class Material():
|
||||||
def add_microstructure(self, section='',
|
def add_microstructure(self, section='',
|
||||||
components={}, # dict of phase,texture, and fraction lists
|
components={}, # dict of phase,texture, and fraction lists
|
||||||
):
|
):
|
||||||
''' Experimental! Needs expansion to multi-constituent microstructures...'''
|
"""Experimental! Needs expansion to multi-constituent microstructures..."""
|
||||||
|
|
||||||
microstructure = Microstructure()
|
microstructure = Microstructure()
|
||||||
components=dict((k.lower(), v) for k,v in components.iteritems()) # make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python)
|
# make keys lower case (http://stackoverflow.com/questions/764235/dictionary-to-lowercase-in-python)
|
||||||
|
components=dict((k.lower(), v) for k,v in components.iteritems())
|
||||||
|
|
||||||
for key in ['phase','texture','fraction','crystallite']:
|
for key in ['phase','texture','fraction','crystallite']:
|
||||||
if type(components[key]) is not list:
|
if type(components[key]) is not list:
|
||||||
|
@ -245,7 +243,8 @@ class Material():
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
for (phase,texture,fraction,crystallite) in zip(components['phase'],components['texture'],components['fraction'],components['crystallite']):
|
for (phase,texture,fraction,crystallite) in zip(components['phase'],components['texture'],
|
||||||
|
components['fraction'],components['crystallite']):
|
||||||
microstructure.add_multiKey('constituent','phase %i\ttexture %i\tfraction %g\ncrystallite %i'%(
|
microstructure.add_multiKey('constituent','phase %i\ttexture %i\tfraction %g\ncrystallite %i'%(
|
||||||
self.data['phase']['__order__'].index(phase)+1,
|
self.data['phase']['__order__'].index(phase)+1,
|
||||||
self.data['texture']['__order__'].index(texture)+1,
|
self.data['texture']['__order__'].index(texture)+1,
|
||||||
|
@ -259,8 +258,8 @@ class Material():
|
||||||
section=None,
|
section=None,
|
||||||
key=None,
|
key=None,
|
||||||
value=None):
|
value=None):
|
||||||
if type(value) is not type([]):
|
if not isinstance(value,list):
|
||||||
if type(value) is not type('s'):
|
if not isinstance(value,str):
|
||||||
value = '%s'%value
|
value = '%s'%value
|
||||||
value = [value]
|
value = [value]
|
||||||
newlen = len(value)
|
newlen = len(value)
|
||||||
|
@ -271,17 +270,3 @@ class Material():
|
||||||
if newlen is not oldlen:
|
if newlen is not oldlen:
|
||||||
print('Length of value was changed from %i to %i!'%(oldlen,newlen))
|
print('Length of value was changed from %i to %i!'%(oldlen,newlen))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def ex1():
|
|
||||||
mat=Material()
|
|
||||||
p=Phase({'constitution':'lump'})
|
|
||||||
t=Texture()
|
|
||||||
t.add_component('gauss',{'eulers':[1,2,3]})
|
|
||||||
mat.add_section('phase','phase1',p)
|
|
||||||
mat.add_section('texture','tex1',t)
|
|
||||||
mat.add_microstructure('mustruct1',{'phase':['phase1']*2,'texture':['tex1']*2,'fraction':[0.2]*2})
|
|
||||||
print(mat)
|
|
||||||
mat.write(file='poop')
|
|
||||||
mat.write(file='poop',overwrite=True)
|
|
||||||
|
|
||||||
|
|
|
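Since the commit drops the ex1() demo from material.py, here is a hedged usage sketch of the same Material API as it appears in this diff (add_section, add_microstructure, __repr__). Section names and initialData values are illustrative only; note that add_microstructure iterates over a 'crystallite' key as well, so it is supplied here:

from damask.config import Material   # aggregator import shown in this diff

mat = Material(verbose=False)
mat.add_section('phase',   'Aluminum', {'elasticity': 'hooke'})       # hypothetical data
mat.add_section('texture', 'cube',     {'gauss': 'phi1 0 Phi 0 phi2 0'})
mat.add_microstructure('grain1',
                       {'phase':       ['Aluminum'],
                        'texture':     ['cube'],
                        'fraction':    [1.0],
                        'crystallite': [1]})
print(mat)   # __repr__ renders the assembled material.config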
@@ -2,7 +2,7 @@

 # $Id$

-import os,sys,string,re,subprocess,shlex
+import os,subprocess,shlex

 class Environment():
   __slots__ = [ \
@@ -1,7 +1,7 @@
 # -*- coding: UTF-8 no BOM -*-

-# $Id$
+"""Aggregator for geometry handling"""

-from .geometry import Geometry # only one class
-from .spectral import Spectral # only one class
-from .marc     import Marc     # only one class
+from .geometry import Geometry # noqa
+from .spectral import Spectral # noqa
+from .marc     import Marc     # noqa
@@ -5,10 +5,11 @@
 import damask.geometry

 class Geometry():
-  '''
+  """
   General class for geometry parsing.
+
   Sub-classed by the individual solvers.
-  '''
+  """

   def __init__(self,solver=''):
     solverClass = {
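Geometry.__init__ dispatches to a solver subclass through a dict, cut off by this hunk. A minimal sketch of that pattern, with key names following the aggregator above (the exact keys in DAMASK's table are not shown here):

class Spectral: pass    # stand-ins for the solver-specific subclasses
class Marc:     pass

solverClass = {'spectral': Spectral,
               'marc':     Marc}

chosen = solverClass.get('spectral')        # pick implementation by name
geometry = chosen() if chosen else None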
@@ -9,7 +9,6 @@ import numpy as np

 # ******************************************************************************************
 class Rodrigues:
-# ******************************************************************************************

     def __init__(self, vector = np.zeros(3)):
       self.vector = vector

@@ -28,20 +27,22 @@ class Rodrigues:

 # ******************************************************************************************
 class Quaternion:
-# ******************************************************************************************
-    # All methods and naming conventions based off
-    # http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions
-
-    # w is the real part, (x, y, z) are the imaginary parts
-
-    # Representation of rotation is in ACTIVE form!
-    # (derived directly or through angleAxis, Euler angles, or active matrix)
-    # vector "a" (defined in coordinate system "A") is actively rotated to new coordinates "b"
-    # b = Q * a
-    # b = np.dot(Q.asMatrix(),a)
+    """
+    Orientation represented as unit quaternion
+
+    All methods and naming conventions based on http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions
+
+    w is the real part, (x, y, z) are the imaginary parts
+    Representation of rotation is in ACTIVE form!
+    (derived directly or through angleAxis, Euler angles, or active matrix)
+    vector "a" (defined in coordinate system "A") is actively rotated to new coordinates "b"
+    b = Q * a
+    b = np.dot(Q.asMatrix(),a)
+    """

     def __init__(self,
                  quatArray = [1.0,0.0,0.0,0.0]):
+      """initializes to identity if not given"""
       self.w, \
       self.x, \
       self.y, \
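The ACTIVE convention documented in the new docstring can be checked with plain numpy; the matrix entries below follow the asMatrix() formula further down in this diff, and a 90-degree rotation about z maps x to y:

import math
import numpy as np

w, x, y, z = math.cos(math.pi/4), 0.0, 0.0, math.sin(math.pi/4)   # unit quaternion, 90 deg about z
Q = np.array([[1-2*(y*y+z*z),   2*(x*y-z*w),   2*(x*z+y*w)],
              [  2*(x*y+z*w), 1-2*(x*x+z*z),   2*(y*z-x*w)],
              [  2*(x*z-y*w),   2*(x*w+y*z), 1-2*(x*x+y*y)]])
a = np.array([1.0, 0.0, 0.0])
b = Q.dot(a)                      # active rotation: b = Q * a
print(np.round(b, 6))             # -> [0. 1. 0.]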
@@ -49,19 +50,23 @@ class Quaternion:
       self.homomorph()

     def __iter__(self):
+      """components"""
       return iter([self.w,self.x,self.y,self.z])

     def __copy__(self):
+      """create copy"""
      Q = Quaternion([self.w,self.x,self.y,self.z])
      return Q

     copy = __copy__

     def __repr__(self):
+      """readable string"""
       return 'Quaternion(real=%+.6f, imag=<%+.6f, %+.6f, %+.6f>)' % \
         (self.w, self.x, self.y, self.z)

     def __pow__(self, exponent):
+      """power"""
       omega = math.acos(self.w)
       vRescale = math.sin(exponent*omega)/math.sin(omega)
       Q = Quaternion()

@@ -72,6 +77,7 @@ class Quaternion:
       return Q

     def __ipow__(self, exponent):
+      """in place power"""
       omega = math.acos(self.w)
       vRescale = math.sin(exponent*omega)/math.sin(omega)
       self.w = np.cos(exponent*omega)

@@ -81,6 +87,7 @@ class Quaternion:
       return self

     def __mul__(self, other):
+      """multiplication"""
       try:                                                          # quaternion
         Aw = self.w
         Ax = self.x

@@ -128,6 +135,7 @@ class Quaternion:
       return self.copy()

     def __imul__(self, other):
+      """in place multiplication"""
       try:                                                          # Quaternion
         Ax = self.x
         Ay = self.y

@@ -145,6 +153,7 @@ class Quaternion:
       return self

     def __div__(self, other):
+      """division"""
       if isinstance(other, (int,float,long)):
         w = self.w / other
         x = self.x / other

@@ -155,6 +164,7 @@ class Quaternion:
       return NotImplemented

     def __idiv__(self, other):
+      """in place division"""
       if isinstance(other, (int,float,long)):
         self.w /= other
         self.x /= other

@@ -163,6 +173,7 @@ class Quaternion:
       return self

     def __add__(self, other):
+      """addition"""
       if isinstance(other, Quaternion):
         w = self.w + other.w
         x = self.x + other.x

@@ -173,6 +184,7 @@ class Quaternion:
       return NotImplemented

     def __iadd__(self, other):
+      """in place addition"""
       if isinstance(other, Quaternion):
         self.w += other.w
         self.x += other.x

@@ -181,6 +193,7 @@ class Quaternion:
       return self

     def __sub__(self, other):
+      """subtraction"""
       if isinstance(other, Quaternion):
         Q = self.copy()
         Q.w -= other.w

@@ -192,6 +205,7 @@ class Quaternion:
       return self.copy()

     def __isub__(self, other):
+      """in place subtraction"""
       if isinstance(other, Quaternion):
         self.w -= other.w
         self.x -= other.x

@@ -200,6 +214,7 @@ class Quaternion:
       return self

     def __neg__(self):
+      """additive inverse"""
       self.w = -self.w
       self.x = -self.x
       self.y = -self.y

@@ -207,6 +222,7 @@ class Quaternion:
       return self

     def __abs__(self):
+      """norm"""
       return math.sqrt(self.w ** 2 + \
                        self.x ** 2 + \
                        self.y ** 2 + \

@@ -215,6 +231,7 @@ class Quaternion:
     magnitude = __abs__

     def __eq__(self,other):
+      """equal at e-8 precision"""
       return (abs(self.w-other.w) < 1e-8 and \
               abs(self.x-other.x) < 1e-8 and \
               abs(self.y-other.y) < 1e-8 and \

@@ -226,9 +243,11 @@ class Quaternion:
               abs(-self.z-other.z) < 1e-8)

     def __ne__(self,other):
+      """not equal at e-8 precision"""
       return not self.__eq__(self,other)

     def __cmp__(self,other):
+      """linear ordering"""
       return cmp(self.Rodrigues(),other.Rodrigues())

     def magnitude_squared(self):

@@ -290,9 +309,10 @@ class Quaternion:
       return np.outer([i for i in self],[i for i in self])

     def asMatrix(self):
-      return np.array([[1.0-2.0*(self.y*self.y+self.z*self.z),      2.0*(self.x*self.y-self.z*self.w),      2.0*(self.x*self.z+self.y*self.w)],
-                       [     2.0*(self.x*self.y+self.z*self.w), 1.0-2.0*(self.x*self.x+self.z*self.z),      2.0*(self.y*self.z-self.x*self.w)],
-                       [     2.0*(self.x*self.z-self.y*self.w),      2.0*(self.x*self.w+self.y*self.z), 1.0-2.0*(self.x*self.x+self.y*self.y)]])
+      return np.array(
+        [[1.0-2.0*(self.y*self.y+self.z*self.z),      2.0*(self.x*self.y-self.z*self.w),      2.0*(self.x*self.z+self.y*self.w)],
+         [     2.0*(self.x*self.y+self.z*self.w), 1.0-2.0*(self.x*self.x+self.z*self.z),      2.0*(self.y*self.z-self.x*self.w)],
+         [     2.0*(self.x*self.z-self.y*self.w),      2.0*(self.x*self.w+self.y*self.z), 1.0-2.0*(self.x*self.x+self.y*self.y)]])

     def asAngleAxis(self,
                     degrees = False):
@@ -315,15 +335,17 @@ class Quaternion:
       return np.inf*np.ones(3) if self.w == 0.0 else np.array([self.x, self.y, self.z])/self.w

     def asEulers(self,
-                 type = 'bunge',
+                 type = "bunge",
                  degrees = False,
                  standardRange = False):
-      '''
+      u"""
+      Orientation as Bunge-Euler angles
+
       conversion of ACTIVE rotation to Euler angles taken from:
       Melcher, A.; Unser, A.; Reichhardt, M.; Nestler, B.; Pötschke, M.; Selzer, M.
       Conversion of EBSD data by a quaternion based algorithm to be used for grain structure simulations
       Technische Mechanik 30 (2010) pp 401--413
-      '''
+      """
       angles = [0.0,0.0,0.0]

       if type.lower() == 'bunge' or type.lower() == 'zxz':
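As an illustration of the zxz (Bunge) convention named above — not DAMASK's exact code — the composition below builds the rotation matrix from three elementary rotations; for a rotation about z alone, Phi = 0 and only phi1 + phi2 is determined:

import math
import numpy as np

def bunge_matrix(phi1, Phi, phi2):
    # passive ZXZ composition commonly used for Bunge angles (assumption for this sketch)
    c1, s1 = math.cos(phi1), math.sin(phi1)
    C,  S  = math.cos(Phi),  math.sin(Phi)
    c2, s2 = math.cos(phi2), math.sin(phi2)
    Z1 = np.array([[ c1, s1, 0.], [-s1, c1, 0.], [0., 0., 1.]])
    X  = np.array([[ 1., 0., 0.], [0.,  C,  S ], [0., -S,  C ]])
    Z2 = np.array([[ c2, s2, 0.], [-s2, c2, 0.], [0., 0., 1.]])
    return Z2 @ X @ Z1

print(np.round(bunge_matrix(math.radians(30), 0.0, math.radians(60)), 6))  # net 90 deg about z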
@@ -369,7 +391,7 @@ class Quaternion:

     @classmethod
     def fromRandom(cls,randomSeed = None):
-      if randomSeed == None:
+      if randomSeed is None:
         randomSeed = int(os.urandom(4).encode('hex'), 16)
       np.random.seed(randomSeed)
       r = np.random.random(3)

@@ -420,7 +442,6 @@ class Quaternion:
         y = - c1 * s2 * s3 + s1 * s2 * c3
         z =   c1 * c2 * s3 + s1 * c2 * c3
       else:
-        # print 'unknown Euler convention'
         w =   c1 * c2 * c3 - s1 * s2 * s3
         x =   s1 * s2 * c3 + c1 * c2 * s3
         y =   s1 * c2 * c3 + c1 * s2 * s3

@@ -428,7 +449,8 @@ class Quaternion:
       return cls([w,x,y,z])


-    ## Modified Method to calculate Quaternion from Orientation Matrix, Source: http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/
+    # Modified Method to calculate Quaternion from Orientation Matrix,
+    # Source: http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/

     @classmethod
     def fromMatrix(cls, m):

@@ -482,8 +504,12 @@ class Quaternion:

     @classmethod
     def new_interpolate(cls, q1, q2, t):
-      # see http://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/20070017872_2007014421.pdf for (another?) way to interpolate quaternions
+      """
+      interpolation
+
+      see http://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/20070017872_2007014421.pdf
+      for (another?) way to interpolate quaternions
+      """
       assert isinstance(q1, Quaternion) and isinstance(q2, Quaternion)
       Q = cls()
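A standard slerp between unit quaternions, sketched with numpy for illustration; DAMASK's new_interpolate() may differ in its edge-case handling:

import numpy as np

def slerp(q1, q2, t):
    q1, q2 = np.asarray(q1, float), np.asarray(q2, float)
    cosom = np.dot(q1, q2)
    if cosom < 0.0:                      # take the short arc
        q2, cosom = -q2, -cosom
    if cosom > 0.9995:                   # nearly parallel: fall back to normalized lerp
        out = (1.0-t)*q1 + t*q2
        return out/np.linalg.norm(out)
    omega = np.arccos(cosom)
    return (np.sin((1.0-t)*omega)*q1 + np.sin(t*omega)*q2)/np.sin(omega)

print(slerp([1,0,0,0], [np.cos(np.pi/4),0,0,np.sin(np.pi/4)], 0.5))   # half of a 90 deg z-rotation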
@@ -522,11 +548,11 @@ class Quaternion:

 # ******************************************************************************************
 class Symmetry:
-# ******************************************************************************************

   lattices = [None,'orthorhombic','tetragonal','hexagonal','cubic',]

   def __init__(self, symmetry = None):
+    """lattice with given symmetry, defaults to None"""
     if isinstance(symmetry, basestring) and symmetry.lower() in Symmetry.lattices:
       self.lattice = symmetry.lower()
     else:

@@ -534,29 +560,31 @@ class Symmetry:

   def __copy__(self):
+    """copy"""
     return self.__class__(self.lattice)

   copy = __copy__

   def __repr__(self):
+    """readable string"""
     return '%s' % (self.lattice)

   def __eq__(self, other):
+    """equal"""
     return self.lattice == other.lattice

   def __neq__(self, other):
+    """not equal"""
     return not self.__eq__(other)

   def __cmp__(self,other):
+    """linear ordering"""
     return cmp(Symmetry.lattices.index(self.lattice),Symmetry.lattices.index(other.lattice))

   def symmetryQuats(self,who = []):
-    '''
-    List of symmetry operations as quaternions.
-    '''
+    """List of symmetry operations as quaternions."""
     if self.lattice == 'cubic':
       symQuats = [
                    [ 1.0, 0.0, 0.0, 0.0 ],

@@ -629,18 +657,15 @@ class Symmetry:
   def equivalentQuaternions(self,
                             quaternion,
                             who = []):
-    '''
-    List of symmetrically equivalent quaternions based on own symmetry.
-    '''
+    """List of symmetrically equivalent quaternions based on own symmetry."""
     return [quaternion*q for q in self.symmetryQuats(who)]

   def inFZ(self,R):
-    '''
-    Check whether given Rodrigues vector falls into fundamental zone of own symmetry.
-    '''
+    """Check whether given Rodrigues vector falls into fundamental zone of own symmetry."""
     if isinstance(R, Quaternion): R = R.asRodrigues()               # translate accidentally passed quaternion
-    R = abs(R)                                                      # fundamental zone in Rodrigues space is point symmetric around origin
+    # fundamental zone in Rodrigues space is point symmetric around origin
+    R = abs(R)
     if self.lattice == 'cubic':
       return math.sqrt(2.0)-1.0 >= R[0] \
         and math.sqrt(2.0)-1.0 >= R[1] \
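A sketch of the cubic fundamental-zone test in Rodrigues space quoted above: every |R_i| must stay below sqrt(2)-1 (the full code adds a further condition on their sum, which this hunk truncates):

import math
import numpy as np

def in_cubic_fz(rodrigues):
    R = np.abs(np.asarray(rodrigues, float))     # zone is point-symmetric about the origin
    return bool(np.all(math.sqrt(2.0) - 1.0 >= R))

print(in_cubic_fz([0.05, 0.02, 0.01]))   # True: small rotation lies inside
print(in_cubic_fz([0.9, 0.0, 0.0]))      # False: exceeds sqrt(2)-1 ~ 0.414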
@@ -662,12 +687,13 @@ class Symmetry:

   def inDisorientationSST(self,R):
-    '''
+    """
     Check whether given Rodrigues vector (of misorientation) falls into standard stereographic triangle of own symmetry.

     Determination of disorientations follow the work of A. Heinz and P. Neumann:
     Representation of Orientation and Disorientation Data for Cubic, Hexagonal, Tetragonal and Orthorhombic Crystals
     Acta Cryst. (1991). A47, 780-789
-    '''
+    """
     if isinstance(R, Quaternion): R = R.asRodrigues()               # translate accidentally passed quaternion

     epsilon = 0.0

@@ -691,11 +717,12 @@ class Symmetry:
            vector,
            proper = False,
            color = False):
-    '''
+    """
     Check whether given vector falls into standard stereographic triangle of own symmetry.

     proper considers only vectors with z >= 0, hence uses two neighboring SSTs.
     Return inverse pole figure color if requested.
-    '''
+    """
 #    basis = {'cubic' :    np.linalg.inv(np.array([[0.,0.,1.],                         # direction of red
 #                                                  [1.,0.,1.]/np.sqrt(2.),             # direction of green
 #                                                  [1.,1.,1.]/np.sqrt(3.)]).transpose()),  # direction of blue

@@ -752,15 +779,15 @@ class Symmetry:
       inSST = np.all(theComponents >= 0.0)
     else:
       v = np.array(vector,dtype = float)
       if proper:                                                    # check both improper ...
         theComponents = np.dot(basis['improper'],v)
         inSST = np.all(theComponents >= 0.0)
         if not inSST:                                               # ... and proper SST
           theComponents = np.dot(basis['proper'],v)
           inSST = np.all(theComponents >= 0.0)
       else:
-        v[2] = abs(v[2])                                            # z component projects identical for positive and negative values
-        theComponents = np.dot(basis['improper'],v)
+        v[2] = abs(v[2])                                            # z component projects identical
+        theComponents = np.dot(basis['improper'],v)                 # for positive and negative values
         inSST = np.all(theComponents >= 0.0)

     if color:                                                       # have to return color array

@@ -781,7 +808,6 @@ class Symmetry:

 # ******************************************************************************************
 class Orientation:
-# ******************************************************************************************

   __slots__ = ['quaternion','symmetry']
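The SST test above reduces to expressing the vector in a basis spanned by the triangle's corner directions and requiring non-negative components. A cubic sketch using the [001]/[101]/[111] corners hinted at in the commented-out basis:

import numpy as np

corners = np.array([[0., 0., 1.],                    # direction of red
                    [1., 0., 1.] / np.sqrt(2.),      # direction of green
                    [1., 1., 1.] / np.sqrt(3.)]).T   # direction of blue
basis = np.linalg.inv(corners)

v = np.array([0.1, 0.05, 1.0])
v /= np.linalg.norm(v)
components = basis.dot(v)
print(np.all(components >= 0.0))                     # True -> v lies in the SST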
@@ -791,7 +817,7 @@ class Orientation:
                angleAxis = None,
                matrix = None,
                Eulers = None,
-               random = False,                                      # put any integer to have a fixed seed or True for real random
+               random = False,                                      # integer to have a fixed seed or True for real random
                symmetry = None,
               ):
     if random:                                                      # produce random orientation

@@ -815,12 +841,14 @@ class Orientation:
       self.symmetry = Symmetry(symmetry)

   def __copy__(self):
+    """copy"""
     return self.__class__(quaternion=self.quaternion,symmetry=self.symmetry.lattice)

   copy = __copy__

   def __repr__(self):
+    """value as all implemented representations"""
     return 'Symmetry: %s\n' % (self.symmetry) + \
            'Quaternion: %s\n' % (self.quaternion) + \
            'Matrix:\n%s\n' % ( '\n'.join(['\t'.join(map(str,self.asMatrix()[i,:])) for i in range(3)]) ) + \

@@ -863,10 +891,7 @@ class Orientation:
            self.equivalentQuaternions(who))

   def reduced(self):
-    '''
-    Transform orientation to fall into fundamental zone according to symmetry
-    '''
+    """Transform orientation to fall into fundamental zone according to symmetry"""

     for me in self.symmetry.equivalentQuaternions(self.quaternion):
       if self.symmetry.inFZ(me.asRodrigues()): break

@@ -876,13 +901,13 @@ class Orientation:
   def disorientation(self,
                      other,
                      SST = True):
-    '''
+    """
     Disorientation between myself and given other orientation.

     Rotation axis falls into SST if SST == True.
     (Currently requires same symmetry for both orientations.
     Look into A. Heinz and P. Neumann 1991 for cases with differing sym.)
-    '''
+    """

     if self.symmetry != other.symmetry: raise TypeError('disorientation between different symmetry classes not supported yet.')

     misQ = self.quaternion.conjugated()*other.quaternion
@@ -900,32 +925,27 @@ class Orientation:
           if breaker: break
         if breaker: break

+    # disorientation, own sym, other sym, self-->other: True, self<--other: False
     return (Orientation(quaternion = theQ,symmetry = self.symmetry.lattice),
-            i,j,k == 1)                                             # disorientation, own sym, other sym, self-->other: True, self<--other: False
+            i,j,k == 1)

   def inversePole(self,
                   axis,
                   proper = False,
                   SST = True):
-    '''
-    axis rotated according to orientation (using crystal symmetry to ensure location falls into SST)
-    '''
+    """axis rotated according to orientation (using crystal symmetry to ensure location falls into SST)"""

     if SST:                                                         # pole requested to be within SST
       for i,q in enumerate(self.symmetry.equivalentQuaternions(self.quaternion)):  # test all symmetric equivalent quaternions
         pole = q.conjugated()*axis                                  # align crystal direction to axis
         if self.symmetry.inSST(pole,proper): break                  # found SST version
     else:
       pole = self.quaternion.conjugated()*axis                      # align crystal direction to axis

     return (pole,i if SST else 0)

   def IPFcolor(self,axis):
-    '''
-    TSL color of inverse pole figure for given axis
-    '''
+    """TSL color of inverse pole figure for given axis"""

     color = np.zeros(3,'d')

     for q in self.symmetry.equivalentQuaternions(self.quaternion):

@@ -939,7 +959,9 @@ class Orientation:
   def average(cls,
               orientations,
               multiplicity = []):
-    """RETURN THE AVERAGE ORIENTATION
+    """
+    average orientation
+
     ref: F. Landis Markley, Yang Cheng, John Lucas Crassidis, and Yaakov Oshman.
          Averaging Quaternions,
          Journal of Guidance, Control, and Dynamics, Vol. 30, No. 4 (2007), pp. 1193-1197.

@@ -949,7 +971,6 @@ class Orientation:
       b = Orientation(Eulers=np.radians([20, 0, 0]), symmetry='hexagonal')
       avg = Orientation.average([a,b])
     """

     if not all(isinstance(item, Orientation) for item in orientations):
       raise TypeError("Only instances of Orientation can be averaged.")

@@ -960,8 +981,7 @@ class Orientation:
     reference = orientations[0]                                     # take first as reference
     for i,(o,n) in enumerate(zip(orientations,multiplicity)):
       closest = o.equivalentOrientations(reference.disorientation(o,SST = False)[2])[0]  # select sym orientation with lowest misorientation
-      M = closest.quaternion.asM() * n if i == 0 else M + closest.quaternion.asM() * n   # add (multiples) of this orientation to average
+      M = closest.quaternion.asM() * n if i == 0 else M + closest.quaternion.asM() * n   # noqa add (multiples) of this orientation to average

     eig, vec = np.linalg.eig(M/N)

     return Orientation(quaternion = Quaternion(quatArray = np.real(vec.T[eig.argmax()])),
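The averaging scheme cited above (Markley et al. 2007), sketched with numpy: accumulate the (weighted) outer products of the quaternion components — the asM() seen earlier — and take the eigenvector of the largest eigenvalue as the average unit quaternion:

import numpy as np

def average_quaternions(quats, weights=None):
    quats = np.asarray(quats, float)
    weights = np.ones(len(quats)) if weights is None else np.asarray(weights, float)
    M = sum(n * np.outer(q, q) for q, n in zip(quats, weights))   # sum of weighted outer products
    eig, vec = np.linalg.eigh(M / weights.sum())                  # M is symmetric
    return vec[:, eig.argmax()]                                   # average, up to sign

q1 = [1.0, 0.0, 0.0, 0.0]                                         # identity
q2 = [np.cos(np.pi/36), 0.0, 0.0, np.sin(np.pi/36)]               # 10 deg about z
print(average_quaternions([q1, q2]))                              # ~5 deg about z (possibly negated)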
@@ -11,10 +11,11 @@ except (ImportError) as e:
   sys.stderr.write('\nREMARK: h5py module not available \n\n')

 class Result():
-  '''
+  """
   General class for result parsing.
-  Needs h5py to be installed
-  '''
+
+  Needs h5py to be installed
+  """

   def __init__(self,resultsFile):
     self.data=h5py.File(resultsFile,"r")
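Minimal h5py usage matching Result.__init__ above; the file name is hypothetical, since the result-file layout is not part of this diff:

import h5py

with h5py.File('job.hdf5', 'r') as data:   # same call as Result.__init__
    data.visit(print)                      # list the groups/datasets present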
@@ -1,6 +1,5 @@
 # -*- coding: UTF-8 no BOM -*-

-# $Id$
 # This tool converts a msc.marc result file into the vtk format that
 # can be viewed by Paraview software (Kitware), or MayaVi (needs xml-vtk, or ...
 #

@@ -8,13 +7,8 @@
 # Some example vtk files: http://people.sc.fsu.edu/~jburkardt/data/vtk/vtk.html
 # www.paraview.org

-import os,sys,math,time,re
-# python external
-try:
-  import numpy as N
-  import numpy
-except:
-  print('Could not import numpy.')
+import os,sys,re
+import numpy as np

 import py_post # MSC closed source module to access marc result files
@@ -27,7 +21,7 @@ class MARC_POST():
     self.fpath=os.path.join(self.projdir,self.postname)
     print('Trying to open ',self.fpath,' ...')
     self.p=py_post.post_open(self.fpath)
-    if self.p==None:
+    if self.p is None:
       print('Could not open %s.'%self.postname); #return 'err'#; sys.exit(1)
       raise Exception('Could not open t16')
     print('Postfile %s%s is open ...'%(self.projdir,self.postname))

@@ -105,7 +99,6 @@ class MARC_POST():
   def writeNodes2VTK(self, fobj):
     self.points=[]
     self.VTKcnt=200 # number of values per line in vtk file
-    ndCnt=1
     fobj.write('POINTS %i'%self.p.nodes()+' float\n')
     self.nodes_dict={} # store the node IDs in case of holes in the numbering
     for iNd in self.nodes:

@@ -126,8 +119,6 @@ class MARC_POST():
       el=self.p.element(iEl)
       cell_nodes=[] # for pyvtk
       ndlist=el.items
-      #for k in [0, 1, 3, 2, 4, 5, 7, 6]: # FOR CELL TYPE VTK_VOXEL
-      #for k in [0, 4, 3, 1, 5, 7, 6, 2]:
       for k in [0, 1, 2, 3, 4, 5, 6, 7]: # FOR CELL TYPE VTK_HEXAHEDRON
         node=ndlist[k]-1
         cell_nodes.append(self.nodes_dict[node])

@@ -147,7 +138,6 @@ class MARC_POST():
         fobj.write('\n');cnt=0
     fobj.write('\n')
     print('Elements written to VTK: %i'%self.p.elements())
-    #print('Nr of nodes: ',self.nodes)

   def writeElScalars2NodesVTK(self,fobj):
     fobj.write('\nPOINT_DATA %i\n'%self.p.nodes())

@@ -157,7 +147,6 @@ class MARC_POST():
     fobj.write('LOOKUP_TABLE default\n')
     idxScal=self.nscal_list.index('Displacement Z')
     for iNd in self.nodes:
-      #fobj.write('%f %f '%(self.p.node_scalar(iNd,idxScal), N.random.rand()))
       fobj.write('%f '%(self.p.node_scalar(iNd,idxScal)))
       for iEl in range(0,self.nel):
         el=self.p.element(iEl)

@@ -173,8 +162,6 @@ class MARC_POST():

   def writeNodeScalars2VTK(self,fobj):
     fobj.write('\nPOINT_DATA %i\n'%self.p.nodes())
-    nNdDat=self.nscals
-    nComponents=1+nNdDat
     self.pointDataScalars=[]
     for idxNdScal in range(-3,self.nscals): #now include node x,y,z
       if idxNdScal>=0:

@@ -209,8 +196,6 @@ class MARC_POST():
     idx_sig_vMises=self.getLabelNr('Equivalent Von Mises Stress')
     idx_sig33=self.getLabelNr('Comp 33 of Cauchy Stress')
     fobj.write('\nCELL_DATA %i\n'%self.p.elements())
-    nElDat=self.elscals
-    nComponents=1+nElDat
     for idxElScal in range(0,self.elscals):
       datalabel=self.elscal_list[idxElScal]
       datalabel=re.sub("\s",'_',datalabel)

@@ -251,18 +236,15 @@ class MARC_POST():
     return result

   def writeUniaxiality2VTK(self,fobj):
-    #fobj.write('\nCELL_DATA %i\n'%self.p.elements())
     datalabel='uniaxiality_sig_vMises_durch_sig33'
     fobj.write('SCALARS %s float %i\n'%(datalabel,1))
     fobj.write('LOOKUP_TABLE default\n')
     cnt=0
     for iEl in range(0,self.nel):
       cnt=cnt+1
-      #if abs(self.sig33[iEl])<1e-5:
       if abs(self.sig_vMises[iEl])<1e-5:
         datum=0.
       else:
-        #datum=self.sig_vMises[iEl]/self.sig33[iEl]
         datum=self.sig33[iEl]/self.sig_vMises[iEl]
       fobj.write('%E '%(datum))
       if cnt>self.VTKcnt:
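The writers above emit the legacy ASCII VTK layout (POINTS / CELLS / CELL_TYPES, type 12 = hexahedron). A self-contained miniature of that format for a single cell, written by hand the same way:

with open('one_hex.vtk', 'w') as f:
    f.write('# vtk DataFile Version 2.0\none hexahedron\nASCII\n')
    f.write('DATASET UNSTRUCTURED_GRID\n')
    f.write('POINTS 8 float\n')
    for x in (0., 1.):
        for y in (0., 1.):
            for z in (0., 1.):
                f.write('%f %f %f\n' % (x, y, z))
    f.write('CELLS 1 9\n8 0 4 6 2 1 5 7 3\n')   # 8 node IDs per cell, VTK hexahedron ordering
    f.write('CELL_TYPES 1\n12\n')               # 12 = VTK_HEXAHEDRON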
@@ -283,8 +265,8 @@ class MARC_POST():
       self.mean_stress.append(self.meanStress(sig))

   def triaxiality_per_element(self):
     # classical triaxiality
     # 1/3 : uniax tension
     self.triaxiality=[]
     for iEl in range(0,self.nel):
       t=self.mean_stress[iEl]/self.sig_vMises[iEl]

@@ -303,10 +285,6 @@ class MARC_POST():
     fobj.write('\n')

   def calc_lode_parameter(self):
-    # [-1 ... +1] see e.g. Wippler & Boehlke triaxiality measures doi:10.1002/pamm.201010061
-    # +1 : uniax tensile?
-    #  0 : shear
-    # -1 : uniax compr ?
     self.lode=[]
     try:
       self.stress

@@ -328,10 +306,11 @@ class MARC_POST():
   def princStress(self, stress):
     """
     Function to compute 3D principal stresses and sort them.

     from: http://geodynamics.org/svn/cig/short/3D/PyLith/trunk/playpen/postproc/vtkcff.py
     """
-    stressMat=N.array(stress)
-    (princStress, princAxes) = numpy.linalg.eigh(stressMat)
+    stressMat=np.array(stress)
+    (princStress, princAxes) = np.linalg.eigh(stressMat)
     idx = princStress.argsort()
     princStressOrdered = princStress[idx]
     princAxesOrdered = princAxes[:,idx]
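The scalar measures used above, computed for a sample stress tensor with numpy: von Mises stress from the deviator, mean (hydrostatic) stress from the trace, and their ratio as the classical triaxiality (1/3 for uniaxial tension):

import numpy as np

sig = np.array([[100., 0., 0.],
                [  0., 0., 0.],
                [  0., 0., 0.]])              # uniaxial tension, 100 MPa

mean = np.trace(sig) / 3.0                    # hydrostatic part
dev = sig - mean * np.eye(3)                  # deviatoric part
vm = np.sqrt(1.5 * np.sum(dev * dev))         # equivalent von Mises stress
print(mean / vm)                              # triaxiality -> 0.333...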
@@ -339,36 +318,28 @@ class MARC_POST():

   def avg_elten(self,
                 idxElTen, mat=0, elID=None):
-    tensum=N.zeros((3,3));
-    T=N.zeros((3,3));
+    tensum=np.zeros((3,3));
+    T=np.zeros((3,3));
     pts=0;
-    avg=N.zeros((3,3));
-    #print 'Element Scalars'
-    #print self.p.element_scalar_label(elscal2)
-    if elID==None:
+    avg=np.zeros((3,3));
+    if elID is None:
       averaged_elements=range(0,self.nel)
     else:
       averaged_elements=[elID]
-    #for i in range (0,self.nel):
     for i in averaged_elements:
       if mat==0 or int(self.p.element_scalar(i,4)[0].value)==mat:
-        eldata=self.p.element(i)
         T=self.p.element_tensor(i,idxElTen)
         for k in range (0,8):
           tensum[0][0] = tensum[0][0] + T[k].t11
           tensum[0][1] = tensum[0][1] + T[k].t12
           tensum[0][2] = tensum[0][2] + T[k].t13
-          #tensum1[1][0] = tensum1[1][0] + T1[k].t21
           tensum[1][1] = tensum[1][1] + T[k].t22
           tensum[1][2] = tensum[1][2] + T[k].t23
-          #tensum1[2][0] = tensum1[2][0] + T1[k].t31
-          #tensum1[2][1] = tensum1[2][1] + T1[k].t32
           tensum[2][2] = tensum[2][2] + T[k].t33
         pts=pts+1
     avg=tensum/pts
-    #print avg
     avg=self.fillComponents(avg)
-    #print avg
     del [T]
     return (avg,tensum,pts)

@@ -384,7 +355,7 @@ class MARC_POST():
     t=tensor33
     s=(t[0,0]-t[1,1])**2+(t[1,1]-t[2,2])**2+(t[0,0]-t[2,2])**2+\
       6*(t[0,1]**2+t[1,2]**2+t[2,0]**2)
-    vM=N.sqrt(s/2.)
+    vM=np.sqrt(s/2.)
     return vM

   def meanStress(self,tensor33):

@@ -398,7 +369,6 @@ class MARC_POST():
     I1=t[0,0]+t[1,1]+t[2,2]
     I2=t[0,0]*t[1,1]+t[1,1]*t[2,2]+t[0,0]*t[2,2]-\
       t[0,1]**2-t[1,2]**2-t[0,2]**2
-    # I3 = det(t)
     I3=t[0,0]*t[1,1]*t[2,2]+\
       2*t[0,1]*t[1,2]*t[2,0]-\
       t[2,2]*t[0,1]**2-t[0,0]*t[1,2]**2-t[1,1]*t[0,2]**2

@@ -406,17 +376,18 @@ class MARC_POST():


 class VTK_WRITER():
-  '''
+  """
   The resulting vtk-file can be imported in Paraview 3.12
+
   Then use Filters: Cell Data to Point Data + Contour
   to plot semi-transparent iso-surfaces.
-  '''
+  """

   import re
   def __init__(self):
     self.p=MARC_POST() # self.p

   def openFile(self, filename='test.vtp'):
-    #if not self.f:#==None:
     self.f=open(filename,'w+')
     self.fname=filename

@@ -427,7 +398,7 @@ class VTK_WRITER():
                       dformat='ASCII', # BINARY | [ASCII]
                       dtype='UNSTRUCTURED_GRID' # UNSTRUCTURED GRID
                       ):
-    if vtkFile==None:
+    if vtkFile is None:
       vtkFile=self.f
     # First Line contains Data format version
     self.versionVTK=version

@@ -440,7 +411,6 @@ class VTK_WRITER():

   def marc2vtkBatch(self):
     for iori in range(1,63):
-      #self.p=msc_post.MSC_POST()
       self.p.postname='indent_fric0.3_R2.70_cA146.0_h0.320_ori%03i_OST_h19d.t16'%(iori)
       if os.path.exists(self.p.postname):
         self.marc2vtk(mode='fast', batchMode=1)

@@ -496,14 +466,14 @@ class VTK_WRITER():
   def scaleBar(self, length=1.0, posXYZ=[0., 0., 0.]):
     self.fsb=open('micronbar_l%.1f.vtp'%length,'w+')
     self.writeFirstLines(self.fsb, comment='micronbar')
-    pts=N.array([])
+    pts=np.array([])
     width=length*1.
     height=length*1.
-    wVec=N.array([0., width, 0.])
-    lVec=N.array([length,0.,0.])
-    hVec=N.array([0.,0.,height])
+    wVec=np.array([0., width, 0.])
+    lVec=np.array([length,0.,0.])
+    hVec=np.array([0.,0.,height])
     posXYZ=posXYZ-0.5*wVec-0.5*lVec#-0.5*hVec # CENTERING Y/N
-    posXYZ=N.array(posXYZ)
+    posXYZ=np.array(posXYZ)
     pts=[posXYZ, posXYZ+lVec,
          posXYZ+wVec,
          posXYZ+wVec+lVec]

@@ -514,34 +484,22 @@ class VTK_WRITER():
       self.fsb.write('%f %f %f\n'%(pts[npts][0], pts[npts][1], pts[npts][2]))
     if 1: #Triad
       nCells=3
-      #nCells=1 #One Line
       ptsPerCell=2 # Lines (Type=3)
-      #ptsPerCell=4 # Quads (Type=9)
-      #ptsPerCell=8 # Hexahedron (Type=12)
       cellSize=(ptsPerCell+1)*nCells
       self.fsb.write('CELLS %i %i\n'%(nCells,cellSize))
       self.fsb.write('2 0 1\n') #X-Line
       self.fsb.write('2 0 2\n') #Y-Line
       self.fsb.write('2 0 4\n') #Z-Line
-      #self.fsb.write('4 0 1 3 2\n') #Quad
-      #self.fsb.write('%i 0 1 3 2 4 5 7 6\n'%ptsPerCell) #Hexahedron
       self.fsb.write('CELL_TYPES %i\n'%(nCells))
       self.fsb.write('3\n3\n3\n')#Line
-      #self.fsb.write('12\n')#Hexahedron
     else: # Cube, change posXYZ
       nCells=1
       ptsPerCell=2 # Lines (Type=3)
-      #ptsPerCell=4 # Quads (Type=9)
-      #ptsPerCell=8 # Hexahedron (Type=12)
       cellSize=(ptsPerCell+1)*nCells
       self.fsb.write('CELLS %i %i\n'%(nCells,cellSize))
       self.fsb.write('2 0 1\n') #Line
-      #self.fsb.write('4 0 1 3 2\n') #Quad
-      #self.fsb.write('%i 0 1 3 2 4 5 7 6\n'%ptsPerCell) #Hexahedron
       self.fsb.write('CELL_TYPES %i\n'%(nCells))
       self.fsb.write('3\n')#Line
-      #self.fsb.write('12\n')#Hexahedron

     self.fsb.write('\n')
     self.fsb.close()
@ -549,8 +507,7 @@ class VTK_WRITER():
|
||||||
|
|
||||||
def example_unstructured(self):
|
def example_unstructured(self):
|
||||||
self.openFile(filename='example_unstructured_grid.vtk')
|
self.openFile(filename='example_unstructured_grid.vtk')
|
||||||
#self.writeFirstLines()
|
self.f.write("""
|
||||||
self.f.write('''
|
|
||||||
# vtk DataFile Version 2.0
|
# vtk DataFile Version 2.0
|
||||||
example_unstruct_grid
|
example_unstruct_grid
|
||||||
ASCII
|
ASCII
|
||||||
|
@@ -590,61 +547,40 @@ LOOKUP_TABLE default
 1.02
 1.50
 0.00
-3 5 6 23423423423423423423.23423423''')
+3 5 6 23423423423423423423.23423423""")
     self.f.close()


  def writeNodes2VTK(self, fobj):
     self.VTKcnt=200   # how many numbers per line in vtk file
-    #self.VTKcnt=6
-    ndCnt=1
-    #self.nodes=range(0,10)
     fobj.write('POINTS %i'%self.p.nodes()+' float\n')
     for iNd in self.nodes:
       nd=self.p.node(iNd)
       disp=self.p.node_displacement(iNd)
-      #contact=self.p.node_scalar(iNd,contactNr)
-      #ndCnt=ndCnt+1
       fobj.write('%f %f %f \n'%
-        #(nd.x, nd.y, nd.z))
         (nd.x+disp[0], nd.y+disp[1], nd.z+disp[2]))

-      #if ndCnt>6:
-      #  fobj.write('\n')
-      #  ndCnt=1
     fobj.write('\n')
     print('Nodes written to VTK: %i'%self.p.nodes())
-    #print('Nr of nodes: ',self.nodes)

  def writeElements2VTK(self, fobj):
     fobj.write('\nCELLS %i %i'%(self.p.elements(),self.p.elements()*9)+'\n')
     for iEl in range(0,self.nel):
       el=self.p.element(iEl)
-      #disp=self.p.node_displacement(iNd)
-      #contact=self.p.node_scalar(iNd,contactNr)
-      #ndCnt=ndCnt+1
       fobj.write('8 ')
       ndlist=el.items
-      #for k in [0, 1, 3, 2, 4, 5, 7, 6]:   # FOR CELL TPYE VTK_VOXEL
-      #for k in [0, 4, 3, 1, 5, 7, 6, 2]:
       for k in [0, 1, 2, 3, 4, 5, 6, 7]:    # FOR CELL TYPE VTK_HEXAHEDRON
         fobj.write('%6i '%(ndlist[k]-1))
       fobj.write('\n')
-      #if ndCnt>6:
-      #  fobj.write('\n')
-      #  ndCnt=1
     fobj.write('\nCELL_TYPES %i'%self.p.elements()+'\n')
     cnt=0
     for iEl in range(0,self.nel):
       cnt=cnt+1
-      #fobj.write('11\n') #VTK_VOXEL
       fobj.write('12 ')  #VTK_HEXAHEDRON
       if cnt>self.VTKcnt:
         fobj.write('\n');cnt=0
     fobj.write('\n')
     print('Elements written to VTK: %i'%self.p.elements())
-    #print('Nr of nodes: ',self.nodes)

  def writeElScalars2NodesVTK(self,fobj):
     fobj.write('\nPOINT_DATA %i\n'%self.p.nodes())
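The two writers above emit the legacy ASCII VTK layout: a POINTS block of displaced node coordinates, followed by CELLS with zero-based connectivity (hence the ndlist[k]-1) and CELL_TYPES with code 12 for VTK_HEXAHEDRON. A minimal standalone sketch of that same layout; the file name and the unit-cube coordinates are made up, not taken from the commit:

# sketch only: one hexahedron in legacy ASCII VTK, mirroring writeNodes2VTK/writeElements2VTK
points = [(0.,0.,0.),(1.,0.,0.),(1.,1.,0.),(0.,1.,0.),
          (0.,0.,1.),(1.,0.,1.),(1.,1.,1.),(0.,1.,1.)]
with open('single_hex.vtk','w') as f:
  f.write('# vtk DataFile Version 2.0\n'
          'single hexahedron\n'
          'ASCII\n'
          'DATASET UNSTRUCTURED_GRID\n')
  f.write('POINTS %i float\n'%len(points))
  for x,y,z in points:
    f.write('%f %f %f\n'%(x,y,z))
  f.write('\nCELLS 1 9\n8 0 1 2 3 4 5 6 7\n')    # 8 node ids per cell, zero-based
  f.write('\nCELL_TYPES 1\n12\n')                # 12 = VTK_HEXAHEDRON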
@@ -668,10 +604,7 @@ LOOKUP_TABLE default
       fobj.write('\n')

  def writeNodeScalars2VTK(self,fobj):
-    #print('writeElementData2VTK')
     fobj.write('\nPOINT_DATA %i\n'%self.p.nodes())
-    nNdDat=self.nscals
-    nComponents=1+nNdDat
     for idxNdScal in range(-3,self.nscals):   # include node x,y,z
       if idxNdScal>=0:
         datalabel=self.nscal_list[idxNdScal]
@@ -700,10 +633,7 @@ LOOKUP_TABLE default
       fobj.write('\n')

  def writeElementData2VTK(self,fobj):
-    #print('writeElementData2VTK')
     fobj.write('\nCELL_DATA %i\n'%self.p.elements())
-    nElDat=self.elscals
-    nComponents=1+nElDat
     for idxElScal in range(0,self.elscals):
       datalabel=self.elscal_list[idxElScal]
       datalabel=re.sub("\s",'_',datalabel)
@@ -730,7 +660,7 @@ LOOKUP_TABLE default
  def example1(self):
     self.openFile()
     self.writeFirstLines()
-    self.f.write('''DATASET POLYDATA
+    self.f.write("""DATASET POLYDATA
 POINTS 8 float
 0.0 0.0 0.0
 1.0 0.0 0.0
@@ -789,18 +719,20 @@ LOOKUP_TABLE my_table 8
 0.0 0.0 1.0 1.0
 1.0 0.0 1.0 1.0
 0.0 1.0 1.0 1.0
-1.0 1.0 1.0 1.0''')
+1.0 1.0 1.0 1.0""")
     self.f.close()


import pyvtk
class marc_to_vtk():
-  '''
+  """
   Anybody wants to implement it with pyvtk?
+
   The advantage would be that pyvtk can also write the
   <xml>-VTK format and binary.
   These can be plotted with mayavi.
-  '''
+  """

  def __init__(self):
     self.p=[] #MARC_POST() # self.p
@@ -810,5 +742,4 @@ class marc_to_vtk():
                        hexahedron=self.p.cells),
           'm2v output')
     vtk.tofile('m2v_file')
-    #vtk.tofile('example3b','binary')
-    #VtkData('example3')
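The marc_to_vtk docstring asks whether the export could be done with pyvtk; a hedged sketch of what that might look like, assuming pyvtk is installed and using made-up points/cells in place of self.p.points/self.p.cells:

import pyvtk

points = [(0.,0.,0.),(1.,0.,0.),(1.,1.,0.),(0.,1.,0.),
          (0.,0.,1.),(1.,0.,1.),(1.,1.,1.),(0.,1.,1.)]   # made-up node coordinates
cells  = [[0,1,2,3,4,5,6,7]]                             # one hexahedron, zero-based

vtk = pyvtk.VtkData(pyvtk.UnstructuredGrid(points,hexahedron=cells),
                    'm2v output')
vtk.tofile('m2v_file')                                   # ASCII .vtk
vtk.tofile('m2v_file_binary','binary')                   # same data, binary encoding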
@@ -1,8 +1,7 @@
 # -*- coding: UTF-8 no BOM -*-
+"""Tools to control the various BVP solvers"""

-# $Id$
-
-from .solver   import Solver   # only one class
-from .spectral import Spectral # only one class
-from .marc     import Marc     # only one class
-from .abaqus   import Abaqus   # only one class
+from .solver   import Solver   # noqa
+from .spectral import Spectral # noqa
+from .marc     import Marc     # noqa
+from .abaqus   import Abaqus   # noqa
@@ -7,7 +7,8 @@ from .solver import Solver

class Abaqus(Solver):

-  def __init__(self,version='',solver=''):   # example of version string: 6.12-2, solver: either std or exp
+  def __init__(self,version='',solver=''):   # example version string: 6.12-2, solver: std or exp
     self.solver='Abaqus'
     if version =='':
       import subprocess
@@ -7,9 +7,7 @@ from .solver import Solver

class Marc(Solver):

-#--------------------------
  def __init__(self):
-#--------------------------
     self.solver = 'Marc'
     self.releases = { \
              '2015': ['linux64',''],
@@ -24,7 +22,6 @@ class Marc(Solver):

#--------------------------
  def version(self,rootRelation = ''):
-#--------------------------
     import os,damask.environment

     MSCpath = damask.environment.Environment(rootRelation).options['MSC_ROOT']
@@ -40,7 +37,6 @@ class Marc(Solver):

#--------------------------
  def libraryPath(self,rootRelation = '',releases = []):
-#--------------------------
     import os,damask.environment

     MSCpath = damask.environment.Environment(rootRelation).options['MSC_ROOT']
@@ -59,7 +55,6 @@ class Marc(Solver):

#--------------------------
  def toolsPath(self,rootRelation = '',release = ''):
-#--------------------------
     import os,damask.environment

     MSCpath = damask.environment.Environment(rootRelation).options['MSC_ROOT']
@@ -72,7 +67,6 @@ class Marc(Solver):

#--------------------------
  def submit_job(self,
-#--------------------------
                 rootRelation = '',
                 release      = '',
                 model        = 'model',
@@ -84,7 +78,7 @@ class Marc(Solver):
                ):

     import os,damask.environment
-    import subprocess,shlex,shutil
+    import subprocess,shlex

     if len(release) == 0: release = self.version(rootRelation)

@@ -94,7 +88,7 @@ class Marc(Solver):

     damaskEnv = damask.environment.Environment(rootRelation)

-    user = os.path.join(damaskEnv.relPath('code/'),'DAMASK_marc')            # might be updated if special version is found (usually symlink)
+    user = os.path.join(damaskEnv.relPath('code/'),'DAMASK_marc')            # might be updated if special version (symlink) is found
     if compile:
       if os.path.isfile(os.path.join(damaskEnv.relPath('code/'),'DAMASK_marc%s.f90'%release)):
         user = os.path.join(damaskEnv.relPath('code/'),'DAMASK_marc%s'%release)
@@ -123,7 +117,6 @@ class Marc(Solver):

#--------------------------
  def exit_number_from_outFile(self,outFile=None):
-#--------------------------
     import string
     exitnumber = -1
     fid_out = open(outFile,'r')
@@ -5,10 +5,11 @@
import damask.solver

class Solver():
-  '''
+  """
   General class for solver specific functionality.
-  Sub-classed by the individual solvers.
-  '''
+
+  Sub-classed by the individual solvers.
+  """

  def __init__(self,solver=''):
     solverClass = {
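The hunk cuts off at solverClass = {, so the body of the dispatch is not shown; a sketch of the presumable pattern, where the stand-in classes and the name keys are assumptions based on the imports in solver/__init__.py:

class Marc(object):     pass   # stand-ins for damask.solver.{Marc,Abaqus,Spectral}
class Abaqus(object):   pass
class Spectral(object): pass

class Solver(object):
  """Dispatch sketch: pick the solver subclass by name."""
  def __init__(self,solver=''):
    solverClass = {'marc': Marc, 'abaqus': Abaqus, 'spectral': Spectral}
    if solver.lower() in solverClass:
      self.__class__ = solverClass[solver.lower()]   # rebind instance to the chosen solver

print(type(Solver('Marc')).__name__)                 # -> Marc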
@@ -1,5 +1,5 @@
 # -*- coding: UTF-8 no BOM -*-

-# $Id$
+"""Test functionality"""

-from .test import Test
+from .test import Test # noqa
@@ -2,17 +2,19 @@

# $Id$

-import os, sys, shlex, inspect
-import subprocess,shutil,string
-import logging, logging.config
+import os,sys,shutil
+import logging,logging.config
import damask
+import numpy as np
+from collections import Iterable
from optparse import OptionParser

class Test():
-  '''
+  """
   General class for testing.
-  Is sub-classed by the individual tests.
-  '''
+
+  Is sub-classed by the individual tests.
+  """

  variants = []

@@ -20,11 +22,11 @@ class Test():

     logger = logging.getLogger()
     logger.setLevel(0)
     fh = logging.FileHandler('test.log')            # create file handler which logs even debug messages
     fh.setLevel(logging.DEBUG)
     full = logging.Formatter('%(asctime)s - %(levelname)s: \n%(message)s')
     fh.setFormatter(full)
     ch = logging.StreamHandler(stream=sys.stdout)   # create console handler with a higher log level
     ch.setLevel(logging.INFO)
     # create formatter and add it to the handlers
     plain = logging.Formatter('%(message)s')
@@ -52,18 +54,16 @@ class Test():
                       accept=False)

  def execute(self):
-    '''
-    Run all variants and report first failure.
-    '''
+    """Run all variants and report first failure."""
     if self.options.debug:
       for variant in xrange(len(self.variants)):
         try:
           self.postprocess(variant)
           if not self.compare(variant):
             return variant+1                       # return culprit
         except Exception as e :
-          logging.critical('\nWARNING:\n %s\n'%e)
+          logging.critical('\nWARNING:\n {}\n'.format(e))
           return variant+1                         # return culprit
       return 0
     else:
       if not self.testPossible(): return -1
@@ -74,139 +74,109 @@ class Test():
           self.prepare(variant)
           self.run(variant)
           self.postprocess(variant)
           if self.updateRequested:                                   # update requested
             self.update(variant)
           elif not (self.options.accept or self.compare(variant)):   # no update, do comparison
             return variant+1                                         # return culprit
         except Exception as e :
-          logging.critical('\nWARNING:\n %s\n'%e)
+          logging.critical('\nWARNING:\n {}\n'.format(e))
           return variant+1                                           # return culprit
       return 0

  def testPossible(self):
-    '''
-    Check if test is possible or not (e.g. no license available).
-    '''
+    """Check if test is possible or not (e.g. no license available)."""
     return True

  def clean(self):
-    '''
-    Delete directory tree containing current results.
-    '''
+    """Delete directory tree containing current results."""
     status = True

     try:
       shutil.rmtree(self.dirCurrent())
     except:
-      logging.warning('removal of directory "%s" not possible...'%(self.dirCurrent()))
+      logging.warning('removal of directory "{}" not possible...'.format(self.dirCurrent()))
       status = status and False

     try:
       os.mkdir(self.dirCurrent())
     except:
-      logging.critical('creation of directory "%s" failed...'%(self.dirCurrent()))
+      logging.critical('creation of directory "{}" failed...'.format(self.dirCurrent()))
       status = status and False

     return status

  def prepareAll(self):
-    '''
-    Do all necessary preparations for the whole test
-    '''
+    """Do all necessary preparations for the whole test"""
     return True

  def prepare(self,variant):
-    '''
-    Do all necessary preparations for the run of each test variant
-    '''
+    """Do all necessary preparations for the run of each test variant"""
     return True

  def run(self,variant):
-    '''
-    Execute the requested test variant.
-    '''
+    """Execute the requested test variant."""
     return True

  def postprocess(self,variant):
-    '''
-    Perform post-processing of generated results for this test variant.
-    '''
+    """Perform post-processing of generated results for this test variant."""
     return True

  def compare(self,variant):
-    '''
-    Compare reference to current results.
-    '''
+    """Compare reference to current results."""
     return True

  def update(self,variant):
-    '''
-    Update reference with current results.
-    '''
+    """Update reference with current results."""
     logging.debug('Update not necessary')
     return True

  def dirReference(self):
-    '''
-    Directory containing reference results of the test.
-    '''
+    """Directory containing reference results of the test."""
     return os.path.normpath(os.path.join(self.dirBase,'reference/'))

  def dirCurrent(self):
-    '''
-    Directory containing current results of the test.
-    '''
+    """Directory containing current results of the test."""
     return os.path.normpath(os.path.join(self.dirBase,'current/'))

  def dirProof(self):
-    '''
-    Directory containing human readable proof of correctness for the test.
-    '''
+    """Directory containing human readable proof of correctness for the test."""
     return os.path.normpath(os.path.join(self.dirBase,'proof/'))

  def fileInRoot(self,dir,file):
-    '''
-    Path to a file in the root directory of DAMASK.
-    '''
+    """Path to a file in the root directory of DAMASK."""
     return os.path.join(damask.Environment().rootDir(),dir,file)

  def fileInReference(self,file):
-    '''
-    Path to a file in the refrence directory for the test.
-    '''
+    """Path to a file in the reference directory for the test."""
     return os.path.join(self.dirReference(),file)

  def fileInCurrent(self,file):
-    '''
-    Path to a file in the current results directory for the test.
-    '''
+    """Path to a file in the current results directory for the test."""
     return os.path.join(self.dirCurrent(),file)

  def fileInProof(self,file):
-    '''
-    Path to a file in the proof directory for the test.
-    '''
+    """Path to a file in the proof directory for the test."""
     return os.path.join(self.dirProof(),file)

  def copy(self, mapA, mapB,
                 A = [], B = []):
-    '''
-    copy list of files from (mapped) source to target.
-    mapA/B is one of self.fileInX.
-    '''
+    """
+    copy list of files from (mapped) source to target.
+
+    mapA/B is one of self.fileInX.
+    """
     if not B or len(B) == 0: B = A

     for source,target in zip(map(mapA,A),map(mapB,B)):
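Test is meant to be sub-classed: a concrete test overrides the hooks above (prepare/run/postprocess/compare) and lets execute() drive the variants. A hedged minimal sketch, with every name made up:

from damask.test import Test     # Test is re-exported via the test package above

class MyTest(Test):              # hypothetical test
  variants = ['small','large']   # execute() loops over these

  def run(self,variant):
    return True                  # generate current results for this variant here

  def compare(self,variant):     # e.g. via self.compare_Tables([self.fileInReference('t.txt'),
    return True                  #                               self.fileInCurrent('t.txt')])

MyTest().execute()               # 0 on success, otherwise index of the failing variant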
@@ -223,19 +193,19 @@ class Test():
       try:
         shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
       except:
-        logging.critical('Reference2Current: Unable to copy file %s'%file)
+        logging.critical('Reference2Current: Unable to copy file "{}"'.format(file))

  def copy_Base2Current(self,sourceDir,sourcefiles=[],targetfiles=[]):

-    source=os.path.normpath(os.path.join(self.dirBase,'../../../'+sourceDir))
+    source=os.path.normpath(os.path.join(self.dirBase,'../../..',sourceDir))
     if len(targetfiles) == 0: targetfiles = sourcefiles
     for i,file in enumerate(sourcefiles):
       try:
         shutil.copy2(os.path.join(source,file),self.fileInCurrent(targetfiles[i]))
       except:
         logging.error(os.path.join(source,file))
-        logging.critical('Base2Current: Unable to copy file %s'%file)
+        logging.critical('Base2Current: Unable to copy file "{}"'.format(file))

  def copy_Current2Reference(self,sourcefiles=[],targetfiles=[]):
@@ -245,7 +215,7 @@ class Test():
       try:
         shutil.copy2(self.fileInCurrent(file),self.fileInReference(targetfiles[i]))
       except:
-        logging.critical('Current2Reference: Unable to copy file %s'%file)
+        logging.critical('Current2Reference: Unable to copy file "{}"'.format(file))

  def copy_Proof2Current(self,sourcefiles=[],targetfiles=[]):
@@ -255,7 +225,7 @@ class Test():
       try:
         shutil.copy2(self.fileInProof(file),self.fileInCurrent(targetfiles[i]))
       except:
-        logging.critical('Proof2Current: Unable to copy file %s'%file)
+        logging.critical('Proof2Current: Unable to copy file "{}"'.format(file))

  def copy_Current2Current(self,sourcefiles=[],targetfiles=[]):
@@ -264,7 +234,7 @@ class Test():
       try:
         shutil.copy2(self.fileInReference(file),self.fileInCurrent(targetfiles[i]))
       except:
-        logging.critical('Current2Current: Unable to copy file %s'%file)
+        logging.critical('Current2Current: Unable to copy file "{}"'.format(file))

  def execute_inCurrentDir(self,cmd,streamIn=None):
@@ -282,7 +252,7 @@ class Test():
  def compare_Array(self,File1,File2):

     import numpy as np
-    logging.info('comparing\n '+File1+'\n '+File2)
+    logging.info('\n '.join(['comparing',File1,File2]))
     table1 = damask.ASCIItable(name=File1,readonly=True)
     table1.head_read()
     len1=len(table1.info)+2
@@ -300,8 +270,9 @@ class Test():
       max_loc=np.argmax(abs(refArrayNonZero[curArray.nonzero()]/curArray[curArray.nonzero()]-1.))
       refArrayNonZero = refArrayNonZero[curArray.nonzero()]
       curArray = curArray[curArray.nonzero()]
-      print(' ********\n * maximum relative error %e for %e and %e\n ********'
-            %(max_err, refArrayNonZero[max_loc],curArray[max_loc]))
+      print(' ********\n * maximum relative error {} between {} and {}\n ********'.format(max_err,
+                                                                                          refArrayNonZero[max_loc],
+                                                                                          curArray[max_loc]))
       return max_err
     else:
       raise Exception('mismatch in array size to compare')
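compare_Array reduces two equally sized arrays to one number: the maximum relative error over the nonzero entries of the current array. The same reduction as a small numpy sketch (values made up):

import numpy as np

ref = np.array([1.00, 2.00, 0.50, 3.00])        # made-up reference data
cur = np.array([1.01, 2.00, 0.49, 3.03])        # made-up current data

nz      = cur.nonzero()                         # guard against division by zero
rel_err = np.abs(ref[nz]/cur[nz] - 1.0)
print(rel_err.max())                            # ~0.0204 for the values above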
@@ -325,10 +296,11 @@ class Test():
                          absoluteTolerance=False,perLine=False,skipLines=[]):

     import numpy as np
-    logging.info('comparing ASCII Tables\n %s \n %s'%(file0,file1))
+    logging.info('\n '.join(['comparing ASCII Tables',file0,file1]))
     if normHeadings == '': normHeadings = headings0

-    if len(headings0) == len(headings1) == len(normHeadings):   #check if comparison is possible and determine lenght of columns
+    # check if comparison is possible and determine length of columns
+    if len(headings0) == len(headings1) == len(normHeadings):
       dataLength = len(headings0)
       length     = [1  for i in xrange(dataLength)]
       shape      = [[] for i in xrange(dataLength)]
@@ -344,7 +316,7 @@ class Test():

       for i in xrange(dataLength):
         if headings0[i]['shape'] != headings1[i]['shape']:
-          raise Exception('shape mismatch when comparing %s with %s '%(headings0[i]['label'],headings1[i]['label']))
+          raise Exception('shape mismatch between {} and {} '.format(headings0[i]['label'],headings1[i]['label']))
         shape[i] = headings0[i]['shape']
         for j in xrange(np.shape(shape[i])[0]):
           length[i] *= shape[i][j]
@@ -352,7 +324,9 @@ class Test():
         for j in xrange(np.shape(normShape[i])[0]):
           normLength[i] *= normShape[i][j]
     else:
-      raise Exception('trying to compare %i with %i normed by %i data sets'%(len(headings0),len(headings1),len(normHeadings)))
+      raise Exception('trying to compare {} with {} normed by {} data sets'.format(len(headings0),
+                                                                                   len(headings1),
+                                                                                   len(normHeadings)))

     table0 = damask.ASCIItable(name=file0,readonly=True)
     table0.head_read()
@@ -360,37 +334,34 @@ class Test():
     table1.head_read()

     for i in xrange(dataLength):
-      key0    = {True :'1_%s',
-                 False:'%s'   }[length[i]>1]    %headings0[i]['label']
-      key1    = {True :'1_%s',
-                 False:'%s'   }[length[i]>1]    %headings1[i]['label']
-      normKey = {True :'1_%s',
-                 False:'%s'   }[normLength[i]>1]%normHeadings[i]['label']
+      key0    = ('1_' if length[i]>1     else '') + headings0[i]['label']
+      key1    = ('1_' if length[i]>1     else '') + headings1[i]['label']
+      normKey = ('1_' if normLength[i]>1 else '') + normHeadings[i]['label']
       if key0 not in table0.labels:
-        raise Exception('column %s not found in 1. table...\n'%key0)
+        raise Exception('column {} not found in 1. table...\n'.format(key0))
       elif key1 not in table1.labels:
-        raise Exception('column %s not found in 2. table...\n'%key1)
+        raise Exception('column {} not found in 2. table...\n'.format(key1))
       elif normKey not in table0.labels:
-        raise Exception('column %s not found in 1. table...\n'%normKey)
+        raise Exception('column {} not found in 1. table...\n'.format(normKey))
       else:
-        column[0][i]  = table0.labels.index(key0)      # remember columns of requested data
-        column[1][i]  = table1.labels.index(key1)      # remember columns of requested data in second column
-        normColumn[i] = table0.labels.index(normKey)   # remember columns of requested data in second column
+        column[0][i]  = table0.labels.index(key0)
+        column[1][i]  = table1.labels.index(key1)
+        normColumn[i] = table0.labels.index(normKey)

     line0 = 0
     while table0.data_read():                          # read next data line of ASCII table
       if line0 not in skipLines:
         for i in xrange(dataLength):
           myData = np.array(map(float,table0.data[column[0][i]:\
                                                   column[0][i]+length[i]]),'d')
           normData = np.array(map(float,table0.data[normColumn[i]:\
                                                     normColumn[i]+normLength[i]]),'d')
           data[i] = np.append(data[i],np.reshape(myData,shape[i]))
           if normType == 'pInf':
             norm[i] = np.append(norm[i],np.max(np.abs(normData)))
           else:
             norm[i] = np.append(norm[i],np.linalg.norm(np.reshape(normData,normShape[i]),normType))
-      line0 +=1
+      line0 += 1

     for i in xrange(dataLength):
       if not perLine: norm[i] = [np.max(norm[i]) for j in xrange(line0-len(skipLines))]
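The rewritten key0/key1/normKey lines encode the DAMASK ASCII-table convention that a multi-component quantity is addressed through its first component, labelled 1_<label>. A small sketch of that convention (the labels are made up):

def table_key(label,components):
  # scalar columns keep their plain label; vector/tensor columns
  # are looked up via their first component
  return ('1_' if components > 1 else '') + label

print(table_key('Mises(Cauchy)',1))   # -> Mises(Cauchy)
print(table_key('f',9))               # -> 1_f (first of nine deformation-gradient components)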
@@ -399,12 +370,12 @@ class Test():
           norm[i] = [1.0 for j in xrange(line0-len(skipLines))]
           absTol[i] = True
           if perLine:
-            logging.warning('At least one norm of %s in 1. table is 0.0, using absolute tolerance'%headings0[i]['label'])
+            logging.warning('At least one norm of {} in 1. table is 0.0, using absolute tolerance'.format(headings0[i]['label']))
           else:
-            logging.warning('Maximum norm of %s in 1. table is 0.0, using absolute tolerance'%headings0[i]['label'])
+            logging.warning('Maximum norm of {} in 1. table is 0.0, using absolute tolerance'.format(headings0[i]['label']))

     line1 = 0
     while table1.data_read():                          # read next data line of ASCII table
       if line1 not in skipLines:
         for i in xrange(dataLength):
           myData = np.array(map(float,table1.data[column[1][i]:\
@@ -413,45 +384,45 @@ class Test():
                                  norm[i][line1-len(skipLines)])
       line1 +=1

-    if (line0 != line1): raise Exception('found %s lines in 1. table and %s in 2. table'%(line0,line1))
+    if (line0 != line1): raise Exception('found {} lines in 1. table but {} in 2. table'.format(line0,line1))

     logging.info(' ********')
     for i in xrange(dataLength):
       if absTol[i]:
-        logging.info(' * maximum absolute error %e for %s and %s'%(maxError[i],headings0[i]['label'],headings1[i]['label']))
+        logging.info(' * maximum absolute error {} between {} and {}'.format(maxError[i],
+                                                                             headings0[i]['label'],
+                                                                             headings1[i]['label']))
       else:
-        logging.info(' * maximum relative error %e for %s and %s'%(maxError[i],headings0[i]['label'],headings1[i]['label']))
+        logging.info(' * maximum relative error {} between {} and {}'.format(maxError[i],
+                                                                             headings0[i]['label'],
+                                                                             headings1[i]['label']))
     logging.info(' ********')
     return maxError


  def compare_TablesStatistically(self,
                                  files     = [None,None],   # list of file names
                                  columns   = [None],        # list of list of column labels (per file)
                                  meanTol   = 1.0e-4,
                                  stdTol    = 1.0e-6,
                                  preFilter = 1.0e-9):
-    '''
-    calculate statistics of tables
-    threshold can be used to ignore small values (a negative number disables this feature)
-    '''
-
-    import numpy as np
-    from collections import Iterable
-
+    """
+    calculate statistics of tables
+
+    threshold can be used to ignore small values (a negative number disables this feature)
+    """
     if not (isinstance(files, Iterable) and not isinstance(files, str)):   # check whether list of files is requested
       files = [str(files)]

     tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
     for table in tables:
       table.head_read()

     columns += [columns[0]]*(len(files)-len(columns))   # extend to same length as files
     columns  = columns[:len(files)]                     # truncate to same length as files

     for i,column in enumerate(columns):
       if column is None: columns[i] = tables[i].labels  # if no column is given, read all

     logging.info('comparing ASCIItables statistically')
     for i in xrange(len(columns)):
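compare_TablesStatistically normalizes the column-wise difference of two tables and then tests the worst mean and standard deviation against meanTol/stdTol. A stripped-down numpy sketch with made-up data; the definition of normBy is not visible in the hunks, so the symmetric mean used here is an assumption:

import numpy as np

data0 = np.array([[1.00,10.0],[2.00,20.0],[3.00,30.0]])        # made-up "reference" columns
data1 = np.array([[1.01, 9.9],[2.00,20.1],[2.99,30.0]])        # made-up "current" columns

preFilter   = 1.0e-9
delta       = data1 - data0
normBy      = (np.abs(data0) + np.abs(data1))*0.5              # assumed normalization
normedDelta = np.where(normBy > preFilter, delta/normBy, 0.0)  # ignore near-zero entries

mean = np.amax(np.abs(np.mean(normedDelta,0)))                 # worst column-wise bias
std  = np.amax(np.std(normedDelta,0))                          # worst column-wise scatter
print(mean < 1.0e-4 and std < 1.0e-6)                          # the meanTol/stdTol decision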
@@ -461,7 +432,7 @@ class Test():
                          )
       logging.info(files[i]+':'+','.join(columns[i]))

     if len(files) < 2: return True   # single table is always close to itself...

     data = []
     for table,labels in zip(tables,columns):
@@ -476,42 +447,38 @@ class Test():
       normedDelta = np.where(normBy>preFilter,delta/normBy,0.0)
       mean = np.amax(np.abs(np.mean(normedDelta,0)))
       std  = np.amax(np.std(normedDelta,0))
-      logging.info('mean: %f'%mean)
-      logging.info('std: %f'%std)
+      logging.info('mean: {:f}'.format(mean))
+      logging.info('std: {:f}'.format(std))

     return (mean<meanTol) & (std < stdTol)


  def compare_Tables(self,
                     files      = [None,None],   # list of file names
                     columns    = [None],        # list of list of column labels (per file)
                     rtol       = 1e-5,
                     atol       = 1e-8,
                     preFilter  = -1.0,
                     postFilter = -1.0,
                     debug      = False):
-    '''
-    compare tables with np.allclose
-    threshold can be used to ignore small values (a negative number disables this feature)
-    '''
-
-    import numpy as np
-    from collections import Iterable
-
+    """
+    compare tables with np.allclose
+
+    threshold can be used to ignore small values (a negative number disables this feature)
+    """
     if not (isinstance(files, Iterable) and not isinstance(files, str)):   # check whether list of files is requested
       files = [str(files)]

     tables = [damask.ASCIItable(name = filename,readonly = True) for filename in files]
     for table in tables:
       table.head_read()

     columns += [columns[0]]*(len(files)-len(columns))   # extend to same length as files
     columns  = columns[:len(files)]                     # truncate to same length as files

     for i,column in enumerate(columns):
       if column is None: columns[i] = tables[i].labels  # if no column is given, read all

     logging.info('comparing ASCIItables')
     for i in xrange(len(columns)):
@@ -521,7 +488,7 @@ class Test():
                          )
       logging.info(files[i]+':'+','.join(columns[i]))

     if len(files) < 2: return True   # single table is always close to itself...

     maximum = np.zeros(len(columns[0]),dtype='f')
     data = []
|
@ -532,26 +499,26 @@ class Test():
|
||||||
table.close()
|
table.close()
|
||||||
|
|
||||||
maximum /= len(tables)
|
maximum /= len(tables)
|
||||||
maximum = np.where(maximum >0.0, maximum, 1) # do not devide by zero for empty columns
|
maximum = np.where(maximum >0.0, maximum, 1) # avoid div by zero for empty columns
|
||||||
for i in xrange(len(data)):
|
for i in xrange(len(data)):
|
||||||
data[i] /= maximum
|
data[i] /= maximum
|
||||||
|
|
||||||
mask = np.zeros_like(table.data,dtype='bool')
|
mask = np.zeros_like(table.data,dtype='bool')
|
||||||
|
|
||||||
for table in data:
|
for table in data:
|
||||||
mask |= np.where(np.abs(table)<postFilter,True,False) # mask out (all) tiny values
|
mask |= np.where(np.abs(table)<postFilter,True,False) # mask out (all) tiny values
|
||||||
|
|
||||||
|
|
||||||
allclose = True # start optimistic
|
allclose = True # start optimistic
|
||||||
for i in xrange(1,len(data)):
|
for i in xrange(1,len(data)):
|
||||||
if debug:
|
if debug:
|
||||||
t0 = np.where(mask,0.0,data[i-1])
|
t0 = np.where(mask,0.0,data[i-1])
|
||||||
t1 = np.where(mask,0.0,data[i ])
|
t1 = np.where(mask,0.0,data[i ])
|
||||||
j = np.argmin(np.abs(t1)*rtol+atol-np.abs(t0-t1))
|
j = np.argmin(np.abs(t1)*rtol+atol-np.abs(t0-t1))
|
||||||
logging.info('%f'%np.amax(np.abs(t0-t1)/(np.abs(t1)*rtol+atol)))
|
logging.info('{:f}'.format(np.amax(np.abs(t0-t1)/(np.abs(t1)*rtol+atol))))
|
||||||
logging.info('%f %f'%((t0*maximum).flatten()[j],(t1*maximum).flatten()[j]))
|
logging.info('{:f} {:f}'.format((t0*maximum).flatten()[j],(t1*maximum).flatten()[j]))
|
||||||
allclose &= np.allclose(np.where(mask,0.0,data[i-1]),
|
allclose &= np.allclose(np.where(mask,0.0,data[i-1]),
|
||||||
np.where(mask,0.0,data[i ]),rtol,atol) # accumulate "pessimism"
|
np.where(mask,0.0,data[i ]),rtol,atol) # accumulate "pessimism"
|
||||||
|
|
||||||
return allclose
|
return allclose
|
||||||
|
|
||||||
|
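compare_Tables masks out entries below postFilter in every table before accumulating the np.allclose verdicts, so numerical noise near zero cannot fail the relative-tolerance check. A small numpy sketch of why the mask matters (values made up):

import numpy as np

a = np.array([1.0000,  1.0e-12, 3.0])    # made-up columns; one entry is numerical noise
b = np.array([1.0001, -2.0e-12, 3.0])

postFilter = 1.0e-9
mask = (np.abs(a) < postFilter) | (np.abs(b) < postFilter)    # drop tiny values everywhere

print(np.allclose(a,b,rtol=1e-3,atol=0.0))                    # False: noise entry dominates
print(np.allclose(np.where(mask,0.0,a),
                  np.where(mask,0.0,b),rtol=1e-3,atol=0.0))   # True once noise is masked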
@@ -580,14 +547,13 @@ class Test():
  def report_Success(self,culprit):

     if culprit == 0:
-      logging.critical('%s passed.'%({False: 'The test',
-                                      True: 'All %i tests'%(len(self.variants))}[len(self.variants) > 1]))
+      logging.critical(('The test' if len(self.variants) == 1 else 'All {} tests'.format(len(self.variants))) + ' passed')
       logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
       return 0
     if culprit == -1:
       logging.warning('Warning: Could not start test')
       return 0
     else:
-      logging.critical(' ********\n * Test %i failed...\n ********'%(culprit))
+      logging.critical(' ********\n * Test {} failed...\n ********'.format(culprit))
       logging.critical('\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n')
       return culprit
@@ -6,11 +6,13 @@ import numpy as np
from optparse import Option

class bcolors:
-  '''
+  """
   ASCII Colors (Blender code)
+
   https://svn.blender.org/svnroot/bf-blender/trunk/blender/build_files/scons/tools/bcolors.py
   http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
-  '''
+  """

  HEADER    = '\033[95m'
  OKBLUE    = '\033[94m'
  OKGREEN   = '\033[92m'
@@ -32,32 +34,48 @@ class bcolors:


# -----------------------------
-def srepr(arg,
-          glue = '\n'):
-# -----------------------------
+def srepr(arg,glue = '\n'):
+  """joins arguments as individual lines"""
  if (not hasattr(arg, "strip") and
          hasattr(arg, "__getitem__") or
          hasattr(arg, "__iter__")):
    return glue.join(srepr(x) for x in arg)
  return arg if isinstance(arg,basestring) else repr(arg)

# -----------------------------
-def croak(what,
-          newline = True):
-# -----------------------------
+def croak(what, newline = True):
+  """writes formatted to stderr"""
  sys.stderr.write(srepr(what,glue = '\n') + ('\n' if newline else ''))
  sys.stderr.flush()

# -----------------------------
def report(who,what):
-# -----------------------------
+  """reports script and file name"""
  croak( (emph(who) if who else '') + (': '+what if what else '') )

# -----------------------------
def emph(what):
-# -----------------------------
+  """emphasizes string on screen"""
  return bcolors.BOLD+srepr(what)+bcolors.ENDC

+# -----------------------------
+def execute(cmd,
+            streamIn = None,
+            wd = './'):
+  """executes a command in given directory and returns stdout and stderr for optional stdin"""
+  initialPath = os.getcwd()
+  os.chdir(wd)
+  process = subprocess.Popen(shlex.split(cmd),
+                             stdout = subprocess.PIPE,
+                             stderr = subprocess.PIPE,
+                             stdin  = subprocess.PIPE)
+  out,error = [i.replace("\x08","") for i in (process.communicate() if streamIn is None
+                                              else process.communicate(streamIn.read()))]
+  os.chdir(initialPath)
+  if process.returncode != 0: raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode))
+  return out,error
+

# -----------------------------
# Matlab like trigonometric functions that take and return angles in degrees.
# -----------------------------
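A hedged usage sketch for the execute() helper added above (the command and working directory are made up); it raises RuntimeError on a non-zero return code, and croak() echoes the captured output to stderr:

from damask import util          # assuming the helpers land in damask.util

out,err = util.execute('ls -l',wd='/tmp')
util.croak(out)                  # echo captured stdout via stderr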
@@ -68,7 +86,6 @@ for f in ['cos', 'sin', 'tan']:

# -----------------------------
def gridLocation(idx,res):
-# -----------------------------
  return ( idx  % res[0], \
         ( idx // res[0]) % res[1], \
         ( idx // res[0] // res[1]) % res[2] )
@@ -76,17 +93,18 @@ def gridLocation(idx,res):

# -----------------------------
def gridIndex(location,res):
-# -----------------------------
  return ( location[0] % res[0]                   + \
         ( location[1] % res[1]) * res[0]          + \
         ( location[2] % res[2]) * res[1] * res[0] )


# -----------------------------
class extendableOption(Option):
-# -----------------------------
-# used for definition of new option parser action 'extend', which enables to take multiple option arguments
-# taken from online tutorial http://docs.python.org/library/optparse.html
+  """
+  used for definition of new option parser action 'extend', which enables to take multiple option arguments
+
+  taken from online tutorial http://docs.python.org/library/optparse.html
+  """

  ACTIONS = Option.ACTIONS + ("extend",)
  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
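gridIndex is the inverse of gridLocation for a periodic, x-fastest grid: idx = x + y*res[0] + z*res[0]*res[1]. A quick self-contained round-trip check (the resolution is made up):

def gridLocation(idx,res):
  return ( idx  % res[0],
         ( idx // res[0]) % res[1],
         ( idx // res[0] // res[1]) % res[2] )

def gridIndex(location,res):
  return ( location[0] % res[0]
         + (location[1] % res[1]) * res[0]
         + (location[2] % res[2]) * res[1] * res[0] )

res = (4,3,2)
assert all(gridIndex(gridLocation(i,res),res) == i for i in range(res[0]*res[1]*res[2]))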
@@ -102,28 +120,36 @@ class extendableOption(Option):

# -----------------------------
class backgroundMessage(threading.Thread):
-# -----------------------------
-  choices = {'bounce': ['_','o','O','°','¯','¯','°','O','o',],
+  """reporting with animation to indicate progress"""
+
+  choices = {'bounce':   ['_', 'o', 'O', u'\u00B0',
+                          u'\u203e',u'\u203e',u'\u00B0','O','o','_'],
+             'spin':     [u'\u25dc',u'\u25dd',u'\u25de',u'\u25df'],
             'circle':   [u'\u25f4',u'\u25f5',u'\u25f6',u'\u25f7'],
             'hexagon':  [u'\u2b22',u'\u2b23'],
             'square':   [u'\u2596',u'\u2598',u'\u259d',u'\u2597'],
             'triangle': [u'\u140a',u'\u140a',u'\u1403',u'\u1405',u'\u1405',u'\u1403'],
-             'amoeba':   [u'\u2596',u'\u258f',u'\u2598',u'\u2594',u'\u259d',u'\u2595',u'\u2597',u'\u2582'],
-             'beat':     [u'\u2581',u'\u2582',u'\u2583',u'\u2585',u'\u2586',u'\u2587',u'\u2587',u'\u2586',u'\u2585',u'\u2583',u'\u2582',],
-             'prison':   [u'\u168b',u'\u168c',u'\u168d',u'\u168f',u'\u168e',u'\u168d',u'\u168c',u'\u168b',],
-             'breath':   [u'\u1690',u'\u1691',u'\u1692',u'\u1693',u'\u1694',u'\u1693',u'\u1692',u'\u1691',u'\u1690',],
+             'amoeba':   [u'\u2596',u'\u258f',u'\u2598',u'\u2594',u'\u259d',u'\u2595',
+                          u'\u2597',u'\u2582'],
+             'beat':     [u'\u2581',u'\u2582',u'\u2583',u'\u2585',u'\u2586',u'\u2587',
+                          u'\u2587',u'\u2586',u'\u2585',u'\u2583',u'\u2582',],
+             'prison':   [u'\u168b',u'\u168c',u'\u168d',u'\u168f',u'\u168e',u'\u168d',
+                          u'\u168c',u'\u168b',],
+             'breath':   [u'\u1690',u'\u1691',u'\u1692',u'\u1693',u'\u1694',u'\u1693',
+                          u'\u1692',u'\u1691',u'\u1690',],
             'pulse':    [u'·',u'•',u'\u25cf',u'\u25cf',u'•',],
-             'ant':      [u'\u2801',u'\u2802',u'\u2810',u'\u2820',u'\u2804',u'\u2840',u'\u2880',u'\u2820',u'\u2804',u'\u2802',u'\u2810',u'\u2808'],
-             'juggle':   [u'\ua708',u'\ua709',u'\ua70a',u'\ua70b',u'\ua70c',u'\ua711',u'\ua710',u'\ua70f',u'\ua70d',],
+             'ant':      [u'\u2801',u'\u2802',u'\u2810',u'\u2820',u'\u2804',u'\u2840',
+                          u'\u2880',u'\u2820',u'\u2804',u'\u2802',u'\u2810',u'\u2808'],
+             'juggle':   [u'\ua708',u'\ua709',u'\ua70a',u'\ua70b',u'\ua70c',u'\ua711',
+                          u'\ua710',u'\ua70f',u'\ua70d',],
#             'wobbler':  [u'\u2581',u'\u25e3',u'\u258f',u'\u25e4',u'\u2594',u'\u25e5',u'\u2595',u'\u25e2',],
             'grout':    [u'\u2581',u'\u258f',u'\u2594',u'\u2595',],
             'partner':  [u'\u26ac',u'\u26ad',u'\u26ae',u'\u26af',u'\u26ae',u'\u26ad',],
             'classic':  ['-', '\\', '|', '/',],
            }

-  def __init__(self,
-               symbol = None,
-               wait = 0.1):
+  def __init__(self,symbol = None,wait = 0.1):
+    """sets animation symbol"""
    super(backgroundMessage, self).__init__()
    self._stop = threading.Event()
    self.message = ''
@@ -134,20 +160,20 @@ class backgroundMessage(threading.Thread):
    self.waittime = wait

  def __quit__(self):
+    """cleans output"""
    length = len(self.symbols[self.counter] + self.gap + self.message)
    sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length)
    sys.stderr.write('')
    sys.stderr.flush()

  def stop(self):
    self._stop.set()

  def stopped(self):
    return self._stop.is_set()

  def run(self):
-#    while not threading.enumerate()[0]._Thread__stopped:
-    while not self.stopped():
+    while not threading.enumerate()[0]._Thread__stopped:
      time.sleep(self.waittime)
      self.update_message()
    self.__quit__()
@@ -159,7 +185,7 @@ class backgroundMessage(threading.Thread):
  def print_message(self):
    length = len(self.symbols[self.counter] + self.gap + self.message)
    sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length + \
-                     self.symbols[self.counter] + self.gap + self.new_message)                  # delete former and print new message
+                     self.symbols[self.counter].encode('utf-8') + self.gap + self.new_message) # delete former and print new message
    sys.stderr.flush()
    self.message = self.new_message

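A hedged usage sketch for the animation thread: start it, update the text while working, then stop and join. set_message() is assumed from the new_message/update_message logic in the surrounding hunks:

from damask.util import backgroundMessage
import time

bg = backgroundMessage(symbol = 'classic')
bg.start()
bg.set_message('crunching numbers...')   # assumed counterpart of update_message()
time.sleep(2)                            # stand-in for real work
bg.stop()                                # thread clears its line via __quit__()
bg.join()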
@@ -170,24 +196,38 @@ class backgroundMessage(threading.Thread):
  def animation(self,which = None):
    return ''.join(self.choices[which]) if which in self.choices else ''

-'''
-Non-linear least square fitting (Levenberg-Marquardt method) with
-bounded parameters.
-the codes of transformation between int <-> ext refers to the work of
-Jonathan J. Helmus: https://github.com/jjhelmus/leastsqbound-scipy
-other codes refers to the source code of minpack.py:
-..\Lib\site-packages\scipy\optimize\minpack.py
-'''
-from numpy import (array, arcsin, asarray, cos, dot, eye, empty_like,
-                   isscalar,finfo, take, triu, transpose, sqrt, sin)

-def _check_func(checker, argname, thefunc, x0, args, numinputs,
-                output_shape=None):
-  from numpy import atleast_1d, shape, issubdtype, dtype, inexact
-  '''
-  The same as that of minpack.py,
-  '''
-  res = atleast_1d(thefunc(*((x0[:numinputs],) + args)))
+def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0,
+                 col_deriv=0, ftol=1.49012e-8, xtol=1.49012e-8,
+                 gtol=0.0, maxfev=0, epsfcn=None, factor=100, diag=None):
+  """
+  Non-linear least square fitting (Levenberg-Marquardt method) with
+  bounded parameters.
+  the code for the transformation between int <-> ext refers to the work of
+  Jonathan J. Helmus: https://github.com/jjhelmus/leastsqbound-scipy
+  other code refers to the source code of minpack.py:
+  ..\Lib\site-packages\scipy\optimize\minpack.py
+
+  An internal parameter list is used to enforce constraints on the fitting
+  parameters. The transformation is based on that of the MINUIT package.
+  please see: F. James and M. Winkler. MINUIT User's Guide, 2004.
+
+  bounds : list
+    (min, max) pairs for each parameter, use None for 'min' or 'max'
+    when there is no bound in that direction.
+    For example: if there are two parameters to be fitted, then
+    bounds is [(min1,max1), (min2,max2)]
+
+  This function is based on 'leastsq' of minpack.py, the annotation of
+  other parameters can be found in 'leastsq'.
+  ..\Lib\site-packages\scipy\optimize\minpack.py
+  """
+  from scipy.optimize import _minpack
+
+  def _check_func(checker, argname, thefunc, x0, args, numinputs,
+                  output_shape=None):
+    """The same as that of minpack.py"""
+    res = np.atleast_1d(thefunc(*((x0[:numinputs],) + args)))
+    if (output_shape is not None) and (np.shape(res) != output_shape):
      if (output_shape[0] != 1):
        if len(output_shape) > 1:
@ -201,203 +241,171 @@ def _check_func(checker, argname, thefunc, x0, args, numinputs,
|
||||||
else:
|
else:
|
||||||
msg += "."
|
msg += "."
|
||||||
raise TypeError(msg)
|
raise TypeError(msg)
|
||||||
if issubdtype(res.dtype, inexact):
|
if np.issubdtype(res.dtype, np.inexact):
|
||||||
dt = res.dtype
|
dt = res.dtype
|
||||||
else:
|
else:
|
||||||
dt = dtype(float)
|
dt = dtype(float)
|
||||||
return shape(res), dt
|
return shape(res), dt
|
||||||
|
|
||||||
def _int2extGrad(p_int, bounds):
    """Calculate the gradients of transforming the internal (unconstrained) to external (constrained) parameter."""
    grad = np.empty_like(p_int)
    for i, (x, bound) in enumerate(zip(p_int, bounds)):
        lower, upper = bound
        if lower is None and upper is None:    # No constraints
            grad[i] = 1.0
        elif upper is None:                    # only lower bound
            grad[i] = x/np.sqrt(x*x + 1.0)
        elif lower is None:                    # only upper bound
            grad[i] = -x/np.sqrt(x*x + 1.0)
        else:                                  # lower and upper bounds
            grad[i] = (upper - lower)*np.cos(x)/2.0
    return grad


def _int2extFunc(bounds):
    """transform internal parameters into external parameters."""
    local = [_int2extLocal(b) for b in bounds]

    def _transform_i2e(p_int):
        p_ext = np.empty_like(p_int)
        p_ext[:] = [i(j) for i, j in zip(local, p_int)]
        return p_ext
    return _transform_i2e


def _ext2intFunc(bounds):
    """transform external parameters into internal parameters."""
    local = [_ext2intLocal(b) for b in bounds]

    def _transform_e2i(p_ext):
        p_int = np.empty_like(p_ext)
        p_int[:] = [i(j) for i, j in zip(local, p_ext)]
        return p_int
    return _transform_e2i


def _int2extLocal(bound):
    """transform a single internal parameter to an external parameter."""
    lower, upper = bound
    if lower is None and upper is None:    # no constraints
        return lambda x: x
    elif upper is None:                    # only lower bound
        return lambda x: lower - 1.0 + np.sqrt(x*x + 1.0)
    elif lower is None:                    # only upper bound
        return lambda x: upper + 1.0 - np.sqrt(x*x + 1.0)
    else:
        return lambda x: lower + ((upper - lower)/2.0)*(np.sin(x) + 1.0)


def _ext2intLocal(bound):
    """transform a single external parameter to an internal parameter."""
    lower, upper = bound
    if lower is None and upper is None:    # no constraints
        return lambda x: x
    elif upper is None:                    # only lower bound
        return lambda x: np.sqrt((x - lower + 1.0)**2 - 1.0)
    elif lower is None:                    # only upper bound
        return lambda x: np.sqrt((x - upper - 1.0)**2 - 1.0)
    else:
        return lambda x: np.arcsin((2.0*(x - lower)/(upper - lower)) - 1.0)
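A minimal round-trip sketch (not part of the original file; bound values are illustrative): the two local transforms should invert each other on the feasible interval.

    b     = (0.0, 2.0)                       # lower and upper bound
    x_int = _ext2intLocal(b)(1.25)           # external -> internal (unconstrained)
    print(_int2extLocal(b)(x_int))           # back to external: 1.25 up to round-off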
def leastsqBound(func, x0, args=(), bounds=None, Dfun=None, full_output=0,
                 col_deriv=0, ftol=1.49012e-8, xtol=1.49012e-8,
                 gtol=0.0, maxfev=0, epsfcn=None, factor=100, diag=None):
    from scipy.optimize import _minpack

    i2e = _int2extFunc(bounds)
    e2i = _ext2intFunc(bounds)

    x0 = np.asarray(x0).flatten()
    n = len(x0)

    if len(bounds) != n:
        raise ValueError('the length of bounds is inconsistent with the number of parameters')

    if not isinstance(args, tuple):
        args = (args,)

    shape, dtype = _check_func('leastsq', 'func', func, x0, args, n)
    m = shape[0]

    if n > m:
        raise TypeError('Improper input: N=%s must not exceed M=%s' % (n, m))
    if epsfcn is None:
        epsfcn = np.finfo(dtype).eps

    def funcWarp(x, *args):
        return func(i2e(x), *args)

    xi0 = e2i(x0)

    if Dfun is None:
        if maxfev == 0:
            maxfev = 200*(n + 1)
        retval = _minpack._lmdif(funcWarp, xi0, args, full_output, ftol, xtol,
                                 gtol, maxfev, epsfcn, factor, diag)
    else:
        if col_deriv:
            _check_func('leastsq', 'Dfun', Dfun, x0, args, n, (n, m))
        else:
            _check_func('leastsq', 'Dfun', Dfun, x0, args, n, (m, n))
        if maxfev == 0:
            maxfev = 100*(n + 1)

        def DfunWarp(x, *args):
            return Dfun(i2e(x), *args)

        retval = _minpack._lmder(funcWarp, DfunWarp, xi0, args, full_output, col_deriv,
                                 ftol, xtol, gtol, maxfev, factor, diag)

    errors = {0: ["Improper input parameters.", TypeError],
              1: ["Both actual and predicted relative reductions "
                  "in the sum of squares\n  are at most %f" % ftol, None],
              2: ["The relative error between two consecutive "
                  "iterates is at most %f" % xtol, None],
              3: ["Both actual and predicted relative reductions in "
                  "the sum of squares\n  are at most %f and the "
                  "relative error between two consecutive "
                  "iterates is at \n  most %f" % (ftol, xtol), None],
              4: ["The cosine of the angle between func(x) and any "
                  "column of the\n  Jacobian is at most %f in "
                  "absolute value" % gtol, None],
              5: ["Number of calls to function has reached "
                  "maxfev = %d." % maxfev, ValueError],
              6: ["ftol=%f is too small, no further reduction "
                  "in the sum of squares\n  is possible." % ftol,
                  ValueError],
              7: ["xtol=%f is too small, no further improvement in "
                  "the approximate\n  solution is possible." % xtol,
                  ValueError],
              8: ["gtol=%f is too small, func(x) is orthogonal to the "
                  "columns of\n  the Jacobian to machine "
                  "precision." % gtol, ValueError],
              'unknown': ["Unknown error.", TypeError]}

    info = retval[-1]    # The FORTRAN return value

    if info not in [1, 2, 3, 4] and not full_output:
        if info in [5, 6, 7, 8]:
            np.warnings.warn(errors[info][0], RuntimeWarning)
        else:
            try:
                raise errors[info][1](errors[info][0])
            except KeyError:
                raise errors['unknown'][1](errors['unknown'][0])

    mesg = errors[info][0]
    x = i2e(retval[0])

    if full_output:
        grad = _int2extGrad(retval[0], bounds)
        retval[1]['fjac'] = (retval[1]['fjac'].T / np.take(grad,
                             retval[1]['ipvt'] - 1)).T
        cov_x = None
        if info in [1, 2, 3, 4]:
            from numpy.dual import inv
            from numpy.linalg import LinAlgError
            perm = np.take(np.eye(n), retval[1]['ipvt'] - 1, 0)
            r = np.triu(np.transpose(retval[1]['fjac'])[:n, :])
            R = np.dot(r, perm)
            try:
                cov_x = inv(np.dot(np.transpose(R), R))
            except LinAlgError as inverror:
                print inverror
                pass
        return (x, cov_x) + retval[1:-1] + (mesg, info)
    else:
        return (x, info)
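A hedged usage sketch (not part of the original file; data and parameter values are made up): fit y = a*exp(-b*x) with a bounded below by 0 and b confined to [0, 10].

    x = np.linspace(0.0, 4.0, 50)
    y = 2.5*np.exp(-1.3*x)                                     # synthetic data
    residual = lambda p: p[0]*np.exp(-p[1]*x) - y              # residual vector
    p, info = leastsqBound(residual, [1.0, 1.0],
                           bounds=[(0.0, None), (0.0, 10.0)])  # one (min,max) pair per parameter
    print(p)                                                   # approaches [2.5, 1.3]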
def _general_function(params, ydata, xdata, function):
    return function(xdata, *params) - ydata

@ -405,7 +413,7 @@ def _weighted_general_function(params, ydata, xdata, function, weights):
    return (function(xdata, *params) - ydata)*weights
def curve_fit_bound(f, xdata, ydata, p0=None, sigma=None, bounds=None, **kw):
    """Similar as 'curve_fit' in minpack.py"""
    if p0 is None:
        # determine number of parameters by inspecting the function
        import inspect

@ -418,15 +426,15 @@ def curve_fit_bound(f, xdata, ydata, p0=None, sigma=None, bounds=None, **kw):
        else:
            p0 = [1.0] * (len(args)-1)

    if np.isscalar(p0):
        p0 = np.array([p0])

    args = (ydata, xdata, f)
    if sigma is None:
        func = _general_function
    else:
        func = _weighted_general_function
        args += (1.0/np.asarray(sigma),)

    return_full = kw.pop('full_output', False)
    res = leastsqBound(func, p0, args=args, bounds=bounds, full_output=True, **kw)

@ -440,26 +448,6 @@ def curve_fit_bound(f, xdata, ydata, p0=None, sigma=None, bounds=None, **kw):
        s_sq = (func(popt, *args)**2).sum()/(len(ydata)-len(p0))
        pcov = pcov * s_sq
    else:
        pcov = np.inf

    return (popt, pcov, infodict, errmsg, ier) if return_full else (popt, pcov)
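The corresponding curve_fit_bound sketch (not part of the original file; reuses the synthetic x, y from the sketch above):

    f = lambda x, a, b: a*np.exp(-b*x)
    popt, pcov = curve_fit_bound(f, x, y, p0=[1.0, 1.0],
                                 bounds=[(0.0, None), (0.0, 10.0)])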
Binary file not shown (image changed: 80 KiB before, 34 KiB after).
@ -1,8 +1,7 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os
from optparse import OptionParser
import damask

@ -48,7 +47,7 @@ for name in filenames:
  table.labels_append(['1_Euler','2_Euler','3_Euler',
                       '1_pos','2_pos',
                       'IQ','CI','PhaseID','Intensity','Fit',
                      ],                                  # OIM Analysis 7.2 Manual, p 403 (of 517)
                      reset = True)

# ------------------------------------------ assemble header ---------------------------------------
@ -1,9 +1,9 @@
#!/usr/bin/python
# -*- coding: UTF-8 no BOM -*-

import threading,os,string
import numpy as np
from optparse import OptionParser
from shutil import copy2
from re import split
import damask

@ -14,26 +14,10 @@ scriptID = ' '.join([scriptName,damask.version])
def list_split(option, opt, value, parser):
  setattr(parser.values, option.dest, value.split(','))

#---------------------------------------------------------------------------------------------------
class myThread (threading.Thread):
  """Runner"""

  def __init__(self, threadID):
    threading.Thread.__init__(self)
    self.threadID = threadID

@ -48,8 +32,6 @@ class myThread (threading.Thread):
    s.release()

def doSim(delay,thread):
  global dirCurrent
  s.acquire()
  delta_angle = offsetPhi()

@ -63,22 +45,22 @@ def doSim(delay,thread):
    os.mkdir(dire,0755)
  for file in [options.geometry+'.geom',options.load+'.load','numerics.config']:
    copy2(dirCurrent+'/'+file, dire)
  newMaterialConfig(dirCurrent,delta_angle)

  os.chdir(dire)
  if not os.path.isfile('%s_%s.spectralOut'%(options.geometry,options.load)):
    print('starting uniaxial tension in direction of angle %s from %s'%(file_angle,thread))
    s.release()
    damask.util.execute('DAMASK_spectral -g %s -l %s'%(options.geometry,options.load))
  else: s.release()

  s.acquire()
  if not os.path.isfile('./%s/%s_%s.txt'%('Rvalues',options.geometry,options.load)):
    print('starting post processing for angle %s from %s'%(file_angle,thread))
    s.release()
    damask.util.execute('postResults --cr f,p -d %s %s_%s.spectralOut'%('Rvalues',options.geometry,options.load))
    damask.util.execute('addCauchy ./%s/%s_%s.txt'%('Rvalues',options.geometry,options.load))
    damask.util.execute('addStrainTensors -l -v ./%s/%s_%s.txt'%('Rvalues',options.geometry,options.load))
    print('post processing for angle %s from %s is finished'%(file_angle,thread))

  else:

@ -122,7 +104,6 @@ def newMaterialConfig(dire,angle):
      line2 = line
    f.write(line2)
  f.close()

# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

@ -135,16 +116,21 @@ strength anisotropic coefficients (normalized yield stress)
""", version=string.replace(scriptID,'\n','\\n')
)

parser.add_option('-l','--load',
                  dest='load', type='string',
                  help='name of the load file [%default]', metavar='string')
parser.add_option('-g','--geometry',
                  dest='geometry', type='string',
                  help='name of the geometry file [%default]', metavar='string')
parser.add_option('-s','--strain',
                  dest='strain', type='string', action='callback', callback=list_split,
                  help='threshold strains, using comma to separate multiple strains [%default]', metavar='string')
parser.add_option('-t','--threads',
                  dest='threads', type='int',
                  help='number of parallel executions [%default]', metavar='int')
parser.add_option('-n','--number',
                  dest='number', type='int',
                  help='number of uni-axial tensile tests [%default]', metavar='int')

parser.set_defaults(geometry = '20grains16x16x16')
parser.set_defaults(load     = 'tensionX')
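The local execute() helper removed above is superseded by damask.util.execute; for reference, the removed implementation read essentially as follows:

import os,subprocess,shlex

def execute(cmd,streamIn=None,wd='./'):
  """executes a command in given directory and returns stdout and stderr for optional stdin"""
  initialPath = os.getcwd()
  os.chdir(wd)
  process = subprocess.Popen(shlex.split(cmd),stdout=subprocess.PIPE,stderr=subprocess.PIPE,stdin=subprocess.PIPE)
  out,error = process.communicate(streamIn.read() if streamIn is not None else None)  # feed optional stdin
  os.chdir(initialPath)
  return out,error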
@ -1,33 +1,15 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,string,scipy.ndimage
import numpy as np
import damask
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Apply filter(s) to Gwyddion data.
""" + string.replace(scriptID,'\n','\\n')
)

@ -59,7 +41,7 @@ for file in filenames:
    if pieces[1] == 'Height:': height = float(pieces[2])
    header.append(line.lstrip('#').strip())

  elevation = np.loadtxt(file)#*1e6

  if options.opening > 0:
    elevation = scipy.ndimage.morphology.grey_opening(elevation,options.opening)

@ -80,5 +62,5 @@ for file in filenames:
    elevation = scipy.ndimage.filters.median_filter(elevation,options.median)
    filters += '_median%i'%options.median

  np.savetxt(os.path.splitext(file)[0]+filters+os.path.splitext(file)[1],elevation,header='\n'.join(header))
@ -1,9 +1,10 @@
#!/usr/bin/env python
# -*- coding: UTF-8 no BOM -*-

import os,string,vtk
import numpy as np
import damask
from optparse import OptionParser

scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])

@ -26,26 +27,7 @@ scalingFactor = { \
                 },
                }

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
Produce VTK rectilinear grid from Gwyddion dataset exported as text.
""" + string.replace(scriptID,'\n','\\n')
)

@ -78,16 +60,16 @@ for file in filenames:
  if options.scaling == 0.0:
    options.scaling = scalingFactor[lateralunit][elevationunit]

  elevation = np.loadtxt(file)*options.scaling

  grid = vtk.vtkRectilinearGrid()
  grid.SetDimensions(elevation.shape[1],elevation.shape[0],1)

  xCoords = vtk.vtkDoubleArray()
  for x in np.arange(0.0,width,width/elevation.shape[1],'d'):
    xCoords.InsertNextValue(x)
  yCoords = vtk.vtkDoubleArray()
  for y in np.arange(0.0,height,height/elevation.shape[0],'d'):
    yCoords.InsertNextValue(y)
  zCoords = vtk.vtkDoubleArray()
  zCoords.InsertNextValue(0.0)

@ -99,8 +81,8 @@ for file in filenames:
  vector = vtk.vtkFloatArray()
  vector.SetName("elevation")
  vector.SetNumberOfComponents(3)
  vector.SetNumberOfTuples(np.prod(elevation.shape))
  for i,z in enumerate(np.ravel(elevation)):
    vector.SetTuple3(i,0,0,z)

  grid.GetPointData().AddArray(vector)
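A hedged follow-up sketch (not part of the original file): writing the assembled grid with VTK's XML writer; older VTK releases use SetInput instead of SetInputData.

  writer = vtk.vtkXMLRectilinearGridWriter()
  writer.SetFileName(os.path.splitext(file)[0]+'.vtr')
  writer.SetInputData(grid)
  writer.Write()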
@ -1,7 +1,7 @@
#!/usr/bin/python
# -*- coding: UTF-8 no BOM -*-

import threading,time,os
import numpy as np
from optparse import OptionParser
import damask
@ -56,10 +56,11 @@ def runFit(exponent, eqStress, dimension, criterion):
  damask.util.croak(fitResidual)

def principalStresses(sigmas):
  """
  computes principal stresses (i.e. eigenvalues) for a set of Cauchy stresses.

  sorted in descending order.
  """
  lambdas = np.zeros(0,'d')
  for i in xrange(np.shape(sigmas)[1]):
    eigenvalues = np.linalg.eigvalsh(sym6toT33(sigmas[:,i]))

@ -82,27 +83,25 @@ def principalStress(p):
                   t1 + t2*np.cos(phi+np.pi*4.0/3.0)])

def principalStrs_Der(p, (s1, s2, s3, s4, s5, s6), dim, Karafillis=False):
  """Derivative of principal stress with respect to stress"""
  third  = 1.0/3.0
  third2 = 2.0*third

  I      = invariant(p)
  I1s3I2 = np.sqrt(I[0]**2 - 3.0*I[1])
  numer  = 2.0*I[0]**3 - 9.0*I[0]*I[1] + 27.0*I[2]
  denom  = 2.0*I1s3I2**3
  cs     = numer/denom
  phi    = np.arccos(cs)/3.0

  dphidcs   = -third/np.sqrt(1.0 - cs**2)
  dcsddenom = 0.5*numer*(-1.5)*I1s3I2**(-5.0)
  dcsdI1    = (6.0*I[0]**2 - 9.0*I[1])*denom + dcsddenom*(2.0*I[0])
  dcsdI2    = (            - 9.0*I[0])*denom + dcsddenom*(-3.0)
  dcsdI3    = 27.0*denom
  dphidI1, dphidI2, dphidI3 = dphidcs*dcsdI1, dphidcs*dcsdI2, dphidcs*dcsdI3

  dI1s3I2dI1 = I[0]/I1s3I2
  dI1s3I2dI2 = -1.5/I1s3I2
  tcoeff     = third2*I1s3I2

@ -150,13 +149,13 @@ def math_ln(x):
  return np.log(x + 1.0e-32)

def sym6toT33(sym6):
  """Shape the symmetric stress tensor(6) into (3,3)"""
  return np.array([[sym6[0],sym6[3],sym6[5]],
                   [sym6[3],sym6[1],sym6[4]],
                   [sym6[5],sym6[4],sym6[2]]])

def t33toSym6(t33):
  """Shape the stress tensor(3,3) into symmetric (6)"""
  return np.array([ t33[0,0],
                    t33[1,1],
                    t33[2,2],

@ -165,9 +164,6 @@ def t33toSym6(t33):
                   (t33[2,0] + t33[0,2])/2.0,])    # * * 2
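A minimal check (not part of the original file) that the two shape helpers above invert each other for a symmetric tensor:

t = np.array([[1.0, 4.0, 6.0],
              [4.0, 2.0, 5.0],
              [6.0, 5.0, 3.0]])
print(np.allclose(sym6toT33(t33toSym6(t)), t))    # True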
class Criteria(object):
  def __init__(self, criterion, uniaxialStress,exponent, dimension):
    self.stress0 = uniaxialStress
    if exponent < 0.0:                                    # Fitting exponent m

@ -183,9 +179,8 @@ class Criteria(object):
    return self.func(self.stress0, paras, sigmas,self.mFix,self.criteria,self.dim,Jac=True)
class Vegter(object):
  """Vegter yield criterion"""

  def __init__(self, refPts, refNormals,nspace=11):
    self.refPts, self.refNormals = self._getRefPointsNormals(refPts, refNormals)
    self.hingePts = self._getHingePoints()

@ -211,11 +206,12 @@ class Vegter(object):
    return refPts,refNormals

  def _getHingePoints(self):
    """
    calculate the hinge point B according to the reference points A,C and the normals n,m

    refPoints  = np.array([[p1_x, p1_y], [p2_x, p2_y]])
    refNormals = np.array([[n1_x, n1_y], [n2_x, n2_y]])
    """
    def hingPoint(points, normals):
      A1 = points[0][0];   A2 = points[0][1]
      C1 = points[1][0];   C2 = points[1][1]
@ -235,9 +231,7 @@ class Vegter(object):
    return np.array([bezier(self.refPts[i:i+2],self.hingePts[i]) for i in xrange(len(self.refPts)-1)])

def VetgerCriterion(stress,lankford, rhoBi0, theta=0.0):
  """0-pure shear; 1-uniaxial; 2-plane strain; 3-equi-biaxial"""
  def getFourierParas(r):
    # get the value after Fourier transformation
    nset = len(r)

@ -262,12 +256,6 @@ def VetgerCriterion(stress,lankford, rhoBi0, theta=0.0):
    for j in xrange(3):
      refPts[j,i] = np.dot(getFourierParas(strsSet[:,j,i]), fouriercoeffs)
def Tresca(eqStress=None,    #not needed/supported
           paras=None,

@ -276,10 +264,11 @@ def Tresca(eqStress=None, #not needed/supported
           criteria=None,    #not needed/supported
           dim=3,
           Jac=False):
  """
  Tresca yield criterion

  the fitted parameter is paras(sigma0)
  """
  if not Jac:
    lambdas = principalStresses(sigmas)
    r = np.amax(np.array([abs(lambdas[2,:]-lambdas[1,:]),\
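The hunk above is truncated by the diff; for orientation, a standalone sketch (not part of the original file) of the Tresca equivalent stress that the r computation builds:

def trescaEquivalent(lambdas):
  """largest principal stress difference (illustrative helper, not part of the fit)"""
  return np.amax(np.array([abs(lambdas[2]-lambdas[1]),
                           abs(lambdas[1]-lambdas[0]),
                           abs(lambdas[0]-lambdas[2])]), axis=0)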
@ -296,13 +285,14 @@ def Cazacu_Barlat(eqStress=None,
                  criteria=None,
                  dim=3,    #2D also possible
                  Jac=False):
  """
  Cazacu-Barlat (CB) yield criterion

  the fitted parameters are:
  a1,a2,a3,a6; b1,b2,b3,b4,b5,b10; c for plane stress
  a1,a2,a3,a4,a5,a6; b1,b2,b3,b4,b5,b6,b7,b8,b9,b10,b11; c for the general case
  mFix is ignored
  """
  s11,s22,s33,s12,s23,s31 = sigmas
  if dim == 2:
    (a1,a2,a3,a4), (b1,b2,b3,b4,b5,b10), c = paras[0:4],paras[4:10],paras[10]
@ -356,13 +346,14 @@ def Drucker(eqStress=None,    #not needed/supported
            criteria=None,
            dim=3,
            Jac=False):
  """
  Drucker yield criterion

  the fitted parameters are
  sigma0, C_D    for Drucker(p=1);
  sigma0, C_D, p for general Drucker
  eqStress, mFix are invalid inputs
  """
  if criteria == 'drucker':
    sigma0, C_D = paras
    p = 1.0

@ -386,7 +377,7 @@ def Drucker(eqStress=None,    #not needed/supported
    if criteria == 'drucker':
      return np.vstack((-r/sigma0, -drdl*J3_2p)).T
    else:
      dldp = 3.0*J2_3p*math_ln(J[1]) - 2.0*C_D*J3_2p*math_ln(J[2])
      jp   = drdl*dldp + r*math_ln(left)/(-6.0*p*p)

      if mFix[0]: return np.vstack((-r/sigma0, -drdl*J3_2p)).T
@ -399,12 +390,13 @@ def Hill1948(eqStress=None,    #not needed/supported
             criteria=None,    #not needed/supported
             dim=3,
             Jac=False):
  """
  Hill 1948 yield criterion

  the fitted parameters are:
  F, G, H, L, M, N for 3D
  F, G, H, N       for 2D
  """
  s11,s22,s33,s12,s23,s31 = sigmas
  if dim == 2:    # plane stress
    jac = np.array([ s22**2, s11**2, (s11-s22)**2, 2.0*s12**2])
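As a worked reference (not part of the original file; hypothetical helper name): the plane-stress Hill 1948 yield function whose parameter derivatives form the Jacobian above:

def hill1948_2d(F, G, H, N, s11, s22, s12):
  """plane-stress Hill 1948 left-hand side; yielding once the value reaches 1"""
  return F*s22**2 + G*s11**2 + H*(s11-s22)**2 + 2.0*N*s12**2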
@ -423,11 +415,11 @@ def Hill1979(eqStress=None,    #not needed/supported
             criteria=None,    #not needed/supported
             dim=3,
             Jac=False):
  """
  Hill 1979 yield criterion

  the fitted parameters are: f,g,h,a,b,c,m
  """
  if mFix[0]:
    m = mFix[1]
  else:
@ -458,14 +450,14 @@ def Hosford(eqStress=None,
            criteria=None,
            dim=3,
            Jac=False):
  """
  Hosford family criteria

  the fitted parameters are:
  von Mises:       sigma0
  Hershey:         (1) sigma0, a, when a is not fixed; (2) sigma0, when a is fixed
  general Hosford: (1) F,G,H, a, when a is not fixed; (2) F,G,H, when a is fixed
  """
  if criteria == 'vonmises':
    sigma0 = paras
    coeff  = np.ones(3)
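For the von Mises member of the Hosford family the criterion reduces to the familiar expression; a hedged sketch (not part of the original file):

def vonMisesEquivalent(lambdas):
  """von Mises equivalent stress from principal stresses (illustrative)"""
  return np.sqrt(((lambdas[0]-lambdas[1])**2 +
                  (lambdas[1]-lambdas[2])**2 +
                  (lambdas[2]-lambdas[0])**2)/2.0)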
@ -509,11 +501,12 @@ def Barlat1989(eqStress=None,
               criteria=None,
               dim=3,
               Jac=False):
  """
  Barlat-Lian 1989 yield criteria

  the fitted parameters are:
  Anisotropic: a, h, p, m; m is optional
  """
  a, h, p = paras[0:3]
  if mFix[0]: m = mFix[1]
  else:       m = paras[-1]

@ -536,7 +529,7 @@ def Barlat1989(eqStress=None,
    drdl, drdm = r/m/left, r*math_ln(0.5*left)*(-1.0/m/m)
    dldm = np.dot(np.array([a,a,c]),fm*math_ln(fs))*0.5

    ja,jc = drdl*dlda, drdl*dldc
    jh,jp = drdl*(dldk1*dk1dh + dldk2*dk2dh), drdl*dldk2*dk2dp
    jm    = drdl*dldm + drdm

@ -544,13 +537,14 @@ def Barlat1989(eqStress=None,
    else:       return np.vstack((ja,jc,jh,jp,jm)).T
def Barlat1991(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  """
  Barlat 1991 criteria

  the fitted parameters are:
  Anisotropic: a, b, c, f, g, h, m for 3D
               a, b, c, h, m       for plane stress
  m is optional
  """
  if dim == 2: coeff = paras[0:4]    # plane stress
  else:        coeff = paras[0:6]    # general case
  if mFix[0]:  m = mFix[1]

@ -605,12 +599,13 @@ def Barlat1991(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  else: return np.vstack((dfdI2*dI2dx + dfdI3*dI3dx, jm)).T
def BBC2000(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  """
  BBC2000 yield criterion

  the fitted parameters are
  d,e,f,g, b,c,a, k;  k is optional
  criteria is ignored
  """
  d,e,f,g, b,c,a = paras[0:7]
  if mFix[0]: k = mFix[1]
  else:       k = paras[-1]

@ -647,12 +642,13 @@ def BBC2000(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
def BBC2003(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  """
  BBC2003 yield criterion

  the fitted parameters are
  M,N,P,Q,R,S,T,a, k;  k is optional
  criteria is ignored
  """
  M,N,P,Q,R,S,T,a = paras[0:8]
  if mFix[0]: k = mFix[1]
  else:       k = paras[-1]

@ -689,12 +685,13 @@ def BBC2003(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  else :      return np.vstack((J, drdl*dldk+drdk)).T
def BBC2005(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  """
  BBC2005 yield criterion

  the fitted parameters are
  a, b, L, M, N, P, Q, R, k;  k is optional
  criteria is ignored
  """
  a,b,L, M, N, P, Q, R = paras[0:8]
  if mFix[0]: k = mFix[1]
  else:       k = paras[-1]

@ -739,10 +736,12 @@ def BBC2005(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  else :      return np.vstack((J, dldk+dsBarde*dedk)).T
def Yld2000(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  """
  Yld2000 yield criterion

  C: c11,c22,c66  c12=c21=1.0 JAC NOT PASS
  D: d11,d12,d21,d22,d66
  """
  C,D = paras[0:3], paras[3:8]
  if mFix[0]: m = mFix[1]
  else:       m = paras[-1]

@ -769,8 +768,7 @@ def Yld2000(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
    drdl, drdm = r/m/left, r*math_ln(0.5*left)*(-1.0/m/m)    #/(-m*m)
    dldm = ( phi1*math_ln(phi1s) + phi21*math_ln(phi21s) + phi22*math_ln(phi22s) )*0.5
    zero = np.zeros_like(s11); num = len(s11)
    def dPrincipalds((X1,X2,X12)):    # derivative of principal stresses with respect to stress
      temp   = 1.0/np.sqrt( (X1-X2)**2 + 4.0*X12**2 )
      dP1dsi = 0.5*np.array([ 1.0+temp*(X1-X2), 1.0-temp*(X1-X2),  temp*4.0*X12])
      dP2dsi = 0.5*np.array([ 1.0-temp*(X1-X2), 1.0+temp*(X1-X2), -temp*4.0*X12])

@ -798,14 +796,15 @@ def Yld2000(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  else: return np.vstack((jC,jD,jm)).T
def Yld200418p(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  """
  Yld2004-18p yield criterion

  the fitted parameters are
  C: c12,c21,c23,c32,c31,c13,c44,c55,c66; D: d12,d21,d23,d32,d31,d13,d44,d55,d66 for 3D
  C: c12,c21,c23,c32,c31,c13,c44;         D: d12,d21,d23,d32,d31,d13,d44         for 2D
  and m; m is optional
  criteria is ignored
  """
  if dim == 2: C,D = np.append(paras[0:7],[0.0,0.0]), np.append(paras[7:14],[0.0,0.0])
  else:        C,D = paras[0:9], paras[9:18]
  if mFix[0]:  m = mFix[1]

@ -843,14 +842,15 @@ def Yld200418p(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  else: return np.vstack((jc,jd,jm)).T
def KarafillisBoyce(eqStress, paras, sigmas, mFix, criteria, dim, Jac=False):
  """
  Karafillis-Boyce yield criterion

  the fitted parameters are
  c11,c12,c13,c14,c15,c16,c,m for 3D
  c11,c12,c13,c14,c,m         for plane stress
  0 < c < 1; c and m are optional
  criteria is ignored
  """
  ks = lambda (s1,s2,s3,s4,s5,s6),(c1,c2,c3,c4,c5,c6): np.array( [
       ((c2+c3)*s1-c3*s2-c2*s3)/3.0,  ((c3+c1)*s2-c3*s1-c1*s3)/3.0,
       ((c1+c2)*s3-c2*s1-c1*s2)/3.0,  c4*s4, c5*s5, c6*s6 ])
@ -1007,7 +1007,8 @@ fitCriteria = {
                 'nExpo': 1,'err':np.inf,
                 'dimen': [3],
                 'bound': [[(None,None)]*18+[(1.0,8.0)], [(None,None)]*14+[(1.0,8.0)]],
                 'labels': [['c12','c21','c23','c32','c31','c13','c44','c55','c66',
                             'd12','d21','d23','d32','d31','d13','d44','d55','d66','m'],
                            ['c12','c21','c23','c32','c31','c13','c44','d12','d21','d23','d32','d31','d13','d44','m']],
                },
  'karafillis' :{'name': 'Karafillis-Boyce',
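The 'bound' entries follow the leastsqBound convention above, one (min, max) pair per fitted parameter with None for an open side; for example, the 3D Yld2004-18p case reads:

bounds3D = [(None,None)]*18 + [(1.0,8.0)]    # 18 free coefficients, exponent m confined to [1, 8]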
@ -1028,12 +1029,8 @@ thresholdParameter = ['totalshear','equivalentStrain']

#---------------------------------------------------------------------------------------------------
class Loadcase():
  """generating load cases for the spectral solver"""

  def __init__(self,finalStrain,incs,time,nSet=1,dimension=3,vegter=False):
    self.finalStrain = finalStrain
    self.incs        = incs

@ -1087,7 +1084,6 @@ class Loadcase():
               ' time %s'%self.time
  def _getLoadcase2dVegter(self,number):    #for a 2D simulation, I would use this generator before switching to a random 2D generator
    # biaxial   f1 = f2
    # shear     f1 = -f2
    # uniaxial  f1, f2 = 0

@ -1102,9 +1098,7 @@ class Loadcase():
               ' time %s'%self.time

  def _vegterLoadcase(self):
    """generate the stress points for Vegter criteria (incomplete/untested)"""
    theta = np.linspace(0.0,np.pi/2.0,self.nSet)
    f = [0.0, 0.0, '*']*3;  loadcase = []
    for i in xrange(self.nSet*4): loadcase.append(f)

@ -1115,16 +1109,14 @@ class Loadcase():
                  [[1.1, 0.1], [0.1, 1.1]],    # eq-biaxial
                  [[1.1, 0.1], [0.1, 1.1]],    # eq-biaxial
                 ])
    for i,t in enumerate(theta):
      R = np.array([np.cos(t), np.sin(t), -np.sin(t), np.cos(t)]).reshape(2,2)
      for j in xrange(4):
        loadcase[i*4+j][0],loadcase[i*4+j][1],loadcase[i*4+j][3],loadcase[i*4+j][4] = np.dot(R.T,np.dot(F[j],R)).reshape(4)
    return loadcase

  def _getLoadcase2dRandom(self):
    """generate random stress points for 2D tests"""
    self.NgeneratedLoadCases+=1
    defgrad=['0', '0', '*']*3
    stress =['*', '*', '0']*3

@ -1135,8 +1127,6 @@ class Loadcase():
           ' incs %s'%self.incs+\
           ' time %s'%self.time

  def _defgradScale(self, defgrad):
    def fill_star(a,b):
      if a != '*' and b != '*': return a,b
      elif a == '*' and b != '*': return b,b
@ -1160,10 +1150,8 @@ class Loadcase():
|
||||||
|
|
||||||
#---------------------------------------------------------------------------------------------------
|
#---------------------------------------------------------------------------------------------------
|
||||||
class Criterion(object):
|
class Criterion(object):
|
||||||
#---------------------------------------------------------------------------------------------------
|
"""Fitting to certain criterion"""
|
||||||
'''
|
|
||||||
Fitting to certain criterion
|
|
||||||
'''
|
|
||||||
def __init__(self, exponent, uniaxial, dimension, label='vonmises'):
|
def __init__(self, exponent, uniaxial, dimension, label='vonmises'):
|
||||||
self.name = label
|
self.name = label
|
||||||
self.expo = exponent
|
self.expo = exponent
|
||||||
|
@ -1187,7 +1175,7 @@ class Criterion(object):
|
||||||
|
|
||||||
def fit(self,stress):
|
def fit(self,stress):
|
||||||
global fitResults; fitErrors; fitResidual
|
global fitResults; fitErrors; fitResidual
|
||||||
if options.exponent > 0.0: nExponent = nExpo
|
if options.exponent > 0.0: nExponent = options.exponent
|
||||||
else: nExponent = 0
|
else: nExponent = 0
|
||||||
nameCriterion = self.name.lower()
|
nameCriterion = self.name.lower()
|
||||||
criteria = Criteria(nameCriterion,self.uniaxial,self.expo, self.dimen)
|
criteria = Criteria(nameCriterion,self.uniaxial,self.expo, self.dimen)
|
||||||
|
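In the `fit` hunk above, `nExpo` was never defined in this script, so any run with a positive `--exponent` would have died with a NameError; the fix passes the parsed option value through instead. A minimal sketch of the corrected guard, with `options` mocked via `optparse.Values` purely for illustration:

from optparse import Values

options = Values({'exponent': 8.0})                 # stand-in for the parsed command-line options

# a positive exponent is used as-is; otherwise 0 signals that the exponent is fitted too
nExponent = options.exponent if options.exponent > 0.0 else 0
print(nExponent)                                    # 8.0, instead of a NameError on undefined nExpo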
@@ -1225,13 +1213,10 @@ class Criterion(object):
       pass
     return popt


 #---------------------------------------------------------------------------------------------------
 class myThread (threading.Thread):
-#---------------------------------------------------------------------------------------------------
-  '''
-     Runner class
-  '''
+  """Runner"""
   def __init__(self, threadID):
     threading.Thread.__init__(self)
     self.threadID = threadID
@@ -1246,8 +1231,6 @@ class myThread (threading.Thread):
     s.release()

 def doSim(thread):
-
-# if load case do not exist, create new one
   s.acquire()
   global myLoad
   loadNo=loadcaseNo()
@@ -1337,7 +1320,7 @@ def doSim(thread):
         strainAll[i]=np.append(strainAll[i], deformationRate[i])
       f.write( str(threshold)+' '+
                ' '.join(map(str,myFit.fit(stressAll[i].reshape(len(stressAll[i])//6,6).transpose())))+'\n')
-  except Exception as detail:
+  except Exception:
     damask.util.croak('Could not fit results of simulation (%s)'%thread)
     s.release()
     return
@@ -1440,7 +1423,7 @@ else : stressUnit = 1.0e6
 if options.dimension not in fitCriteria[options.criterion]['dimen']:
   parser.error('invalid dimension for selected criterion')

-if options.criterion not in ['vonmises','tresca','drucker','hill1984'] and options.eqStress == None:
+if options.criterion not in ['vonmises','tresca','drucker','hill1984'] and options.eqStress is None:
   parser.error('please specify an equivalent stress (e.g. fitting to von Mises)')

 run = runFit(options.exponent, options.eqStress, options.dimension, options.criterion)
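The `== None` to `is None` conversions in this and the following files are more than style: `is` tests identity and cannot be intercepted by a custom `__eq__`, and for NumPy arrays `== None` broadcasts elementwise rather than answering whether the option was set. A small sketch of the failure mode, assuming nothing beyond NumPy:

import numpy as np

frame = None
print(frame is None)              # True: unambiguous identity test

frame = np.array([1.0, 2.0])
print(frame == None)              # elementwise: [False False], not a single boolean
if frame is not None:             # safe; "if frame == None:" would raise on truth-testing the array
    print('frame given')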
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,re,string,fnmatch,vtk
+import os,sys,re,fnmatch,vtk
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -56,8 +56,7 @@ def unravel(item):

 # ++++++++++++++++++++++++++++++++++++++++++++++++++++
 def vtk_writeASCII_mesh(mesh,data,res,sep):
-# ++++++++++++++++++++++++++++++++++++++++++++++++++++
-  """ function writes data array defined on a hexahedral mesh (geometry) """
+  """function writes data array defined on a hexahedral mesh (geometry)"""
   info = {\
            'tensor': {'name':'tensor','len':9},\
            'vector': {'name':'vector','len':3},\
@@ -111,10 +110,9 @@ def vtk_writeASCII_mesh(mesh,data,res,sep):

   return cmds

-# +++++++++++++++++++++++++++++++++++++++++++++++++++
+#+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 def vtk_writeASCII_points(coordinates,data,res,sep):
-# +++++++++++++++++++++++++++++++++++++++++++++++++++
-  """ function writes data array defined on a point field """
+  """function writes data array defined on a point field"""
   N = res[0]*res[1]*res[2]

   cmds = [\
@@ -216,7 +214,7 @@ for filename in args:
   content = file.readlines()
   file.close()
   m = re.search('(\d+)\s*head', content[0].lower())
-  if m == None:
+  if m is None:
     continue
   print filename,'\n'
   sys.stdout.flush()
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -19,13 +19,13 @@ Transform X,Y,Z,F APS BeamLine 34 coordinates to x,y,z APS strain coordinates.

 """, version = scriptID)

-parser.add_option('-f','--frame', dest='frame', nargs=4, type='string', metavar='<string string string string>',
+parser.add_option('-f','--frame', dest='frame', nargs=4, type='string', metavar='string string string string',
                   help='APS X,Y,Z coords, and depth F')
 parser.set_defaults(frame = None)

 (options,filenames) = parser.parse_args()

-if options.frame == None:
+if options.frame is None:
   parser.error('no data column specified...')

@@ -33,7 +33,7 @@ datainfo = {'len':3,
             'label':[]
            }

-if options.frame != None: datainfo['label'] += options.frame
+datainfo['label'] += options.frame

 # --- loop over input files -------------------------------------------------------------------------
 if filenames == []:
@@ -75,8 +75,8 @@ for name in filenames:
 # ------------------------------------------ process data ------------------------------------------
   theta=-0.75*np.pi
   RotMat2TSL=np.array([[1., 0., 0.],
-                       [0., np.cos(theta), np.sin(theta)],
-                       [0., -np.sin(theta), np.cos(theta)]]) # Orientation Matrix to account for -135 degree rotation for TSL Convention[Adapted from Chen Zhang's code]
+                       [0., np.cos(theta), np.sin(theta)],   # Orientation to account for -135 deg
+                       [0., -np.sin(theta), np.cos(theta)]]) # rotation for TSL convention
   vec = np.zeros(4)

   outputAlive = True
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,re,sys,string
-import math # flake8: noqa
+import os,re,sys
+import math # noqa
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -39,7 +39,7 @@ parser.add_option('-f','--formula',

 (options,filenames) = parser.parse_args()

-if options.labels == None or options.formulas == None:
+if options.labels is None or options.formulas is None:
   parser.error('no formulas and/or labels specified.')
 if len(options.labels) != len(options.formulas):
   parser.error('number of labels ({}) and formulas ({}) do not match.'.format(len(options.labels),len(options.formulas)))
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -1,36 +1,16 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,re,sys,math,string,h5py
+import os,string,h5py
 import numpy as np
+from optparse import OptionParser
 import damask
-from optparse import OptionParser, Option
-
-# -----------------------------
-class extendableOption(Option):
-# -----------------------------
-# used for definition of new option parser action 'extend', which enables to take multiple option arguments
-# taken from online tutorial http://docs.python.org/library/optparse.html
-
-  ACTIONS = Option.ACTIONS + ("extend",)
-  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
-  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
-  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)
-
-  def take_action(self, action, dest, opt, value, values, parser):
-    if action == "extend":
-      lvalue = value.split(",")
-      values.ensure_value(dest, []).extend(lvalue)
-    else:
-      Option.take_action(self, action, dest, opt, value, values, parser)


 # --------------------------------------------------------------------
 #                                MAIN
 # --------------------------------------------------------------------

-parser = OptionParser(option_class=extendableOption, usage='%prog options [file[s]]', description = """
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
 Add column(s) containing Cauchy stress based on given column(s) of
 deformation gradient and first Piola--Kirchhoff stress.
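The option-parser boilerplate deleted above survives as `damask.extendableOption`; it registers an 'extend' action that splits comma-separated option arguments into a growing list. A self-contained sketch of the same mechanism, reusing the removed class body so it runs without the damask package:

from optparse import Option, OptionParser

class extendableOption(Option):
    """Option subclass adding an 'extend' action: --tensor a,b --tensor c -> ['a','b','c']"""
    ACTIONS = Option.ACTIONS + ("extend",)
    STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
    TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
    ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

    def take_action(self, action, dest, opt, value, values, parser):
        if action == "extend":
            values.ensure_value(dest, []).extend(value.split(","))
        else:
            Option.take_action(self, action, dest, opt, value, values, parser)

parser = OptionParser(option_class=extendableOption)
parser.add_option('-t', '--tensor', dest='tensor', action='extend', metavar='<string LIST>')
options, args = parser.parse_args(['-t', 'f,p', '--tensor', 'cauchy'])
print(options.tensor)                              # ['f', 'p', 'cauchy']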
@@ -50,7 +30,7 @@ parser.set_defaults(output = 'crystallite')

 (options,filenames) = parser.parse_args()

-if options.defgrad == None or options.stress == None or options.output == None:
+if options.defgrad is None or options.stress is None or options.output is None:
   parser.error('missing data column...')

@@ -79,6 +59,3 @@ for myFile in files:
       cauchy[p,...] = 1.0/np.linalg.det(defgrad[p,...])*np.dot(stress[p,...],defgrad[p,...].T) # [Cauchy] = (1/det(F)) * [P].[F_transpose]
     cauchyFile = myFile['file']['increments/'+inc+'/'+options.output+'/'+instance].create_dataset('cauchy', data=cauchy)
     cauchyFile.attrs['units'] = 'Pa'
-
-
-
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -81,7 +81,6 @@ for name in filenames:
   table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
   if options.shape:  table.labels_append('shapeMismatch({})'.format(options.defgrad))
   if options.volume: table.labels_append('volMismatch({})'.format(options.defgrad))
-#table.head_write()

 # --------------- figure out size and grid ---------------------------------------------------------

@@ -92,7 +91,7 @@ for name in filenames:
   maxcorner = np.array(map(max,coords))
   grid   = np.array(map(len,coords),'i')
   size   = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
-  size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))      # spacing for grid==1 equal to smallest among other spacings
+  size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))      # spacing for grid==1 set to smallest among other spacings

   N = grid.prod()
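The `size` lines above encode the grid bookkeeping shared by these scripts: unique cell-center coordinates give the grid, the edge-to-edge size is dim * n/(n-1), and a direction with a single cell inherits the smallest spacing of the others. A runnable sketch on a toy 2x2x1 point set (list comprehensions replace the scripts' py2 `map` calls):

import numpy as np

# cell-centered coordinates of a 2 x 2 x 1 grid, spacing 0.5 in x and y
data = np.array([[0.25, 0.25, 0.0],
                 [0.75, 0.25, 0.0],
                 [0.25, 0.75, 0.0],
                 [0.75, 0.75, 0.0]])

coords = [np.unique(data[:, i]) for i in range(3)]
mincorner = np.array([c.min() for c in coords])
maxcorner = np.array([c.max() for c in coords])
grid = np.array([len(c) for c in coords], 'i')
size = grid/np.maximum(np.ones(3, 'd'), grid-1.0) * (maxcorner-mincorner)    # dim * n/(n-1)
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))          # grid==1: smallest other spacing
print(size)                                        # [1.  1.  0.5]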
@@ -117,7 +116,7 @@ for name in filenames:
     (x,y,z) = damask.util.gridLocation(idx,grid) # figure out (x,y,z) position from line count
     idx += 1
     F[0:3,0:3,x,y,z] = np.array(map(float,table.data[column:column+9]),'d').reshape(3,3)
-  print 'hm'
+
   Favg = damask.core.math.tensorAvg(F)
   centres = damask.core.mesh.deformedCoordsFFT(size,F,Favg,[1.0,1.0,1.0])
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,math
+import os,sys,math
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -10,40 +10,35 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])

 def curlFFT(geomdim,field):
-  N = grid.prod()                                          # field size
-  n = np.array(np.shape(field)[3:]).prod()                 # data size
+  grid = np.array(np.shape(field)[2::-1])
+  N = grid.prod()                                          # field size
+  n = np.array(np.shape(field)[3:]).prod()                 # data size

-  if n == 3:
-    dataType = 'vector'
-  elif n == 9:
-    dataType = 'tensor'
+  if   n == 3: dataType = 'vector'
+  elif n == 9: dataType = 'tensor'

   field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
   curl_fourier  = np.zeros(field_fourier.shape,'c16')

 # differentiation in Fourier space
   k_s = np.zeros([3],'i')
-  TWOPIIMG = (0.0+2.0j*math.pi)
+  TWOPIIMG = 2.0j*math.pi
   for i in xrange(grid[2]):
     k_s[0] = i
-    if(grid[2]%2==0 and i == grid[2]//2):                  # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
-      k_s[0]=0
-    elif (i > grid[2]//2):
-      k_s[0] = k_s[0] - grid[2]
+    if grid[2]%2 == 0 and i == grid[2]//2: k_s[0] = 0      # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
+    elif i > grid[2]//2:                   k_s[0] -= grid[2]

     for j in xrange(grid[1]):
       k_s[1] = j
-      if(grid[1]%2==0 and j == grid[1]//2):                # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
-        k_s[1]=0
-      elif (j > grid[1]//2):
-        k_s[1] = k_s[1] - grid[1]
+      if grid[1]%2 == 0 and j == grid[1]//2: k_s[1] = 0    # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
+      elif j > grid[1]//2:                   k_s[1] -= grid[1]

       for k in xrange(grid[0]//2+1):
         k_s[2] = k
-        if(grid[0]%2==0 and k == grid[0]//2):              # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
-          k_s[2]=0
+        if grid[0]%2 == 0 and k == grid[0]//2: k_s[2] = 0  # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)

-        xi = np.array([k_s[2]/geomdim[2]+0.0j,k_s[1]/geomdim[1]+0.j,k_s[0]/geomdim[0]+0.j],'c16')
+        xi = (k_s/geomdim)[2::-1].astype('c16')            # reversing the field input order
         if dataType == 'tensor':
           for l in xrange(3):
             curl_fourier[i,j,k,0,l] = ( field_fourier[i,j,k,l,2]*xi[1]\
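For cross-checking the wavenumber loop in `curlFFT` above: folding indices past n//2 to negative values and zeroing the Nyquist bin on even grids is exactly what `np.fft.fftfreq` yields after scaling, so the branchy bookkeeping can be validated against a vectorized construction. A sketch (verification aid only, not code from this commit):

import numpy as np

def wavevectors(n, dim):
    """integer FFT frequencies scaled by box size, even-grid Nyquist bin set to 0"""
    k = np.fft.fftfreq(n)*n               # 0, 1, ..., then the negative frequencies
    if n % 2 == 0: k[n//2] = 0.0          # zero the Nyquist frequency (Johnson, MIT, 2011)
    return k/dim

print(wavevectors(4, 1.0))                # [ 0.  1.  0. -1.], matching the loop above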
@@ -76,23 +71,23 @@ Deals with both vector- and tensor-valued fields.

 parser.add_option('-c','--coordinates',
                   dest = 'coords',
-                  type = 'string', metavar='string',
-                  help = 'column heading for coordinates [%default]')
+                  type = 'string', metavar = 'string',
+                  help = 'column label of coordinates [%default]')
 parser.add_option('-v','--vector',
                   dest = 'vector',
                   action = 'extend', metavar = '<string LIST>',
-                  help = 'heading of columns containing vector field values')
+                  help = 'column label(s) of vector field values')
 parser.add_option('-t','--tensor',
                   dest = 'tensor',
                   action = 'extend', metavar = '<string LIST>',
-                  help = 'heading of columns containing tensor field values')
+                  help = 'column label(s) of tensor field values')

-parser.set_defaults(coords = 'ipinitialcoord',
+parser.set_defaults(coords = 'pos',
                    )

 (options,filenames) = parser.parse_args()

-if options.vector == None and options.tensor == None:
+if options.vector is None and options.tensor is None:
   parser.error('no data column specified.')

 # --- loop over input files -------------------------------------------------------------------------
@@ -100,10 +95,8 @@ if options.vector == None and options.tensor == None:
 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,buffered = False)
-  except:
-    continue
+  try:    table = damask.ASCIItable(name = name,buffered = False)
+  except: continue
   damask.util.report(scriptName,name)

 # ------------------------------------------ read header ------------------------------------------
@@ -161,8 +154,9 @@ for name in filenames:
   stack = [table.data]
   for type, data in items.iteritems():
     for i,label in enumerate(data['active']):
-      stack.append(curlFFT(size[::-1],             # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
-                           table.data[:,data['column'][i]:data['column'][i]+data['dim']].\
+      # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
+      stack.append(curlFFT(size[::-1],
+                           table.data[:,data['column'][i]:data['column'][i]+data['dim']].
                            reshape([grid[2],grid[1],grid[0]]+data['shape'])))

 # ------------------------------------------ output result -----------------------------------------
@@ -1,164 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: UTF-8 no BOM -*-
-
-import os,sys,string,math
-import numpy as np
-from optparse import OptionParser
-import damask
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID = ' '.join([scriptName,damask.version])
-
-#--------------------------------------------------------------------------------------------------
-def deformedCoordsFFT(F,undeformed=False):
-#--------------------------------------------------------------------------------------------------
- wgt = 1.0/grid.prod()
- integrator = np.array([0.+1.j,0.+1.j,0.+1.j],'c16') * size/ 2.0 / math.pi
- step = size/grid
-
- F_fourier = np.fft.rfftn(F,axes=(0,1,2))
- coords_fourier = np.zeros(F_fourier.shape[0:4],'c16')
-
- if undeformed:
-   Favg=np.eye(3)
- else:
-   Favg=np.real(F_fourier[0,0,0,:,:])*wgt
-#--------------------------------------------------------------------------------------------------
-# integration in Fourier space
- k_s = np.zeros([3],'i')
- for i in xrange(grid[2]):
-   k_s[2] = i
-   if(i > grid[2]//2 ): k_s[2] = k_s[2] - grid[2]
-   for j in xrange(grid[1]):
-     k_s[1] = j
-     if(j > grid[1]//2 ): k_s[1] = k_s[1] - grid[1]
-     for k in xrange(grid[0]//2+1):
-       k_s[0] = k
-       for m in xrange(3):
-         coords_fourier[i,j,k,m] = sum(F_fourier[i,j,k,m,0:3]*k_s*integrator)
-       if (any(k_s != 0)):
-         coords_fourier[i,j,k,0:3] /= -sum(k_s*k_s)
-
-#--------------------------------------------------------------------------------------------------
-# add average to scaled fluctuation and put (0,0,0) on (0,0,0)
- coords = np.fft.irfftn(coords_fourier,F.shape[0:3],axes=(0,1,2))
-
- offset_coords = np.dot(F[0,0,0,:,:],step/2.0) - scaling*coords[0,0,0,0:3]
- for z in xrange(grid[2]):
-   for y in xrange(grid[1]):
-     for x in xrange(grid[0]):
-       coords[z,y,x,0:3] = scaling*coords[z,y,x,0:3] \
-                         + offset_coords \
-                         + np.dot(Favg,step*np.array([x,y,z]))
-
- return coords
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options file[s]', description = """
-Add deformed configuration of given initial coordinates.
-Operates on periodic three-dimensional x,y,z-ordered data sets.
-
-""", version = scriptID)
-
-parser.add_option('-f', '--defgrad',dest='defgrad', metavar = 'string',
-                  help='heading of deformation gradient columns [%default]')
-parser.add_option('--reference', dest='undeformed', action='store_true',
-                  help='map results to reference (undeformed) average configuration [%default]')
-parser.add_option('--scaling', dest='scaling', action='extend', metavar = '<float LIST>',
-                  help='scaling of fluctuation')
-parser.add_option('-u', '--unitlength', dest='unitlength', type='float', metavar = 'float',
-                  help='set unit length for 2D model [%default]')
-parser.add_option('--coordinates', dest='coords', metavar='string',
-                  help='column heading for coordinates [%default]')
-
-parser.set_defaults(defgrad = 'f')
-parser.set_defaults(coords = 'ipinitialcoord')
-parser.set_defaults(scaling = [])
-parser.set_defaults(undeformed = False)
-parser.set_defaults(unitlength = 0.0)
-
-(options,filenames) = parser.parse_args()
-
-options.scaling += [1.0 for i in xrange(max(0,3-len(options.scaling)))]
-scaling = map(float, options.scaling)
-
-
-# --- loop over input files -------------------------------------------------------------------------
-
-if filenames == []: filenames = [None]
-
-for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
-  except: continue
-  damask.util.report(scriptName,name)
-
-# ------------------------------------------ read header ------------------------------------------
-
-  table.head_read()
-
-# ------------------------------------------ sanity checks ----------------------------------------
-
-  errors  = []
-  remarks = []
-
-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: colCoord = table.label_index(options.coords)
-
-  if table.label_dimension(options.defgrad) != 9: errors.append('deformation gradient {} is not a tensor.'.format(options.defgrad))
-  else: colF = table.label_index(options.defgrad)
-
-  if remarks != []: damask.util.croak(remarks)
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-
-# --------------- figure out size and grid ---------------------------------------------------------
-
-  table.data_readArray()
-
-  coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
-  mincorner = np.array(map(min,coords))
-  maxcorner = np.array(map(max,coords))
-  grid = np.array(map(len,coords),'i')
-  size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)   # size from edge to edge = dim * n/(n-1)
-  size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))        # spacing for grid==1 equal to smallest among other spacings
-
-  N = grid.prod()
-
-  if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid))
-  if errors  != []:
-    damask.util.croak(errors)
-    table.close(dismiss = True)
-    continue
-
-# ------------------------------------------ assemble header ---------------------------------------
-
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-  for coord in xrange(3):
-    label = '{}_{}_{}'.format(coord+1,options.defgrad,options.coords)
-    if np.any(scaling) != 1.0: label+='_{}_{}_{}'.format(scaling)
-    if options.undeformed: label+='_undeformed'
-    table.labels_append([label])                                             # extend ASCII header with new labels
-  table.head_write()
-
-# ------------------------------------------ read deformation gradient field -----------------------
-  centroids = deformedCoordsFFT(table.data[:,colF:colF+9].reshape(grid[2],grid[1],grid[0],3,3),
-                                options.undeformed)
-# ------------------------------------------ process data ------------------------------------------
-  table.data_rewind()
-  for z in xrange(grid[2]):
-    for y in xrange(grid[1]):
-      for x in xrange(grid[0]):
-        table.data_read()
-        table.data_append(list(centroids[z,y,x,:]))
-        table.data_write()
-
-# ------------------------------------------ output finalization -----------------------------------
-
-  table.close()                                                              # close ASCII tables
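The script removed above integrated the deformation gradient to deformed coordinates in Fourier space; the displacement script added further below rests on the same identity, spectral division undoing spectral differentiation of a periodic field. A one-dimensional sketch of that inversion (illustration only, not code from this commit):

import numpy as np

# manufactured periodic displacement u(x) = A*sin(2*pi*x/L); F - I = du/dx.
# integrating du/dx spectrally must recover u up to its mean
N, L, A = 16, 2.0, 0.05
x = np.linspace(0.0, L, N, endpoint=False)
dudx = A*2.0*np.pi/L*np.cos(2.0*np.pi*x/L)

k = np.fft.rfftfreq(N)*N                                 # integer frequencies 0..N/2
u_hat = np.zeros(N//2+1, 'c16')
u_hat[1:] = np.fft.rfft(dudx)[1:]*L/(2.0j*np.pi*k[1:])   # divide by i*2*pi*k/L, skip k=0
u = np.fft.irfft(u_hat, N)

print(np.allclose(u, A*np.sin(2.0*np.pi*x/L)))           # True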
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 from optparse import OptionParser
 import damask
@@ -32,7 +32,7 @@ parser.add_option('-t','--tensor',

 (options,filenames) = parser.parse_args()

-if options.tensor == None:
+if options.tensor is None:
   parser.error('no data column specified.')

 # --- loop over input files -------------------------------------------------------------------------
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 from optparse import OptionParser
 import damask
@@ -10,7 +10,7 @@ scriptID = ' '.join([scriptName,damask.version])

 oneThird = 1.0/3.0

-def deviator(m,spherical = False): # Carefull, do not change the value of m (its intent(inout)!)
+def deviator(m,spherical = False): # Careful, do not change the value of m, its intent(inout)!
   sph = oneThird*(m[0]+m[4]+m[8])
   dev = [
     m[0]-sph, m[1],     m[2],
@@ -39,7 +39,7 @@ parser.add_option('-s','--spherical',

 (options,filenames) = parser.parse_args()

-if options.tensor == None:
+if options.tensor is None:
   parser.error('no data column specified...')

 # --- loop over input files -------------------------------------------------------------------------
@@ -0,0 +1,227 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 no BOM -*-
+
+import os,sys,math
+import numpy as np
+import scipy.ndimage
+from optparse import OptionParser
+import damask
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID = ' '.join([scriptName,damask.version])
+
+
+#--------------------------------------------------------------------------------------------------
+def cell2node(cellData,grid):
+
+  nodeData = 0.0
+  datalen = np.array(cellData.shape[3:]).prod()
+
+  for i in xrange(datalen):
+    node = scipy.ndimage.convolve(cellData.reshape(tuple(grid)+(datalen,))[...,i],
+                                  np.ones((2,2,2))/8.,                      # 2x2x2 neighborhood of cells
+                                  mode = 'wrap',
+                                  origin = -1,                              # offset to have cell origin as center
+                                 )                                          # now averaged at cell origins
+    node = np.append(node,node[np.newaxis,0,:,:,...],axis=0)                # wrap along z
+    node = np.append(node,node[:,0,np.newaxis,:,...],axis=1)                # wrap along y
+    node = np.append(node,node[:,:,0,np.newaxis,...],axis=2)                # wrap along x
+
+    nodeData = node[...,np.newaxis] if i==0 else np.concatenate((nodeData,node[...,np.newaxis]),axis=-1)
+
+  return nodeData
+
+#--------------------------------------------------------------------------------------------------
+def displacementAvgFFT(F,grid,size,nodal=False,transformed=False):
+  """calculate average cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
+  if nodal:
+    x, y, z = np.meshgrid(np.linspace(0,size[0],1+grid[0]),
+                          np.linspace(0,size[1],1+grid[1]),
+                          np.linspace(0,size[2],1+grid[2]),
+                          indexing = 'ij')
+  else:
+    x, y, z = np.meshgrid(np.linspace(0,size[0],grid[0],endpoint=False),
+                          np.linspace(0,size[1],grid[1],endpoint=False),
+                          np.linspace(0,size[2],grid[2],endpoint=False),
+                          indexing = 'ij')
+
+  origCoords = np.concatenate((z[:,:,:,None],y[:,:,:,None],x[:,:,:,None]),axis = 3)
+
+  F_fourier = F if transformed else np.fft.rfftn(F,axes=(0,1,2))            # transform or use provided data
+  Favg = np.real(F_fourier[0,0,0,:,:])/grid.prod()                          # take zero freq for average
+  avgDisplacement = np.einsum('ml,ijkl->ijkm',Favg-np.eye(3),origCoords)    # dX = Favg.X
+
+  return avgDisplacement
+
+#--------------------------------------------------------------------------------------------------
+def displacementFluctFFT(F,grid,size,nodal=False,transformed=False):
+  """calculate cell center (or nodal) displacement for deformation gradient field specified in each grid cell"""
+  integrator = 0.5j * size / math.pi
+
+  kk, kj, ki = np.meshgrid(np.where(np.arange(grid[2])>grid[2]//2,np.arange(grid[2])-grid[2],np.arange(grid[2])),
+                           np.where(np.arange(grid[1])>grid[1]//2,np.arange(grid[1])-grid[1],np.arange(grid[1])),
+                           np.arange(grid[0]//2+1),
+                           indexing = 'ij')
+  k_s = np.concatenate((ki[:,:,:,None],kj[:,:,:,None],kk[:,:,:,None]),axis = 3)
+  k_sSquared = np.einsum('...l,...l',k_s,k_s)
+  k_sSquared[0,0,0] = 1.0                                                   # ignore global average frequency
+
+#--------------------------------------------------------------------------------------------------
+# integration in Fourier space
+
+  displacement_fourier = -np.einsum('ijkml,ijkl,l->ijkm',
+                                    F if transformed else np.fft.rfftn(F,axes=(0,1,2)),
+                                    k_s,
+                                    integrator,
+                                   ) / k_sSquared[...,np.newaxis]
+
+#--------------------------------------------------------------------------------------------------
+# backtransformation to real space
+
+  displacement = np.fft.irfftn(displacement_fourier,grid,axes=(0,1,2))
+
+  return cell2node(displacement,grid) if nodal else displacement
+
+
+# --------------------------------------------------------------------
+#                                MAIN
+# --------------------------------------------------------------------
+
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options file[s]', description = """
+Add displacements resulting from deformation gradient field.
+Operates on periodic three-dimensional x,y,z-ordered data sets.
+Outputs at cell centers or cell nodes (into separate file).
+
+""", version = scriptID)
+
+parser.add_option('-f', '--defgrad',
+                  dest = 'defgrad',
+                  metavar = 'string',
+                  help = 'column label of deformation gradient [%default]')
+parser.add_option('-c', '--coordinates',
+                  dest = 'coords',
+                  metavar = 'string',
+                  help = 'column label of coordinates [%default]')
+parser.add_option('--nodal',
+                  dest = 'nodal',
+                  action = 'store_true',
+                  help = 'output nodal (not cell-centered) displacements')
+
+parser.set_defaults(defgrad = 'f',
+                    coords = 'pos',
+                    nodal = False,
+                   )
+
+(options,filenames) = parser.parse_args()
+
+# --- loop over input files -------------------------------------------------------------------------
+
+if filenames == []: filenames = [None]
+
+for name in filenames:
+  try: table = damask.ASCIItable(name = name,
+                                 outname = (os.path.splitext(name)[0]+
+                                            '_nodal'+
+                                            os.path.splitext(name)[1]) if (options.nodal and name) else None,
+                                 buffered = False)
+  except: continue
+  damask.util.report(scriptName,name)
+
+# ------------------------------------------ read header ------------------------------------------
+
+  table.head_read()
+
+# ------------------------------------------ sanity checks ----------------------------------------
+
+  errors  = []
+  remarks = []
+
+  if table.label_dimension(options.defgrad) != 9:
+    errors.append('deformation gradient "{}" is not a 3x3 tensor.'.format(options.defgrad))
+
+  coordDim = table.label_dimension(options.coords)
+  if not 3 >= coordDim >= 1:
+    errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
+  elif coordDim < 3:
+    remarks.append('appending {} dimension{} to coordinates "{}"...'.format(3-coordDim,
+                                                                            's' if coordDim < 2 else '',
+                                                                            options.coords))
+
+  if remarks != []: damask.util.croak(remarks)
+  if errors  != []:
+    damask.util.croak(errors)
+    table.close(dismiss=True)
+    continue
+
+# --------------- figure out size and grid ---------------------------------------------------------
+
+  table.data_readArray([options.defgrad,options.coords])
+  table.data_rewind()
+
+  if len(table.data.shape) < 2: table.data.shape += (1,)                    # expand to 2D shape
+  if table.data[:,9:].shape[1] < 3:
+    table.data = np.hstack((table.data,
+                            np.zeros((table.data.shape[0],
+                                      3-table.data[:,9:].shape[1]),dtype='f')))  # fill coords up to 3D with zeros
+
+  if remarks != []: damask.util.croak(remarks)
+  if errors  != []:
+    damask.util.croak(errors)
+    table.close(dismiss = True)
+    continue
+
+# --------------- figure out size and grid ---------------------------------------------------------
+
+  coords = [np.unique(table.data[:,9+i]) for i in xrange(3)]
+  mincorner = np.array(map(min,coords))
+  maxcorner = np.array(map(max,coords))
+  grid   = np.array(map(len,coords),'i')
+  size   = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
+  size   = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))      # spacing for grid==1 set to smallest among other spacings
+
+  N = grid.prod()
+
+  if N != len(table.data): errors.append('data count {} does not match grid {}x{}x{}.'.format(N,*grid))
+  if errors  != []:
+    damask.util.croak(errors)
+    table.close(dismiss = True)
+    continue
+
+# ------------------------------------------ process data ------------------------------------------
+
+  F_fourier = np.fft.rfftn(table.data[:,:9].reshape(grid[2],grid[1],grid[0],3,3),axes=(0,1,2))  # perform transform only once...
+
+  displacement    = displacementFluctFFT(F_fourier,grid,size,options.nodal,transformed=True)
+  avgDisplacement = displacementAvgFFT  (F_fourier,grid,size,options.nodal,transformed=True)
+
+# ------------------------------------------ assemble header ---------------------------------------
+
+  if options.nodal:
+    table.info_clear()
+    table.labels_clear()
+
+  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
+  table.labels_append((['{}_pos' .format(i+1) for i in xrange(3)] if options.nodal else []) +
+                      ['{}_avg({}).{}' .format(i+1,options.defgrad,options.coords) for i in xrange(3)] +
+                      ['{}_fluct({}).{}'.format(i+1,options.defgrad,options.coords) for i in xrange(3)] )
+  table.head_write()
+
+# ------------------------------------------ output data -------------------------------------------
+
+  zrange = np.linspace(0,size[2],1+grid[2]) if options.nodal else xrange(grid[2])
+  yrange = np.linspace(0,size[1],1+grid[1]) if options.nodal else xrange(grid[1])
+  xrange = np.linspace(0,size[0],1+grid[0]) if options.nodal else xrange(grid[0])
+
+  for i,z in enumerate(zrange):
+    for j,y in enumerate(yrange):
+      for k,x in enumerate(xrange):
+        if options.nodal: table.data_clear()
+        else:             table.data_read()
+        table.data_append([x,y,z] if options.nodal else [])
+        table.data_append(list(avgDisplacement[i,j,k,:]))
+        table.data_append(list(   displacement[i,j,k,:]))
+        table.data_write()
+
+# ------------------------------------------ output finalization -----------------------------------
+
+  table.close()                                                             # close ASCII tables
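A quick property check of the helpers in the file added above: for a spatially constant deformation gradient the zero-frequency bin carries the average and every other bin vanishes, so `displacementFluctFFT` returns zeros while `displacementAvgFFT` gives (Favg - I).X. A minimal numerical check of those two facts (not part of the commit):

import numpy as np

grid = np.array([2, 2, 2])
F = np.broadcast_to(np.array([[1.0, 0.1, 0.0],       # constant simple shear
                              [0.0, 1.0, 0.0],
                              [0.0, 0.0, 1.0]]), (2, 2, 2, 3, 3)).copy()

F_fourier = np.fft.rfftn(F, axes=(0, 1, 2))
Favg = np.real(F_fourier[0, 0, 0, :, :])/grid.prod() # zero frequency / N = average
print(np.allclose(Favg, F[0, 0, 0]))                 # True

flat = F_fourier.reshape(-1, 3, 3)
print(np.allclose(flat[1:], 0.0))                    # True: no fluctuation content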
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,math
+import os,sys,math
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -10,39 +10,35 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID = ' '.join([scriptName,damask.version])

 def divFFT(geomdim,field):
-  N = grid.prod()                                          # field size
-  n = np.array(np.shape(field)[3:]).prod()                 # data size
+  grid = np.array(np.shape(field)[2::-1])
+  N = grid.prod()                                          # field size
+  n = np.array(np.shape(field)[3:]).prod()                 # data size

   field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
   div_fourier   = np.zeros(field_fourier.shape[0:len(np.shape(field))-1],'c16') # size depends on whether tensor or vector

 # differentiation in Fourier space
   k_s=np.zeros([3],'i')
-  TWOPIIMG = (0.0+2.0j*math.pi)
+  TWOPIIMG = 2.0j*math.pi
   for i in xrange(grid[2]):
     k_s[0] = i
-    if(grid[2]%2==0 and i == grid[2]//2):                  # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
-      k_s[0]=0
-    elif (i > grid[2]//2):
-      k_s[0] = k_s[0] - grid[2]
+    if grid[2]%2 == 0 and i == grid[2]//2: k_s[0] = 0      # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
+    elif i > grid[2]//2:                   k_s[0] -= grid[2]

     for j in xrange(grid[1]):
       k_s[1] = j
-      if(grid[1]%2==0 and j == grid[1]//2):                # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
-        k_s[1]=0
-      elif (j > grid[1]//2):
-        k_s[1] = k_s[1] - grid[1]
+      if grid[1]%2 == 0 and j == grid[1]//2: k_s[1] = 0    # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
+      elif j > grid[1]//2:                   k_s[1] -= grid[1]

       for k in xrange(grid[0]//2+1):
         k_s[2] = k
-        if(grid[0]%2==0 and k == grid[0]//2):              # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
-          k_s[2]=0
+        if grid[0]%2 == 0 and k == grid[0]//2: k_s[2] = 0  # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)

-        xi=np.array([k_s[2]/geomdim[2]+0.0j,k_s[1]/geomdim[1]+0.j,k_s[0]/geomdim[0]+0.j],'c16')
+        xi = (k_s/geomdim)[2::-1].astype('c16')            # reversing the field input order
         if n == 9:                                         # tensor, 3x3 -> 3
           for l in xrange(3):
             div_fourier[i,j,k,l] = sum(field_fourier[i,j,k,l,0:3]*xi) *TWOPIIMG
         elif n == 3:                                       # vector, 3 -> 1
           div_fourier[i,j,k] = sum(field_fourier[i,j,k,0:3]*xi) *TWOPIIMG

   return np.fft.fftpack.irfftn(div_fourier,axes=(0,1,2)).reshape([N,n/3])
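`divFFT` uses the same Nyquist and folding conventions as `curlFFT` above; a compact way to convince oneself that its spectral differentiation is consistent is to compare against an analytic divergence. Sketch for a field varying along x only (verification aid, independent of the script):

import numpy as np

# v = (sin(2*pi*x/L), 0, 0) has divergence (2*pi/L)*cos(2*pi*x/L)
N, L = 16, 2.0
x = np.linspace(0.0, L, N, endpoint=False)
v = np.sin(2.0*np.pi*x/L)

k = np.fft.rfftfreq(N)*N                             # integer frequencies along x
div = np.fft.irfft(np.fft.rfft(v)*(2.0j*np.pi*k/L), N)

print(np.allclose(div, 2.0*np.pi/L*np.cos(2.0*np.pi*x/L)))  # True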
@ -62,33 +58,31 @@ Deals with both vector- and tensor-valued fields.
|
||||||
parser.add_option('-c','--coordinates',
|
parser.add_option('-c','--coordinates',
|
||||||
dest = 'coords',
|
dest = 'coords',
|
||||||
type = 'string', metavar = 'string',
|
type = 'string', metavar = 'string',
|
||||||
help = 'column heading for coordinates [%default]')
|
help = 'column label of coordinates [%default]')
|
||||||
parser.add_option('-v','--vector',
|
parser.add_option('-v','--vector',
|
||||||
dest = 'vector',
|
dest = 'vector',
|
||||||
action = 'extend', metavar = '<string LIST>',
|
action = 'extend', metavar = '<string LIST>',
|
||||||
help = 'heading of columns containing vector field values')
|
help = 'column label(s) of vector field values')
|
||||||
parser.add_option('-t','--tensor',
|
parser.add_option('-t','--tensor',
|
||||||
dest = 'tensor',
|
dest = 'tensor',
|
||||||
action = 'extend', metavar = '<string LIST>',
|
action = 'extend', metavar = '<string LIST>',
|
||||||
help = 'heading of columns containing tensor field values')
|
help = 'column label(s) of tensor field values')
|
||||||
|
|
||||||
parser.set_defaults(coords = 'ipinitialcoord',
|
parser.set_defaults(coords = 'pos',
|
||||||
)
|
)
|
||||||
|
|
||||||
(options,filenames) = parser.parse_args()
|
(options,filenames) = parser.parse_args()
|
||||||
|
|
||||||
if options.vector == None and options.tensor == None:
|
if options.vector is None and options.tensor is None:
|
||||||
parser.error('no data column specified.')
|
parser.error('no data column specified.')
|
||||||
|
|
||||||
# --- loop over input files -------------------------------------------------------------------------
|
# --- loop over input files ------------------------------------------------------------------------
|
||||||
|
|
||||||
if filenames == []: filenames = [None]
|
if filenames == []: filenames = [None]
|
||||||
|
|
||||||
for name in filenames:
|
for name in filenames:
|
||||||
try:
|
try: table = damask.ASCIItable(name = name,buffered = False)
|
||||||
table = damask.ASCIItable(name = name,buffered = False)
|
except: continue
|
||||||
except:
|
|
||||||
continue
|
|
||||||
damask.util.report(scriptName,name)
|
damask.util.report(scriptName,name)
|
||||||
|
|
||||||
# ------------------------------------------ read header ------------------------------------------
|
# ------------------------------------------ read header ------------------------------------------
|
||||||
|
@ -140,15 +134,16 @@ for name in filenames:
|
||||||
maxcorner = np.array(map(max,coords))
|
maxcorner = np.array(map(max,coords))
|
||||||
grid = np.array(map(len,coords),'i')
|
grid = np.array(map(len,coords),'i')
|
||||||
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
|
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
|
||||||
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
|
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other ones
|
||||||
|
|
||||||
# ------------------------------------------ process value field -----------------------------------
|
# ------------------------------------------ process value field -----------------------------------
|
||||||
|
|
||||||
stack = [table.data]
|
stack = [table.data]
|
||||||
for type, data in items.iteritems():
|
for type, data in items.iteritems():
|
||||||
for i,label in enumerate(data['active']):
|
for i,label in enumerate(data['active']):
|
||||||
stack.append(divFFT(size[::-1], # we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
|
# we need to reverse order here, because x is fastest,ie rightmost, but leftmost in our x,y,z notation
|
||||||
table.data[:,data['column'][i]:data['column'][i]+data['dim']].\
|
stack.append(divFFT(size[::-1],
|
||||||
|
table.data[:,data['column'][i]:data['column'][i]+data['dim']].
|
||||||
reshape([grid[2],grid[1],grid[0]]+data['shape'])))
|
reshape([grid[2],grid[1],grid[0]]+data['shape'])))
|
||||||
|
|
||||||
# ------------------------------------------ output result -----------------------------------------
|
# ------------------------------------------ output result -----------------------------------------
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,string
|
import os,sys
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
@ -48,7 +48,7 @@ parser.set_defaults(hkl = (1,1,1),
|
||||||
|
|
||||||
(options,filenames) = parser.parse_args()
|
(options,filenames) = parser.parse_args()
|
||||||
|
|
||||||
if options.stiffness == None:
|
if options.stiffness is None:
|
||||||
parser.error('no data column specified...')
|
parser.error('no data column specified...')
|
||||||
|
|
||||||
# --- loop over input files -------------------------------------------------------------------------
|
# --- loop over input files -------------------------------------------------------------------------
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,string,itertools
|
import os,sys,itertools
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from scipy import ndimage
|
from scipy import ndimage
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
|
@ -89,23 +89,24 @@ Add column(s) containing Euclidean distance to grain structural features: bounda
|
||||||
""", version = scriptID)
|
""", version = scriptID)
|
||||||
|
|
||||||
parser.add_option('-c','--coordinates', dest='coords', metavar='string',
|
parser.add_option('-c','--coordinates', dest='coords', metavar='string',
|
||||||
help='column heading for coordinates [%default]')
|
help='column label of coordinates [%default]')
|
||||||
parser.add_option('-i','--identifier', dest='id', metavar = 'string',
|
parser.add_option('-i','--identifier', dest='id', metavar = 'string',
|
||||||
help='heading of column containing grain identifier [%default]')
|
help='column label of grain identifier [%default]')
|
||||||
parser.add_option('-t','--type', dest = 'type', action = 'extend', metavar = '<string LIST>',
|
parser.add_option('-t','--type', dest = 'type', action = 'extend', metavar = '<string LIST>',
|
||||||
help = 'feature type {%s} '%(', '.join(map(lambda x:'/'.join(x['names']),features))) )
|
help = 'feature type {%s} '%(', '.join(map(lambda x:'/'.join(x['names']),features))) )
|
||||||
parser.add_option('-n','--neighborhood',dest='neighborhood', choices = neighborhoods.keys(), metavar = 'string',
|
parser.add_option('-n','--neighborhood',dest='neighborhood', choices = neighborhoods.keys(), metavar = 'string',
|
||||||
help = 'type of neighborhood [neumann] {%s}'%(', '.join(neighborhoods.keys())))
|
help = 'type of neighborhood [neumann] {%s}'%(', '.join(neighborhoods.keys())))
|
||||||
parser.add_option('-s', '--scale', dest = 'scale', type = 'float', metavar='float',
|
parser.add_option('-s', '--scale', dest = 'scale', type = 'float', metavar = 'float',
|
||||||
help = 'voxel size [%default]')
|
help = 'voxel size [%default]')
|
||||||
parser.set_defaults(coords = 'ipinitialcoord')
|
parser.set_defaults(coords = 'pos',
|
||||||
parser.set_defaults(id = 'texture')
|
id = 'texture',
|
||||||
parser.set_defaults(neighborhood = 'neumann')
|
neighborhood = 'neumann',
|
||||||
parser.set_defaults(scale = 1.0)
|
scale = 1.0,
|
||||||
|
)
|
||||||
|
|
||||||
(options,filenames) = parser.parse_args()
|
(options,filenames) = parser.parse_args()
|
||||||
|
|
||||||
if options.type == None:
|
if options.type is None:
|
||||||
parser.error('no feature type selected.')
|
parser.error('no feature type selected.')
|
||||||
if not set(options.type).issubset(set(list(itertools.chain(*map(lambda x: x['names'],features))))):
|
if not set(options.type).issubset(set(list(itertools.chain(*map(lambda x: x['names'],features))))):
|
||||||
parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
|
parser.error('type must be chosen from (%s).'%(', '.join(map(lambda x:'|'.join(x['names']),features))) )
|
||||||
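The 'neumann' default above is the von Neumann neighborhood (face neighbors only), as opposed to the Moore neighborhood that also counts edge and corner diagonals. The script's own neighborhoods dict is defined earlier and is not shown in this hunk; as a rough illustration of what such masks look like with scipy.ndimage:

    from scipy import ndimage
    neumann = ndimage.generate_binary_structure(3, 1)   # 3x3x3 mask, 6 face neighbors
    moore   = ndimage.generate_binary_structure(3, 3)   # 3x3x3 mask, all 26 neighbors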
|
@ -125,10 +126,8 @@ for i,feature in enumerate(features):
|
||||||
if filenames == []: filenames = [None]
|
if filenames == []: filenames = [None]
|
||||||
|
|
||||||
for name in filenames:
|
for name in filenames:
|
||||||
try:
|
try: table = damask.ASCIItable(name = name, buffered = False)
|
||||||
table = damask.ASCIItable(name = name, buffered = False)
|
except: continue
|
||||||
except:
|
|
||||||
continue
|
|
||||||
damask.util.report(scriptName,name)
|
damask.util.report(scriptName,name)
|
||||||
|
|
||||||
# ------------------------------------------ read header ------------------------------------------
|
# ------------------------------------------ read header ------------------------------------------
|
||||||
|
@ -141,7 +140,9 @@ for name in filenames:
|
||||||
remarks = []
|
remarks = []
|
||||||
column = {}
|
column = {}
|
||||||
|
|
||||||
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
|
coordDim = table.label_dimension(options.coords)
|
||||||
|
if not 3 >= coordDim >= 1:
|
||||||
|
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
|
||||||
else: coordCol = table.label_index(options.coords)
|
else: coordCol = table.label_index(options.coords)
|
||||||
|
|
||||||
if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
|
if table.label_dimension(options.id) != 1: errors.append('grain identifier {} not found.'.format(options.id))
|
||||||
|
@ -164,18 +165,20 @@ for name in filenames:
|
||||||
|
|
||||||
table.data_readArray()
|
table.data_readArray()
|
||||||
|
|
||||||
coords = [{},{},{}]
|
coords = [np.unique(table.data[:,coordCol+i]) for i in xrange(coordDim)]
|
||||||
for i in xrange(len(table.data)):
|
mincorner = np.array(map(min,coords))
|
||||||
for j in xrange(3):
|
maxcorner = np.array(map(max,coords))
|
||||||
coords[j][str(table.data[i,coordCol+j])] = True
|
grid = np.array(map(len,coords)+[1]*(3-len(coords)),'i')
|
||||||
grid = np.array(map(len,coords),'i')
|
|
||||||
size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
|
|
||||||
np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
|
|
||||||
max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
|
|
||||||
max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
|
|
||||||
],'d') # size from bounding box, corrected for cell-centeredness
|
|
||||||
|
|
||||||
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1])) # spacing for grid==1 equal to smallest among other spacings
|
N = grid.prod()
|
||||||
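The grid reconstruction above works because the table stores cell-centered coordinates of a regular grid, so the unique values along each axis enumerate the grid lines; axes absent from the data (coordDim < 3) are padded to size 1. A small sketch with hypothetical coordinates:

    import numpy as np
    x = np.array([0.25, 0.75, 0.25, 0.75])                      # hypothetical 2 x 2 x 1 grid
    y = np.array([0.5 , 0.5 , 1.5 , 1.5 ])
    coords = [np.unique(c) for c in (x, y)]                     # [0.25 0.75], [0.5 1.5]
    grid = np.array(list(map(len, coords)) + [1]*(3 - len(coords)), 'i')   # -> [2 2 1]
    assert grid.prod() == len(x)                                # the data count check done above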
|
|
||||||
|
if N != len(table.data): errors.append('data count {} does not match grid '.format(N) +
|
||||||
|
'x'.join(map(str,grid)) +
|
||||||
|
'.')
|
||||||
|
if errors != []:
|
||||||
|
damask.util.croak(errors)
|
||||||
|
table.close(dismiss = True)
|
||||||
|
continue
|
||||||
|
|
||||||
# ------------------------------------------ process value field -----------------------------------
|
# ------------------------------------------ process value field -----------------------------------
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,160 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
|
import os,sys,math
|
||||||
|
import numpy as np
|
||||||
|
from optparse import OptionParser
|
||||||
|
import damask
|
||||||
|
|
||||||
|
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||||
|
scriptID = ' '.join([scriptName,damask.version])
|
||||||
|
|
||||||
|
#--------------------------------------------------------------------------------------------------
|
||||||
|
def gradFFT(geomdim,field):
|
||||||
|
|
||||||
|
grid = np.array(np.shape(field)[2::-1])
|
||||||
|
N = grid.prod() # field size
|
||||||
|
n = np.array(np.shape(field)[3:]).prod() # data size
|
||||||
|
if n == 3: dataType = 'vector'
|
||||||
|
elif n == 1: dataType = 'scalar'
|
||||||
|
|
||||||
|
field_fourier = np.fft.fftpack.rfftn(field,axes=(0,1,2))
|
||||||
|
grad_fourier = np.zeros(field_fourier.shape+(3,),'c16')
|
||||||
|
|
||||||
|
# differentiation in Fourier space
|
||||||
|
k_s = np.zeros([3],'i')
|
||||||
|
TWOPIIMG = 2.0j*math.pi
|
||||||
|
for i in xrange(grid[2]):
|
||||||
|
k_s[0] = i
|
||||||
|
if grid[2]%2 == 0 and i == grid[2]//2: k_s[0] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
|
||||||
|
elif i > grid[2]//2: k_s[0] -= grid[2]
|
||||||
|
|
||||||
|
for j in xrange(grid[1]):
|
||||||
|
k_s[1] = j
|
||||||
|
if grid[1]%2 == 0 and j == grid[1]//2: k_s[1] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
|
||||||
|
elif j > grid[1]//2: k_s[1] -= grid[1]
|
||||||
|
|
||||||
|
for k in xrange(grid[0]//2+1):
|
||||||
|
k_s[2] = k
|
||||||
|
if grid[0]%2 == 0 and k == grid[0]//2: k_s[2] = 0 # for even grid, set Nyquist freq to 0 (Johnson, MIT, 2011)
|
||||||
|
|
||||||
|
xi = (k_s/geomdim)[2::-1].astype('c16') # reversing the field order
|
||||||
|
|
||||||
|
grad_fourier[i,j,k,0,:] = field_fourier[i,j,k,0]*xi *TWOPIIMG # vector field from scalar data
|
||||||
|
|
||||||
|
if dataType == 'vector':
|
||||||
|
grad_fourier[i,j,k,1,:] = field_fourier[i,j,k,1]*xi *TWOPIIMG # tensor field from vector data
|
||||||
|
grad_fourier[i,j,k,2,:] = field_fourier[i,j,k,2]*xi *TWOPIIMG
|
||||||
|
|
||||||
|
return np.fft.fftpack.irfftn(grad_fourier,axes=(0,1,2)).reshape([N,3*n])
|
||||||
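The function above builds the wave vectors by hand, zeroing the Nyquist frequency on even grids, and multiplies the transformed field by 2*pi*i*k. The same operation for a scalar field can be sketched with NumPy's frequency helpers; this is an illustrative stand-alone version, not the DAMASK implementation:

    import numpy as np

    def grad_scalar(field, size):                          # field (Nz,Ny,Nx), size (Lz,Ly,Lx)
        k = [np.fft.fftfreq(n, d=l/n) for n, l in zip(field.shape, size)]
        k[-1] = np.fft.rfftfreq(field.shape[-1], d=size[-1]/field.shape[-1])
        kz, ky, kx = np.meshgrid(*k, indexing='ij')        # wave vectors in cycles per length
        F = np.fft.rfftn(field)
        return np.stack([np.fft.irfftn(2j*np.pi*ki*F, s=field.shape)
                         for ki in (kz, ky, kx)], axis=-1) # d/dz, d/dy, d/dx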
|
|
||||||
|
|
||||||
|
# --------------------------------------------------------------------
|
||||||
|
# MAIN
|
||||||
|
# --------------------------------------------------------------------
|
||||||
|
|
||||||
|
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
||||||
|
Add column(s) containing gradient of requested column(s).
|
||||||
|
Operates on periodic ordered three-dimensional data sets.
|
||||||
|
Deals with both vector and scalar fields.
|
||||||
|
|
||||||
|
""", version = scriptID)
|
||||||
|
|
||||||
|
parser.add_option('-c','--coordinates',
|
||||||
|
dest = 'coords',
|
||||||
|
type = 'string', metavar='string',
|
||||||
|
help = 'column label of coordinates [%default]')
|
||||||
|
parser.add_option('-v','--vector',
|
||||||
|
dest = 'vector',
|
||||||
|
action = 'extend', metavar = '<string LIST>',
|
||||||
|
help = 'column label(s) of vector field values')
|
||||||
|
parser.add_option('-s','--scalar',
|
||||||
|
dest = 'scalar',
|
||||||
|
action = 'extend', metavar = '<string LIST>',
|
||||||
|
help = 'column label(s) of scalar field values')
|
||||||
|
|
||||||
|
parser.set_defaults(coords = 'pos',
|
||||||
|
)
|
||||||
|
|
||||||
|
(options,filenames) = parser.parse_args()
|
||||||
|
|
||||||
|
if options.vector is None and options.scalar is None:
|
||||||
|
parser.error('no data column specified.')
|
||||||
|
|
||||||
|
# --- loop over input files ------------------------------------------------------------------------
|
||||||
|
|
||||||
|
if filenames == []: filenames = [None]
|
||||||
|
|
||||||
|
for name in filenames:
|
||||||
|
try: table = damask.ASCIItable(name = name,buffered = False)
|
||||||
|
except: continue
|
||||||
|
damask.util.report(scriptName,name)
|
||||||
|
|
||||||
|
# ------------------------------------------ read header ------------------------------------------
|
||||||
|
|
||||||
|
table.head_read()
|
||||||
|
|
||||||
|
# ------------------------------------------ sanity checks ----------------------------------------
|
||||||
|
|
||||||
|
items = {
|
||||||
|
'scalar': {'dim': 1, 'shape': [1], 'labels':options.scalar, 'active':[], 'column': []},
|
||||||
|
'vector': {'dim': 3, 'shape': [3], 'labels':options.vector, 'active':[], 'column': []},
|
||||||
|
}
|
||||||
|
errors = []
|
||||||
|
remarks = []
|
||||||
|
column = {}
|
||||||
|
|
||||||
|
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
|
||||||
|
else: colCoord = table.label_index(options.coords)
|
||||||
|
|
||||||
|
for type, data in items.iteritems():
|
||||||
|
for what in (data['labels'] if data['labels'] is not None else []):
|
||||||
|
dim = table.label_dimension(what)
|
||||||
|
if dim != data['dim']: remarks.append('column {} is not a {}.'.format(what,type))
|
||||||
|
else:
|
||||||
|
items[type]['active'].append(what)
|
||||||
|
items[type]['column'].append(table.label_index(what))
|
||||||
|
|
||||||
|
if remarks != []: damask.util.croak(remarks)
|
||||||
|
if errors != []:
|
||||||
|
damask.util.croak(errors)
|
||||||
|
table.close(dismiss = True)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# ------------------------------------------ assemble header --------------------------------------
|
||||||
|
|
||||||
|
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
|
||||||
|
for type, data in items.iteritems():
|
||||||
|
for label in data['active']:
|
||||||
|
table.labels_append(['{}_gradFFT({})'.format(i+1,label) for i in xrange(3 * data['dim'])]) # extend ASCII header with new labels
|
||||||
|
table.head_write()
|
||||||
|
|
||||||
|
# --------------- figure out size and grid ---------------------------------------------------------
|
||||||
|
|
||||||
|
table.data_readArray()
|
||||||
|
|
||||||
|
coords = [np.unique(table.data[:,colCoord+i]) for i in xrange(3)]
|
||||||
|
mincorner = np.array(map(min,coords))
|
||||||
|
maxcorner = np.array(map(max,coords))
|
||||||
|
grid = np.array(map(len,coords),'i')
|
||||||
|
size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # size from edge to edge = dim * n/(n-1)
|
||||||
|
size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))
|
||||||
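The n/(n-1) factor corrects for cell-centeredness: n cell centers span only (n-1) spacings of the physical size, so the bounding-box extent has to be scaled back up. A worked check with illustrative numbers:

    import numpy as np
    centers = np.array([0.125, 0.375, 0.625, 0.875])   # 4 cell centers on a unit-length axis
    grid = len(centers)
    extent = centers.max() - centers.min()             # 0.75 = (n-1)/n of the true size
    size = grid/(grid - 1.0)*extent                    # 4/3 * 0.75 = 1.0
    assert np.isclose(size, 1.0)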
|
|
||||||
|
# ------------------------------------------ process value field -----------------------------------
|
||||||
|
|
||||||
|
stack = [table.data]
|
||||||
|
for type, data in items.iteritems():
|
||||||
|
for i,label in enumerate(data['active']):
|
||||||
|
# we need to reverse order here, because x is fastest, i.e. rightmost, but leftmost in our x,y,z notation
|
||||||
|
stack.append(gradFFT(size[::-1],
|
||||||
|
table.data[:,data['column'][i]:data['column'][i]+data['dim']].
|
||||||
|
reshape([grid[2],grid[1],grid[0]]+data['shape'])))
|
||||||
|
|
||||||
|
# ------------------------------------------ output result -----------------------------------------
|
||||||
|
|
||||||
|
if len(stack) > 1: table.data = np.hstack(tuple(stack))
|
||||||
|
table.data_writeArray('%.12g')
|
||||||
|
|
||||||
|
# ------------------------------------------ output finalization -----------------------------------
|
||||||
|
|
||||||
|
table.close() # close input ASCII table (works for stdin)
|
|
@ -1,11 +1,10 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
import os,sys,string,time,copy
|
import os,sys,time,copy
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import damask
|
import damask
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
from scipy import spatial
|
from scipy import spatial
|
||||||
from collections import defaultdict
|
|
||||||
|
|
||||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||||
scriptID = ' '.join([scriptName,damask.version])
|
scriptID = ' '.join([scriptName,damask.version])
|
||||||
|
@ -23,7 +22,7 @@ parser.add_option('-r', '--radius',
|
||||||
parser.add_option('-d', '--disorientation',
|
parser.add_option('-d', '--disorientation',
|
||||||
dest = 'disorientation',
|
dest = 'disorientation',
|
||||||
type = 'float', metavar = 'float',
|
type = 'float', metavar = 'float',
|
||||||
help = 'disorientation threshold per grain [%default] (degrees)')
|
help = 'disorientation threshold in degrees [%default]')
|
||||||
parser.add_option('-s', '--symmetry',
|
parser.add_option('-s', '--symmetry',
|
||||||
dest = 'symmetry',
|
dest = 'symmetry',
|
||||||
type = 'string', metavar = 'string',
|
type = 'string', metavar = 'string',
|
||||||
|
@ -61,22 +60,23 @@ parser.add_option('-p', '--position',
|
||||||
type = 'string', metavar = 'string',
|
type = 'string', metavar = 'string',
|
||||||
help = 'spatial position of voxel [%default]')
|
help = 'spatial position of voxel [%default]')
|
||||||
|
|
||||||
parser.set_defaults(symmetry = 'cubic',
|
parser.set_defaults(disorientation = 5,
|
||||||
|
symmetry = 'cubic',
|
||||||
coords = 'pos',
|
coords = 'pos',
|
||||||
degrees = False,
|
degrees = False,
|
||||||
)
|
)
|
||||||
|
|
||||||
(options, filenames) = parser.parse_args()
|
(options, filenames) = parser.parse_args()
|
||||||
|
|
||||||
if options.radius == None:
|
if options.radius is None:
|
||||||
parser.error('no radius specified.')
|
parser.error('no radius specified.')
|
||||||
|
|
||||||
input = [options.eulers != None,
|
input = [options.eulers is not None,
|
||||||
options.a != None and \
|
options.a is not None and \
|
||||||
options.b != None and \
|
options.b is not None and \
|
||||||
options.c != None,
|
options.c is not None,
|
||||||
options.matrix != None,
|
options.matrix is not None,
|
||||||
options.quaternion != None,
|
options.quaternion is not None,
|
||||||
]
|
]
|
||||||
|
|
||||||
if np.sum(input) != 1: parser.error('needs exactly one input format.')
|
if np.sum(input) != 1: parser.error('needs exactly one input format.')
|
||||||
|
@ -86,17 +86,16 @@ if np.sum(input) != 1: parser.error('needs exactly one input format.')
|
||||||
(options.matrix,9,'matrix'),
|
(options.matrix,9,'matrix'),
|
||||||
(options.quaternion,4,'quaternion'),
|
(options.quaternion,4,'quaternion'),
|
||||||
][np.where(input)[0][0]] # select input label that was requested
|
][np.where(input)[0][0]] # select input label that was requested
|
||||||
toRadians = np.pi/180.0 if options.degrees else 1.0 # rescale degrees to radians
|
toRadians = np.pi/180.0 if options.degrees else 1.0 # rescale degrees to radians
|
||||||
cos_disorientation = np.cos(options.disorientation/2.*toRadians)
|
cos_disorientation = np.cos(np.radians(options.disorientation/2.)) # cos of half the disorientation angle
|
||||||
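Comparing quaternion w components against the cosine of half the threshold angle avoids an arccos per candidate: for a unit misorientation quaternion, w = cos(theta/2), and cosine decreases monotonically on [0,pi]. Illustration:

    import numpy as np
    theta_max = 5.                                     # threshold in degrees
    cos_disorientation = np.cos(np.radians(theta_max/2.))
    theta = 3.                                         # some actual misorientation angle
    w = np.cos(np.radians(theta/2.))                   # w component of that misorientation quaternion
    assert (w > cos_disorientation) == (theta < theta_max)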
|
|
||||||
# --- loop over input files -------------------------------------------------------------------------
|
# --- loop over input files -------------------------------------------------------------------------
|
||||||
|
|
||||||
if filenames == []: filenames = [None]
|
if filenames == []: filenames = [None]
|
||||||
|
|
||||||
for name in filenames:
|
for name in filenames:
|
||||||
try:
|
try: table = damask.ASCIItable(name = name,
|
||||||
table = damask.ASCIItable(name = name,
|
buffered = False)
|
||||||
buffered = False)
|
|
||||||
except: continue
|
except: continue
|
||||||
damask.util.report(scriptName,name)
|
damask.util.report(scriptName,name)
|
||||||
|
|
||||||
|
@ -109,8 +108,10 @@ for name in filenames:
|
||||||
errors = []
|
errors = []
|
||||||
remarks = []
|
remarks = []
|
||||||
|
|
||||||
if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
|
if not 3 >= table.label_dimension(options.coords) >= 1:
|
||||||
if not np.all(table.label_dimension(label) == dim): errors.append('input {} has wrong dimension {}.'.format(label,dim))
|
errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
|
||||||
|
if not np.all(table.label_dimension(label) == dim):
|
||||||
|
errors.append('input {} does not have dimension {}.'.format(label,dim))
|
||||||
else: column = table.label_index(label)
|
else: column = table.label_index(label)
|
||||||
|
|
||||||
if remarks != []: damask.util.croak(remarks)
|
if remarks != []: damask.util.croak(remarks)
|
||||||
|
@ -122,8 +123,10 @@ for name in filenames:
|
||||||
# ------------------------------------------ assemble header ---------------------------------------
|
# ------------------------------------------ assemble header ---------------------------------------
|
||||||
|
|
||||||
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
|
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
|
||||||
table.labels_append('grainID_{}@{}'.format(label,
|
table.labels_append('grainID_{}@{:g}'.format('+'.join(label)
|
||||||
options.disorientation if options.degrees else np.degrees(options.disorientation))) # report orientation source and disorientation in degrees
|
if isinstance(label, (list,tuple))
|
||||||
|
else label,
|
||||||
|
options.disorientation)) # report orientation source and disorientation
|
||||||
table.head_write()
|
table.head_write()
|
||||||
|
|
||||||
# ------------------------------------------ process data ------------------------------------------
|
# ------------------------------------------ process data ------------------------------------------
|
||||||
|
@ -161,7 +164,8 @@ for name in filenames:
|
||||||
if p > 0 and p % 1000 == 0:
|
if p > 0 and p % 1000 == 0:
|
||||||
|
|
||||||
time_delta = (time.clock()-tick) * (len(grainID) - p) / p
|
time_delta = (time.clock()-tick) * (len(grainID) - p) / p
|
||||||
bg.set_message('(%02i:%02i:%02i) processing point %i of %i (grain count %i)...'%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
|
bg.set_message('(%02i:%02i:%02i) processing point %i of %i (grain count %i)...'\
|
||||||
|
%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),np.count_nonzero(memberCounts)))
|
||||||
|
|
||||||
if inputtype == 'eulers':
|
if inputtype == 'eulers':
|
||||||
o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians,
|
o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians,
|
||||||
|
@ -178,83 +182,51 @@ for name in filenames:
|
||||||
o = damask.Orientation(quaternion = np.array(map(float,table.data[column:column+4])),
|
o = damask.Orientation(quaternion = np.array(map(float,table.data[column:column+4])),
|
||||||
symmetry = options.symmetry).reduced()
|
symmetry = options.symmetry).reduced()
|
||||||
|
|
||||||
matched = False
|
matched = False
|
||||||
|
alreadyChecked = {}
|
||||||
|
candidates = []
|
||||||
|
bestDisorientation = damask.Quaternion([0,0,0,1]) # initialize to 180 deg rotation as worst case
|
||||||
|
|
||||||
# check against last matched needs to be really picky. best would be to exclude jumps across the poke (checking distance between last and me?)
|
for i in kdtree.query_ball_point(kdtree.data[p],options.radius): # check all neighboring points
|
||||||
# when walking through neighborhood first check whether grainID of that point has already been tested, if yes, skip!
|
gID = grainID[i]
|
||||||
|
if gID != -1 and gID not in alreadyChecked: # indexed point belonging to a grain not yet tested?
|
||||||
if matchedID != -1: # has matched before?
|
alreadyChecked[gID] = True # remember not to check again
|
||||||
matched = (o.quaternion.conjugated() * orientations[matchedID].quaternion).w > cos_disorientation
|
disorientation = o.disorientation(orientations[gID],SST = False)[0] # compare against other orientation
|
||||||
|
if disorientation.quaternion.w > cos_disorientation: # within threshold ...
|
||||||
if not matched:
|
candidates.append(gID) # remember as potential candidate
|
||||||
alreadyChecked = {}
|
if disorientation.quaternion.w >= bestDisorientation.w: # ... and better than current best?
|
||||||
bestDisorientation = damask.Quaternion([0,0,0,1]) # initialize to 180 deg rotation as worst case
|
|
||||||
for i in kdtree.query_ball_point(kdtree.data[p],options.radius): # check all neighboring points
|
|
||||||
gID = grainID[i]
|
|
||||||
if gID != -1 and gID not in alreadyChecked: # an already indexed point belonging to a grain not yet tested?
|
|
||||||
alreadyChecked[gID] = True # remember not to check again
|
|
||||||
disorientation = o.disorientation(orientations[gID],SST = False)[0] # compare against that grain's orientation (and skip requirement of axis within SST)
|
|
||||||
if disorientation.quaternion.w > cos_disorientation and \
|
|
||||||
disorientation.quaternion.w >= bestDisorientation.w: # within disorientation threshold and better than current best?
|
|
||||||
matched = True
|
matched = True
|
||||||
matchedID = gID # remember that grain
|
matchedID = gID # remember that grain
|
||||||
bestDisorientation = disorientation.quaternion
|
bestDisorientation = disorientation.quaternion
|
||||||
|
|
||||||
if not matched: # no match -> new grain found
|
if matched: # did match existing grain
|
||||||
memberCounts += [1] # start new membership counter
|
memberCounts[matchedID] += 1
|
||||||
|
if len(candidates) > 1: # ambiguity in grain identification?
|
||||||
|
largestGrain = sorted(candidates,key=lambda x:memberCounts[x])[-1] # find largest among potential candidate grains
|
||||||
|
matchedID = largestGrain
|
||||||
|
for c in [c for c in candidates if c != largestGrain]: # loop over smaller candidates
|
||||||
|
memberCounts[largestGrain] += memberCounts[c] # reassign member count of smaller to largest
|
||||||
|
memberCounts[c] = 0
|
||||||
|
grainID = np.where(np.in1d(grainID,candidates), largestGrain, grainID) # relabel grid points of smaller candidates as largest one
|
||||||
|
|
||||||
|
else: # no match -> new grain found
|
||||||
orientations += [o] # initialize with current orientation
|
orientations += [o] # initialize with current orientation
|
||||||
|
memberCounts += [1] # start new membership counter
|
||||||
matchedID = g
|
matchedID = g
|
||||||
g += 1 # increment grain counter
|
g += 1 # increment grain counter
|
||||||
|
|
||||||
else: # did match existing grain
|
|
||||||
memberCounts[matchedID] += 1
|
|
||||||
|
|
||||||
grainID[p] = matchedID # remember grain index assigned to point
|
grainID[p] = matchedID # remember grain index assigned to point
|
||||||
p += 1 # increment point
|
p += 1 # increment point
|
||||||
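Stripped of the table bookkeeping, the loop above is a region-growing scheme: every point queries its neighborhood within the given radius, tests each grain already present there against the disorientation threshold, and either joins the best match or opens a new grain. A condensed sketch, with a hypothetical similar(a,b) predicate standing in for the damask.Orientation disorientation test and the merging of ambiguous candidates omitted:

    import numpy as np
    from scipy import spatial

    def identify(points, orientations, radius, similar):      # similar(a,b) -> bool (hypothetical)
        kdtree  = spatial.cKDTree(points)
        grainID = -np.ones(len(points), dtype=int)
        grains  = []                                          # representative orientation per grain
        for p, o in enumerate(orientations):
            checked, matched = set(), -1
            for i in kdtree.query_ball_point(points[p], radius):  # all neighboring points
                gID = grainID[i]
                if gID >= 0 and gID not in checked:           # indexed grain not yet tested?
                    checked.add(gID)
                    if similar(o, grains[gID]):               # within disorientation threshold?
                        matched = gID
                        break
            if matched < 0:                                   # no match -> new grain
                grains.append(o)
                matched = len(grains) - 1
            grainID[p] = matched
        return grainID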
|
|
||||||
bg.set_message('identifying similar orientations among {} grains...'.format(len(orientations)))
|
grainIDs = np.where(np.array(memberCounts) > 0)[0] # identify "live" grain identifiers
|
||||||
|
packingMap = dict(zip(list(grainIDs),range(len(grainIDs)))) # map to condense into consecutive IDs
|
||||||
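Grains emptied by the candidate-merging step leave gaps in the identifier range; packingMap closes them before output. For example:

    import numpy as np
    memberCounts = np.array([3, 0, 5])                             # grain 1 lost all members
    grainIDs = np.where(memberCounts > 0)[0]                       # -> [0 2]
    packingMap = dict(zip(list(grainIDs), range(len(grainIDs))))   # -> {0: 0, 2: 1}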
memberCounts = np.array(memberCounts)
|
|
||||||
similarOrientations = [[] for i in xrange(len(orientations))]
|
|
||||||
|
|
||||||
for i,orientation in enumerate(orientations[:-1]): # compare each identified orientation...
|
|
||||||
for j in xrange(i+1,len(orientations)): # ...against all others that were defined afterwards
|
|
||||||
if orientation.disorientation(orientations[j],SST = False)[0].quaternion.w > cos_disorientation: # similar orientations in both grainIDs?
|
|
||||||
similarOrientations[i].append(j) # remember in upper triangle...
|
|
||||||
similarOrientations[j].append(i) # ...and lower triangle of matrix
|
|
||||||
|
|
||||||
if similarOrientations[i] != []:
|
|
||||||
bg.set_message('grainID {} is as: {}'.format(i,' '.join(map(str,similarOrientations[i]))))
|
|
||||||
|
|
||||||
stillShifting = True
|
|
||||||
while stillShifting:
|
|
||||||
stillShifting = False
|
|
||||||
tick = time.clock()
|
|
||||||
|
|
||||||
for p,gID in enumerate(grainID): # walk through all points
|
|
||||||
if p > 0 and p % 1000 == 0:
|
|
||||||
|
|
||||||
time_delta = (time.clock()-tick) * (len(grainID) - p) / p
|
|
||||||
bg.set_message('(%02i:%02i:%02i) shifting ID of point %i out of %i (grain count %i)...'%(time_delta//3600,time_delta%3600//60,time_delta%60,p,len(grainID),len(orientations)))
|
|
||||||
if similarOrientations[gID] != []: # orientation of my grainID is similar to someone else?
|
|
||||||
similarNeighbors = defaultdict(int) # dict holding frequency of neighboring grainIDs that share my orientation (freq info not used...)
|
|
||||||
for i in kdtree.query_ball_point(kdtree.data[p],options.radius): # check all neighboring points
|
|
||||||
if grainID[i] in similarOrientations[gID]: # neighboring point shares my orientation?
|
|
||||||
similarNeighbors[grainID[i]] += 1 # remember its grainID
|
|
||||||
if similarNeighbors != {}: # found similar orientation(s) in neighborhood
|
|
||||||
candidates = np.array([gID]+similarNeighbors.keys()) # possible replacement grainIDs for me
|
|
||||||
grainID[p] = candidates[np.argsort(memberCounts[candidates])[-1]] # adopt ID that is most frequent in overall dataset
|
|
||||||
memberCounts[gID] -= 1 # my former ID loses one fellow
|
|
||||||
memberCounts[grainID[p]] += 1 # my new ID gains one fellow
|
|
||||||
bg.set_message('{}:{} --> {}'.format(p,gID,grainID[p])) # report switch of grainID
|
|
||||||
stillShifting = True
|
|
||||||
|
|
||||||
table.data_rewind()
|
table.data_rewind()
|
||||||
|
|
||||||
outputAlive = True
|
outputAlive = True
|
||||||
p = 0
|
p = 0
|
||||||
while outputAlive and table.data_read(): # read next data line of ASCII table
|
while outputAlive and table.data_read(): # read next data line of ASCII table
|
||||||
table.data_append(1+grainID[p]) # add grain ID
|
table.data_append(1+packingMap[grainID[p]]) # add (condensed) grain ID
|
||||||
outputAlive = table.data_write() # output processed line
|
outputAlive = table.data_write() # output processed line
|
||||||
p += 1
|
p += 1
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,string,math
|
import os,sys,math
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
@ -62,12 +62,12 @@ parser.set_defaults(pole = (0.0,0.0,1.0),
|
||||||
|
|
||||||
(options, filenames) = parser.parse_args()
|
(options, filenames) = parser.parse_args()
|
||||||
|
|
||||||
input = [options.eulers != None,
|
input = [options.eulers is not None,
|
||||||
options.a != None and \
|
options.a is not None and \
|
||||||
options.b != None and \
|
options.b is not None and \
|
||||||
options.c != None,
|
options.c is not None,
|
||||||
options.matrix != None,
|
options.matrix is not None,
|
||||||
options.quaternion != None,
|
options.quaternion is not None,
|
||||||
]
|
]
|
||||||
|
|
||||||
if np.sum(input) != 1: parser.error('needs exactly one input format.')
|
if np.sum(input) != 1: parser.error('needs exactly one input format.')
|
||||||
|
@ -99,7 +99,7 @@ for name in filenames:
|
||||||
# ------------------------------------------ sanity checks ----------------------------------------
|
# ------------------------------------------ sanity checks ----------------------------------------
|
||||||
|
|
||||||
if not np.all(table.label_dimension(label) == dim):
|
if not np.all(table.label_dimension(label) == dim):
|
||||||
damask.util.croak('input {} has wrong dimension {}.'.format(label,dim))
|
damask.util.croak('input {} does not have dimension {}.'.format(label,dim))
|
||||||
table.close(dismiss = True) # close ASCIItable and remove empty file
|
table.close(dismiss = True) # close ASCIItable and remove empty file
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,string
|
import os,sys
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
|
||||||
|
@ -39,14 +39,14 @@ parser.set_defaults(offset = 0,
|
||||||
|
|
||||||
(options,filenames) = parser.parse_args()
|
(options,filenames) = parser.parse_args()
|
||||||
|
|
||||||
if options.label == None:
|
if options.label is None:
|
||||||
parser.error('no data columns specified.')
|
parser.error('no data columns specified.')
|
||||||
if options.map == None:
|
if options.map is None:
|
||||||
parser.error('no mapping column given.')
|
parser.error('no mapping column given.')
|
||||||
|
|
||||||
# ------------------------------------------ process mapping ASCIItable ---------------------------
|
# ------------------------------------------ process mapping ASCIItable ---------------------------
|
||||||
|
|
||||||
if options.asciitable != None and os.path.isfile(options.asciitable):
|
if options.asciitable is not None and os.path.isfile(options.asciitable):
|
||||||
|
|
||||||
mappedTable = damask.ASCIItable(name = options.asciitable,
|
mappedTable = damask.ASCIItable(name = options.asciitable,
|
||||||
buffered = False, readonly = True)
|
buffered = False, readonly = True)
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,math,string
|
import os,sys,math
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,math,string
|
import os,sys,math
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
|
||||||
|
@ -43,7 +43,7 @@ parser.set_defaults(norm = 'frobenius',
|
||||||
|
|
||||||
(options,filenames) = parser.parse_args()
|
(options,filenames) = parser.parse_args()
|
||||||
|
|
||||||
if options.label == None:
|
if options.label is None:
|
||||||
parser.error('no data column specified.')
|
parser.error('no data column specified.')
|
||||||
|
|
||||||
# --- loop over input files -------------------------------------------------------------------------
|
# --- loop over input files -------------------------------------------------------------------------
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,string,math
|
import os,sys,math
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
@ -15,7 +15,8 @@ scriptID = ' '.join([scriptName,damask.version])
|
||||||
|
|
||||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
||||||
Add quaternion and/or Bunge Euler angle representation of crystal lattice orientation.
|
Add quaternion and/or Bunge Euler angle representation of crystal lattice orientation.
|
||||||
Orientation is given by quaternion, Euler angles, rotation matrix, or crystal frame coordinates (i.e. component vectors of rotation matrix).
|
Orientation is given by quaternion, Euler angles, rotation matrix, or crystal frame coordinates
|
||||||
|
(i.e. component vectors of rotation matrix).
|
||||||
|
|
||||||
""", version = scriptID)
|
""", version = scriptID)
|
||||||
|
|
||||||
|
@ -74,12 +75,12 @@ options.output = map(lambda x: x.lower(), options.output)
|
||||||
if options.output == [] or (not set(options.output).issubset(set(outputChoices))):
|
if options.output == [] or (not set(options.output).issubset(set(outputChoices))):
|
||||||
parser.error('output must be chosen from {}.'.format(', '.join(outputChoices)))
|
parser.error('output must be chosen from {}.'.format(', '.join(outputChoices)))
|
||||||
|
|
||||||
input = [options.eulers != None,
|
input = [options.eulers is not None,
|
||||||
options.a != None and \
|
options.a is not None and \
|
||||||
options.b != None and \
|
options.b is not None and \
|
||||||
options.c != None,
|
options.c is not None,
|
||||||
options.matrix != None,
|
options.matrix is not None,
|
||||||
options.quaternion != None,
|
options.quaternion is not None,
|
||||||
]
|
]
|
||||||
|
|
||||||
if np.sum(input) != 1: parser.error('needs exactly one input format.')
|
if np.sum(input) != 1: parser.error('needs exactly one input format.')
|
||||||
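This boolean-list idiom recurs in several of these scripts: each possible orientation source contributes one flag, np.sum enforces mutual exclusivity, and np.where picks the matching (label, dimension, type) triple. Reduced to its core, with illustrative labels:

    import numpy as np
    input = [True, False, False, False]                     # e.g. only --eulers was given
    assert np.sum(input) == 1                               # exactly one orientation source
    (label, dim, inputtype) = [('eulerangles',   3,       'eulers'),
                               (['a','b','c'],   [3,3,3], 'frame'),
                               ('matrix',        9,       'matrix'),
                               ('quaternion',    4,       'quaternion'),
                               ][np.where(input)[0][0]]     # -> ('eulerangles', 3, 'eulers')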
|
@ -112,7 +113,7 @@ for name in filenames:
|
||||||
errors = []
|
errors = []
|
||||||
remarks = []
|
remarks = []
|
||||||
|
|
||||||
if not np.all(table.label_dimension(label) == dim): errors.append('input {} has wrong dimension {}.'.format(label,dim))
|
if not np.all(table.label_dimension(label) == dim): errors.append('input {} does not have dimension {}.'.format(label,dim))
|
||||||
else: column = table.label_index(label)
|
else: column = table.label_index(label)
|
||||||
|
|
||||||
if remarks != []: damask.util.croak(remarks)
|
if remarks != []: damask.util.croak(remarks)
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,string
|
import os,sys
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
@ -14,7 +14,8 @@ scriptID = ' '.join([scriptName,damask.version])
|
||||||
# --------------------------------------------------------------------
|
# --------------------------------------------------------------------
|
||||||
|
|
||||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
||||||
Add column(s) containing Second Piola--Kirchhoff stress based on given column(s) of deformation gradient and first Piola--Kirchhoff stress.
|
Add column(s) containing Second Piola--Kirchhoff stress based on given column(s) of deformation
|
||||||
|
gradient and first Piola--Kirchhoff stress.
|
||||||
|
|
||||||
""", version = scriptID)
|
""", version = scriptID)
|
||||||
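The computation itself lies outside this hunk, but the underlying relation is the standard continuum-mechanics identity P = F.S, hence S = F^-1 . P. A numeric sanity check with made-up tensors:

    import numpy as np
    F = np.eye(3) + 1e-3*np.arange(9).reshape(3,3)     # deformation gradient (made up)
    S = np.diag([1., 2., 3.])                          # symmetric second Piola-Kirchhoff stress
    P = np.dot(F, S)                                   # first Piola-Kirchhoff stress
    assert np.allclose(np.dot(np.linalg.inv(F), P), S) # recover S = F^-1 . P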
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,string,math
|
import os,sys,math
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
@ -62,12 +62,12 @@ parser.set_defaults(pole = (1.0,0.0,0.0),
|
||||||
|
|
||||||
(options, filenames) = parser.parse_args()
|
(options, filenames) = parser.parse_args()
|
||||||
|
|
||||||
input = [options.eulers != None,
|
input = [options.eulers is not None,
|
||||||
options.a != None and \
|
options.a is not None and \
|
||||||
options.b != None and \
|
options.b is not None and \
|
||||||
options.c != None,
|
options.c is not None,
|
||||||
options.matrix != None,
|
options.matrix is not None,
|
||||||
options.quaternion != None,
|
options.quaternion is not None,
|
||||||
]
|
]
|
||||||
|
|
||||||
if np.sum(input) != 1: parser.error('needs exactly one input format.')
|
if np.sum(input) != 1: parser.error('needs exactly one input format.')
|
||||||
|
@ -101,7 +101,7 @@ for name in filenames:
|
||||||
errors = []
|
errors = []
|
||||||
remarks = []
|
remarks = []
|
||||||
|
|
||||||
if not np.all(table.label_dimension(label) == dim): errors.append('input {} has wrong dimension {}.'.format(label,dim))
|
if not np.all(table.label_dimension(label) == dim): errors.append('input {} does not have dimension {}.'.format(label,dim))
|
||||||
else: column = table.label_index(label)
|
else: column = table.label_index(label)
|
||||||
|
|
||||||
if remarks != []: damask.util.croak(remarks)
|
if remarks != []: damask.util.croak(remarks)
|
||||||
|
@ -133,7 +133,7 @@ for name in filenames:
|
||||||
rotatedPole = o.quaternion*pole # rotate pole according to crystal orientation
|
rotatedPole = o.quaternion*pole # rotate pole according to crystal orientation
|
||||||
(x,y) = rotatedPole[0:2]/(1.+abs(pole[2])) # stereographic projection
|
(x,y) = rotatedPole[0:2]/(1.+abs(pole[2])) # stereographic projection
|
||||||
|
|
||||||
table.data_append([np.sqrt(x*x+y*y),np.arctan2(y,x)] if options.polar else [x,y]) # cartesian coordinates
|
table.data_append([np.sqrt(x*x+y*y),np.arctan2(y,x)] if options.polar else [x,y]) # cartesian coordinates
|
||||||
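The projection above maps a unit pole onto the equatorial plane by dividing its x and y components by 1+|z|, which folds both hemispheres onto the same disc; --polar then converts the result to (r, phi). As a stand-alone sketch:

    import numpy as np

    def project(pole, polar=False):                    # pole: rotated unit vector
        x, y = pole[:2]/(1. + abs(pole[2]))            # stereographic projection
        return [np.hypot(x, y), np.arctan2(y, x)] if polar else [x, y]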
|
|
||||||
outputAlive = table.data_write() # output processed line
|
outputAlive = table.data_write() # output processed line
|
||||||
|
|
||||||
|
|
|
@ -1,98 +0,0 @@
|
||||||
#!/usr/bin/env python
|
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
|
||||||
|
|
||||||
import os,sys,string,numpy as np
|
|
||||||
from optparse import OptionParser
|
|
||||||
import damask
|
|
||||||
|
|
||||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
|
||||||
scriptID = ' '.join([scriptName,damask.version])
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# MAIN
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
|
|
||||||
Add Quaternions based on Crystal Frame Coordinates.
|
|
||||||
|
|
||||||
""", version = scriptID)
|
|
||||||
|
|
||||||
parser.add_option('-f','--frame', dest='frame', nargs=4, type='string', metavar='<string string string string>',
|
|
||||||
help='heading of columns containing b* vector components and three frame vectors in that order')
|
|
||||||
parser.add_option('-s','--symmetry', dest='crysym', nargs=1,type='string',metavar='<string>',
|
|
||||||
help='crystal symmetry definition')
|
|
||||||
parser.set_defaults(frame = None)
|
|
||||||
|
|
||||||
(options,filenames) = parser.parse_args()
|
|
||||||
|
|
||||||
if options.frame == None:
|
|
||||||
parser.error('no data column specified...')
|
|
||||||
|
|
||||||
datainfo = {'len':4,
|
|
||||||
'label':[]
|
|
||||||
}
|
|
||||||
|
|
||||||
if options.frame != None: datainfo['label'] += options.frame
|
|
||||||
|
|
||||||
# --- loop over input files -------------------------------------------------------------------------
|
|
||||||
|
|
||||||
if filenames == []: filenames = [None]
|
|
||||||
|
|
||||||
for name in filenames:
|
|
||||||
try:
|
|
||||||
table = damask.ASCIItable(name = name,
|
|
||||||
buffered = False)
|
|
||||||
except: continue
|
|
||||||
damask.util.report(scriptName,name)
|
|
||||||
|
|
||||||
table.head_read() # read ASCII header info
|
|
||||||
|
|
||||||
# --------------- figure out columns to process ---------------------------------------------------
|
|
||||||
active = []
|
|
||||||
column = {}
|
|
||||||
|
|
||||||
for label in datainfo['label']:
|
|
||||||
key = '1_'+label if datainfo['len'] > 1 else label # non-special labels have to start with '1_'
|
|
||||||
if key in table.labels:
|
|
||||||
active.append(label)
|
|
||||||
column[label] = table.labels.index(key) # remember columns of requested data
|
|
||||||
else:
|
|
||||||
damask.util.croak('column %s not found...'%label)
|
|
||||||
|
|
||||||
# ------------------------------------------ assemble header ---------------------------------------
|
|
||||||
|
|
||||||
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
|
|
||||||
table.labels_append(['Q_%i'%(i+1) for i in xrange(4)]) # extend ASCII header with new labels [1 real, 3 imaginary components]
|
|
||||||
table.head_write()
|
|
||||||
|
|
||||||
# ------------------------------------------ process data ------------------------------------------
|
|
||||||
outputAlive = True
|
|
||||||
while outputAlive and table.data_read(): # read next data line of ASCII table
|
|
||||||
vec = np.zeros([4,3])
|
|
||||||
for i,label in enumerate(active):
|
|
||||||
vec[i,:] = np.array(table.data[column[label]:
|
|
||||||
column[label]+3])
|
|
||||||
|
|
||||||
if sys.argv[1:][6]=='hexagonal': # Ensure Input matrix is orthogonal
|
|
||||||
M=np.dot(vec[0,:],vec[2,:])
|
|
||||||
vec[1,:]=vec[1,:]/np.linalg.norm(vec[1,:])
|
|
||||||
vec[2,:]=M*(vec[0,:]/np.linalg.norm(vec[0,:]))
|
|
||||||
vec[3,:]=vec[3,:]/np.linalg.norm(vec[3,:])
|
|
||||||
else:
|
|
||||||
vec[1,:]=vec[1,:]/np.linalg.norm(vec[1,:])
|
|
||||||
vec[2,:]=vec[2,:]/np.linalg.norm(vec[2,:])
|
|
||||||
vec[3,:]=vec[3,:]/np.linalg.norm(vec[3,:])
|
|
||||||
|
|
||||||
|
|
||||||
Ori=damask.Orientation(matrix=vec[1:,:],symmetry=sys.argv[1:][6])
|
|
||||||
|
|
||||||
table.data_append(np.asarray(Ori.asQuaternion()))
|
|
||||||
|
|
||||||
|
|
||||||
outputAlive = table.data_write() # output processed line
|
|
||||||
|
|
||||||
# ------------------------------------------ output result -----------------------------------------
|
|
||||||
outputAlive and table.output_flush() # just in case of buffered ASCII table
|
|
||||||
|
|
||||||
table.close() # close ASCII tables
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,math,string
|
import os,sys,math
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
@ -9,226 +9,95 @@ import damask
|
||||||
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||||
scriptID = ' '.join([scriptName,damask.version])
|
scriptID = ' '.join([scriptName,damask.version])
|
||||||
|
|
||||||
slipnormal_temp = [
|
slipSystems = {
|
||||||
[0,0,0,1],
|
'fcc':
|
||||||
[0,0,0,1],
|
np.array([
|
||||||
[0,0,0,1],
|
# Slip direction Plane normal
|
||||||
[0,1,-1,0],
|
[ 0, 1,-1, 1, 1, 1, ],
|
||||||
[-1,0,1,0],
|
[-1, 0, 1, 1, 1, 1, ],
|
||||||
[1,-1,0,0],
|
[ 1,-1, 0, 1, 1, 1, ],
|
||||||
[0,1,-1,1],
|
[ 0,-1,-1, -1,-1, 1, ],
|
||||||
[-1,1,0,1],
|
[ 1, 0, 1, -1,-1, 1, ],
|
||||||
[-1,0,1,1],
|
[-1, 1, 0, -1,-1, 1, ],
|
||||||
[0,-1,1,1],
|
[ 0,-1, 1, 1,-1,-1, ],
|
||||||
[1,-1,0,1],
|
[-1, 0,-1, 1,-1,-1, ],
|
||||||
[1,0,-1,1],
|
[ 1, 1, 0, 1,-1,-1, ],
|
||||||
[0,1,-1,1],
|
[ 0, 1, 1, -1, 1,-1, ],
|
||||||
[0,1,-1,1],
|
[ 1, 0,-1, -1, 1,-1, ],
|
||||||
[-1,1,0,1],
|
[-1,-1, 0, -1, 1,-1, ],
|
||||||
[-1,1,0,1],
|
],'f'),
|
||||||
[-1,0,1,1],
|
'bcc':
|
||||||
[-1,0,1,1],
|
np.array([
|
||||||
[0,-1,1,1],
|
# Slip system <111>{110}
|
||||||
[0,-1,1,1],
|
[ 1,-1, 1, 0, 1, 1, ],
|
||||||
[1,-1,0,1],
|
[-1,-1, 1, 0, 1, 1, ],
|
||||||
[1,-1,0,1],
|
[ 1, 1, 1, 0,-1, 1, ],
|
||||||
[1,0,-1,1],
|
[-1, 1, 1, 0,-1, 1, ],
|
||||||
[1,0,-1,1],
|
[-1, 1, 1, 1, 0, 1, ],
|
||||||
]
|
[-1,-1, 1, 1, 0, 1, ],
|
||||||
|
[ 1, 1, 1, -1, 0, 1, ],
|
||||||
slipdirection_temp = [
|
[ 1,-1, 1, -1, 0, 1, ],
|
||||||
[2,-1,-1,0],
|
[-1, 1, 1, 1, 1, 0, ],
|
||||||
[-1,2,-1,0],
|
[-1, 1,-1, 1, 1, 0, ],
|
||||||
[-1,-1,2,0],
|
[ 1, 1, 1, -1, 1, 0, ],
|
||||||
[2,-1,-1,0],
|
[ 1, 1,-1, -1, 1, 0, ],
|
||||||
[-1,2,-1,0],
|
# Slip system <111>{112}
|
||||||
[-1,-1,2,0],
|
[-1, 1, 1, 2, 1, 1, ],
|
||||||
[2,-1,-1,0],
|
[ 1, 1, 1, -2, 1, 1, ],
|
||||||
[1,1,-2,0],
|
[ 1, 1,-1, 2,-1, 1, ],
|
||||||
[-1,2,-1,0],
|
[ 1,-1, 1, 2, 1,-1, ],
|
||||||
[-2,1,1,0],
|
[ 1,-1, 1, 1, 2, 1, ],
|
||||||
[-1,-1,2,0],
|
[ 1, 1,-1, -1, 2, 1, ],
|
||||||
[1,-2,1,0],
|
[ 1, 1, 1, 1,-2, 1, ],
|
||||||
[-1,2,-1,3],
|
[-1, 1, 1, 1, 2,-1, ],
|
||||||
[1,1,-2,3],
|
[ 1, 1,-1, 1, 1, 2, ],
|
||||||
[-2,1,1,3],
|
[ 1,-1, 1, -1, 1, 2, ],
|
||||||
[-1,2,-1,3],
|
[-1, 1, 1, 1,-1, 2, ],
|
||||||
[-1,-1,2,3],
|
[ 1, 1, 1, 1, 1,-2, ],
|
||||||
[-2,1,1,3],
|
],'f'),
|
||||||
[1,-2,1,3],
|
'hex':
|
||||||
[-1,-1,2,3],
|
np.array([
|
||||||
[2,-1,-1,3],
|
# Basal systems <11.0>{00.1} (independent of c/a-ratio, Bravais notation (4 coordinate base))
|
||||||
[1,-2,1,3],
|
[ 2, -1, -1, 0, 0, 0, 0, 1, ],
|
||||||
[1,1,-2,3],
|
[-1, 2, -1, 0, 0, 0, 0, 1, ],
|
||||||
[2,-1,-1,3],
|
[-1, -1, 2, 0, 0, 0, 0, 1, ],
|
||||||
]
|
# 1st type prismatic systems <11.0>{10.0} (independent of c/a-ratio)
|
||||||
|
[ 2, -1, -1, 0, 0, 1, -1, 0, ],
|
||||||
# slip normals and directions according to cpfem implementation
|
[-1, 2, -1, 0, -1, 0, 1, 0, ],
|
||||||
Nslipsystems = {'fcc': 12, 'bcc': 24, 'hex': 24}
|
[-1, -1, 2, 0, 1, -1, 0, 0, ],
|
||||||
slipnormal = { \
|
# 2nd type prismatic systems <10.0>{11.0} -- a slip; plane normals independent of c/a-ratio
|
||||||
'fcc': [
|
[ 0, 1, -1, 0, 2, -1, -1, 0, ],
|
||||||
[1,1,1],
|
[-1, 0, 1, 0, -1, 2, -1, 0, ],
|
||||||
[1,1,1],
|
[ 1, -1, 0, 0, -1, -1, 2, 0, ],
|
||||||
[1,1,1],
|
# 1st type 1st order pyramidal systems <11.0>{-11.1} -- plane normals depend on the c/a-ratio
|
||||||
[-1,-1,1],
|
[ 2, -1, -1, 0, 0, 1, -1, 1, ],
|
||||||
[-1,-1,1],
|
[-1, 2, -1, 0, -1, 0, 1, 1, ],
|
||||||
[-1,-1,1],
|
[-1, -1, 2, 0, 1, -1, 0, 1, ],
|
||||||
[1,-1,-1],
|
[ 1, 1, -2, 0, -1, 1, 0, 1, ],
|
||||||
[1,-1,-1],
|
[-2, 1, 1, 0, 0, -1, 1, 1, ],
|
||||||
[1,-1,-1],
|
[ 1, -2, 1, 0, 1, 0, -1, 1, ],
|
||||||
[-1,1,-1],
|
# pyramidal system: c+a slip <11.3>{-10.1} -- plane normals depend on the c/a-ratio
|
||||||
[-1,1,-1],
|
[ 2, -1, -1, 3, -1, 1, 0, 1, ],
|
||||||
[-1,1,-1],
|
[ 1, -2, 1, 3, -1, 1, 0, 1, ],
|
||||||
],
|
[-1, -1, 2, 3, 1, 0, -1, 1, ],
|
||||||
'bcc': [
|
[-2, 1, 1, 3, 1, 0, -1, 1, ],
|
||||||
[0,1,1],
|
[-1, 2, -1, 3, 0, -1, 1, 1, ],
|
||||||
[0,1,1],
|
[ 1, 1, -2, 3, 0, -1, 1, 1, ],
|
||||||
[0,-1,1],
|
[-2, 1, 1, 3, 1, -1, 0, 1, ],
|
||||||
[0,-1,1],
|
[-1, 2, -1, 3, 1, -1, 0, 1, ],
|
||||||
[1,0,1],
|
[ 1, 1, -2, 3, -1, 0, 1, 1, ],
|
||||||
[1,0,1],
|
[ 2, -1, -1, 3, -1, 0, 1, 1, ],
|
||||||
[-1,0,1],
|
[ 1, -2, 1, 3, 0, 1, -1, 1, ],
|
||||||
[-1,0,1],
|
[-1, -1, 2, 3, 0, 1, -1, 1, ],
|
||||||
[1,1,0],
|
# pyramidal system: c+a slip <11.3>{-1-1.2} -- as for hexagonal ice (Castelnau et al. 1996, similar to twin system found below)
|
||||||
[1,1,0],
|
[ 2, -1, -1, 3, -2, 1, 1, 2, ], # sorted according to similar twin system
|
||||||
[-1,1,0],
|
[-1, 2, -1, 3, 1, -2, 1, 2, ], # <11.3>{-1-1.2} shear = 2((c/a)^2-2)/(3 c/a)
|
||||||
[-1,1,0],
|
[-1, -1, 2, 3, 1, 1, -2, 2, ],
|
||||||
[2,1,1],
|
[-2, 1, 1, 3, 2, -1, -1, 2, ],
|
||||||
[-2,1,1],
|
[ 1, -2, 1, 3, -1, 2, -1, 2, ],
|
||||||
[2,-1,1],
|
[ 1, 1, -2, 3, -1, -1, 2, 2, ],
|
||||||
[2,1,-1],
|
],'f'),
|
||||||
[1,2,1],
|
}
|
||||||
[-1,2,1],
|
|
||||||
[1,-2,1],
|
|
||||||
[1,2,-1],
|
|
||||||
[1,1,2],
|
|
||||||
[-1,1,2],
|
|
||||||
[1,-1,2],
|
|
||||||
[1,1,-2],
|
|
||||||
],
|
|
||||||
'hex': [ # these are dummy numbers and are recalculated based on the above hex real slip systems.
|
|
||||||
[1,1,0],
|
|
||||||
[1,1,0],
|
|
||||||
[1,0,1],
|
|
||||||
[1,0,1],
|
|
||||||
[0,1,1],
|
|
||||||
[0,1,1],
|
|
||||||
[1,-1,0],
|
|
||||||
[1,-1,0],
|
|
||||||
[-1,0,1],
|
|
||||||
[-1,0,1],
|
|
||||||
[0,-1,1],
|
|
||||||
[0,-1,1],
|
|
||||||
[2,-1,1],
|
|
||||||
[1,-2,-1],
|
|
||||||
[1,1,2],
|
|
||||||
[2,1,1],
|
|
||||||
[1,2,-1],
|
|
||||||
[1,-1,2],
|
|
||||||
[2,1,-1],
|
|
||||||
[1,2,1],
|
|
||||||
[1,-1,-2],
|
|
||||||
[2,-1,-1],
|
|
||||||
[1,-2,1],
|
|
||||||
[1,1,-2],
|
|
||||||
],
|
|
||||||
}
|
|
||||||
slipdirection = { \
|
|
||||||
'fcc': [
|
|
||||||
[0,1,-1],
|
|
||||||
[-1,0,1],
|
|
||||||
[1,-1,0],
|
|
||||||
[0,-1,-1],
|
|
||||||
[1,0,1],
|
|
||||||
[-1,1,0],
|
|
||||||
[0,-1,1],
|
|
||||||
[-1,0,-1],
|
|
||||||
[1,1,0],
|
|
||||||
[0,1,1],
|
|
||||||
[1,0,-1],
|
|
||||||
[-1,-1,0],
|
|
||||||
],
|
|
||||||
'bcc': [
|
|
||||||
[1,-1,1],
|
|
||||||
[-1,-1,1],
|
|
||||||
[1,1,1],
|
|
||||||
[-1,1,1],
|
|
||||||
[-1,1,1],
|
|
||||||
[-1,-1,1],
|
|
||||||
[1,1,1],
|
|
||||||
[1,-1,1],
|
|
||||||
[-1,1,1],
|
|
||||||
[-1,1,-1],
|
|
||||||
[1,1,1],
|
|
||||||
[1,1,-1],
|
|
||||||
[-1,1,1],
|
|
||||||
[1,1,1],
|
|
||||||
[1,1,-1],
|
|
||||||
[1,-1,1],
|
|
||||||
[1,-1,1],
|
|
||||||
[1,1,-1],
|
|
||||||
[1,1,1],
|
|
||||||
[-1,1,1],
|
|
||||||
[1,1,-1],
|
|
||||||
[1,-1,1],
|
|
||||||
[-1,1,1],
|
|
||||||
[1,1,1],
|
|
||||||
],
|
|
||||||
'hex': [ # these are dummy numbers and are recalculated based on the above hex real slip systems.
|
|
||||||
[-1,1,1],
|
|
||||||
[1,-1,1],
|
|
||||||
[-1,-1,1],
|
|
||||||
[-1,1,1],
|
|
||||||
[-1,-1,1],
|
|
||||||
[1,-1,1],
|
|
||||||
[1,1,1],
|
|
||||||
[-1,-1,1],
|
|
||||||
[1,-1,1],
|
|
||||||
[1,1,1],
|
|
||||||
[1,1,1],
|
|
||||||
[-1,1,1],
|
|
||||||
[1,1,-1],
|
|
||||||
[1,1,-1],
|
|
||||||
[1,1,-1],
|
|
||||||
[1,-1,-1],
|
|
||||||
[1,-1,-1],
|
|
||||||
[1,-1,-1],
|
|
||||||
[1,-1,1],
|
|
||||||
[1,-1,1],
|
|
||||||
[1,-1,1],
|
|
||||||
[1,1,1],
|
|
||||||
[1,1,1],
|
|
||||||
[1,1,1],
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
def applyEulers(phi1,Phi,phi2,x):
|
|
||||||
""" transform x given in crystal coordinates to xbar returned in lab coordinates for Euler angles phi1,Phi,phi2 """
|
|
||||||
|
|
||||||
eulerRot = [[ math.cos(phi1)*math.cos(phi2) - math.cos(Phi)*math.sin(phi1)*math.sin(phi2),
|
|
||||||
-math.cos(phi1)*math.sin(phi2) - math.cos(Phi)*math.cos(phi2)*math.sin(phi1),
|
|
||||||
math.sin(Phi)*math.sin(phi1)
|
|
||||||
],
|
|
||||||
[ math.cos(phi2)*math.sin(phi1) + math.cos(Phi)*math.cos(phi1)*math.sin(phi2),
|
|
||||||
math.cos(Phi)*math.cos(phi1)*math.cos(phi2) - math.sin(phi1)*math.sin(phi2),
|
|
||||||
-math.sin(Phi)*math.cos(phi1)
|
|
||||||
],
|
|
||||||
[ math.sin(Phi)*math.sin(phi2),
|
|
||||||
math.sin(Phi)*math.cos(phi2),
|
|
||||||
math.cos(Phi)
|
|
||||||
]]
|
|
||||||
|
|
||||||
xbar = [0,0,0]
|
|
||||||
if len(x) == 3:
|
|
||||||
for i in range(3):
|
|
||||||
xbar[i] = sum([eulerRot[i][j]*x[j] for j in range(3)])
|
|
||||||
return xbar
|
|
||||||
|
|
||||||
def normalize(x):
|
|
||||||
|
|
||||||
norm = math.sqrt(sum([x[i]*x[i] for i in range(len(x))]))
|
|
||||||
|
|
||||||
return [x[i]/norm for i in range(len(x))]
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
# --------------------------------------------------------------------
|
||||||
# MAIN
|
# MAIN
|
||||||
|
@ -239,126 +108,166 @@ Add columns listing Schmid factors (and optional trace vector of selected system
|
||||||
|
|
||||||
""", version = scriptID)
|
""", version = scriptID)
|
||||||
|
|
||||||
parser.add_option('-l','--lattice', dest='lattice', type='choice', choices=('fcc','bcc','hex'), metavar='string',
|
latticeChoices = ('fcc','bcc','hex')
|
||||||
help="type of lattice structure [%default] {fcc,bcc',hex}")
|
parser.add_option('-l','--lattice',
|
||||||
parser.add_option('--direction', dest='forcedirection', type='int', nargs=3, metavar='int int int',
|
dest = 'lattice', type = 'choice', choices = latticeChoices, metavar='string',
|
||||||
help='force direction in lab coordinates %default')
|
help = 'type of lattice structure [%default] {}'.format(latticeChoices))
|
||||||
parser.add_option('-n','--normal', dest='stressnormal', type='int', nargs=3, metavar='int int int',
|
parser.add_option('--covera',
|
||||||
help='stress plane normal in lab coordinates ')
|
dest = 'CoverA', type = 'float', metavar = 'float',
|
||||||
parser.add_option('--trace', dest='traceplane', type='int', nargs=3, metavar='int int int',
|
help = 'C over A ratio for hexagonal systems')
|
||||||
help='normal (in lab coordinates) of plane on which the plane trace of the Schmid factor(s) is reported')
|
parser.add_option('-f', '--force',
|
||||||
parser.add_option('--covera', dest='CoverA', type='float', metavar='float',
|
dest = 'force',
|
||||||
help='C over A ratio for hexagonal systems')
|
type = 'float', nargs = 3, metavar = 'float float float',
|
||||||
parser.add_option('-r','--rank', dest='rank', type='int', nargs=3, metavar='int int int',
|
help = 'force direction in lab frame [%default]')
|
||||||
help="report trace of r'th highest Schmid factor [%default]")
|
parser.add_option('-n', '--normal',
|
||||||
parser.add_option('-e', '--eulers', dest='eulers', metavar='string',
|
dest = 'normal',
|
||||||
help='Euler angles label')
|
type = 'float', nargs = 3, metavar = 'float float float',
|
||||||
parser.add_option('-d', '--degrees', dest='degrees', action='store_true',
|
help = 'stress plane normal in lab frame [%default]')
|
||||||
help='Euler angles are given in degrees [%default]')
|
-parser.set_defaults(lattice = 'fcc')
-parser.set_defaults(forcedirection = [0, 0, 1])
-parser.set_defaults(stressnormal = None)
-parser.set_defaults(traceplane = None)
-parser.set_defaults(rank = 0)
-parser.set_defaults(CoverA = 1.587)
-parser.set_defaults(eulers = 'eulerangles')
-
-(options,filenames) = parser.parse_args()
-
-options.forcedirection = normalize(options.forcedirection)
-if options.stressnormal:
-  if abs(sum([options.forcedirection[i] * options.stressnormal[i] for i in range(3)])) < 1e-3:
-    options.stressnormal = normalize(options.stressnormal)
-  else:
-    parser.error('stress plane normal not orthogonal to force direction')
-else:
-  options.stressnormal = options.forcedirection
-if options.traceplane:
-  options.traceplane = normalize(options.traceplane)
-options.rank = min(options.rank,Nslipsystems[options.lattice])
-
-datainfo = {                                                                # list of requested labels per datatype
-             'vector': {'len':3,
-                        'label':[]},
-           }
-
-datainfo['vector']['label'] += [options.eulers]
+parser.add_option('-e', '--eulers',
+                  dest = 'eulers',
+                  type = 'string', metavar = 'string',
+                  help = 'Euler angles label')
+parser.add_option('-d', '--degrees',
+                  dest = 'degrees',
+                  action = 'store_true',
+                  help = 'Euler angles are given in degrees [%default]')
+parser.add_option('-m', '--matrix',
+                  dest = 'matrix',
+                  type = 'string', metavar = 'string',
+                  help = 'orientation matrix label')
+parser.add_option('-a',
+                  dest = 'a',
+                  type = 'string', metavar = 'string',
+                  help = 'crystal frame a vector label')
+parser.add_option('-b',
+                  dest = 'b',
+                  type = 'string', metavar = 'string',
+                  help = 'crystal frame b vector label')
+parser.add_option('-c',
+                  dest = 'c',
+                  type = 'string', metavar = 'string',
+                  help = 'crystal frame c vector label')
+parser.add_option('-q', '--quaternion',
+                  dest = 'quaternion',
+                  type = 'string', metavar = 'string',
+                  help = 'quaternion label')
+parser.set_defaults(force   = (0.0,0.0,1.0),
+                    normal  = None,
+                    lattice = latticeChoices[0],
+                    CoverA  = math.sqrt(8./3.),
+                    degrees = False,
+                   )
+
+(options, filenames) = parser.parse_args()

 toRadians = math.pi/180.0 if options.degrees else 1.0                       # rescale degrees to radians

-if options.lattice=='hex':                                                  # convert 4 Miller indices notation of hex to orthogonal 3 Miller indices notation
-  for i in range(Nslipsystems[options.lattice]):
-    slipnormal[options.lattice][i][0]=slipnormal_temp[i][0]
-    slipnormal[options.lattice][i][1]=(slipnormal_temp[i][0]+2.0*slipnormal_temp[i][1])/math.sqrt(3.0)
-    slipnormal[options.lattice][i][2]=slipnormal_temp[i][3]/options.CoverA
-    slipdirection[options.lattice][i][0]=slipdirection_temp[i][0]*1.5       # direction [uvtw]->[3u/2 (u+2v)*sqrt(3)/2 w*(c/a)]
-    slipdirection[options.lattice][i][1]=(slipdirection_temp[i][0]+2.0*slipdirection_temp[i][1])*(0.5*math.sqrt(3.0))
-    slipdirection[options.lattice][i][2]=slipdirection_temp[i][3]*options.CoverA
-
-  for i in range(Nslipsystems[options.lattice]):
-    slipnormal[options.lattice][i]=normalize(slipnormal[options.lattice][i])
-    slipdirection[options.lattice][i]=normalize(slipdirection[options.lattice][i])
+force = np.array(options.force)
+force /= np.linalg.norm(force)
+
+if options.normal:
+  damask.util.croak('got normal')
+  normal = np.array(options.normal)
+  normal /= np.linalg.norm(normal)
+  if abs(np.dot(force,normal)) > 1e-3:
+    parser.error('stress plane normal not orthogonal to force direction')
+else:
+  normal = force
+
+input = [options.eulers     is not None,
+         options.a          is not None and \
+         options.b          is not None and \
+         options.c          is not None,
+         options.matrix     is not None,
+         options.quaternion is not None,
+        ]
+
+if np.sum(input) != 1: parser.error('needs exactly one input format.')
+
+(label,dim,inputtype) = [(options.eulers,3,'eulers'),
+                         ([options.a,options.b,options.c],[3,3,3],'frame'),
+                         (options.matrix,9,'matrix'),
+                         (options.quaternion,4,'quaternion'),
+                        ][np.where(input)[0][0]]                            # select input label that was requested
+
+c_direction = np.zeros((len(slipSystems[options.lattice]),3),'f')
+c_normal    = np.zeros_like(c_direction)
+
+if options.lattice in latticeChoices[:2]:
+  c_direction = slipSystems[options.lattice][:,:3]
+  c_normal    = slipSystems[options.lattice][:,3:]
+elif options.lattice == latticeChoices[2]:
+  # convert 4 Miller index notation of hex to orthogonal 3 Miller index notation
+  for i in xrange(len(c_direction)):
+    c_direction[i] = np.array([slipSystems['hex'][i,0]*1.5,
+                               (slipSystems['hex'][i,0] + 2.*slipSystems['hex'][i,1])*0.5*np.sqrt(3),
+                               slipSystems['hex'][i,3]*options.CoverA,
+                              ])
+    c_normal[i]    = np.array([slipSystems['hex'][i,4],
+                               (slipSystems['hex'][i,4] + 2.*slipSystems['hex'][i,5])/np.sqrt(3),
+                               slipSystems['hex'][i,7]/options.CoverA,
+                              ])
+
+c_direction /= np.tile(np.linalg.norm(c_direction,axis=1),(3,1)).T
+c_normal    /= np.tile(np.linalg.norm(c_normal   ,axis=1),(3,1)).T

 # --- loop over input files -------------------------------------------------------------------------

 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,buffered = False)
-  except:
-    continue
+  try: table = damask.ASCIItable(name = name,
+                                 buffered = False)
+  except: continue
   damask.util.report(scriptName,name)

-  table.head_read()                                                         # read ASCII header info
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-
-  key = '1_%s'%datainfo['vector']['label'][0]
-  if key not in table.labels:
-    file['croak'].write('column %s not found...\n'%key)
-    continue
-  else:
-    column = table.labels.index(key)                                        # remember columns of requested data
+# ------------------------------------------ read header ------------------------------------------
+
+  table.head_read()
+
+# ------------------------------------------ sanity checks ----------------------------------------
+
+  if not np.all(table.label_dimension(label) == dim):
+    damask.util.croak('input {} does not have dimension {}.'.format(label,dim))
+    table.close(dismiss = True)                                             # close ASCIItable and remove empty file
+    continue
+
+  column = table.label_index(label)

 # ------------------------------------------ assemble header ---------------------------------------

-  table.labels_append(['%i_S(%i_%i_%i)[%i_%i_%i]'%(i+1,
-                                                   slipnormal[options.lattice][i][0],
-                                                   slipnormal[options.lattice][i][1],
-                                                   slipnormal[options.lattice][i][2],
-                                                   slipdirection[options.lattice][i][0],
-                                                   slipdirection[options.lattice][i][1],
-                                                   slipdirection[options.lattice][i][2],
-                                                  ) for i in range(Nslipsystems[options.lattice])])
-
-  if options.traceplane:
-    if options.rank > 0:
-      table.labels_append('trace_x trace_y trace_z system')
-    else:
-      table.labels_append(['(%i)tx\tty\ttz'%(i+1) for i in range(Nslipsystems[options.lattice])])
+  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
+  table.labels_append(['{id}_'
+                       'S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
+                       '({normal[0]:.1g}_{normal[1]:.1g}_{normal[2]:.1g})'\
+                       .format( id = i+1,
+                                normal    = theNormal,
+                                direction = theDirection,
+                              ) for i,(theNormal,theDirection) in enumerate(zip(c_normal,c_direction))])
   table.head_write()

 # ------------------------------------------ process data ------------------------------------------

   outputAlive = True
   while outputAlive and table.data_read():                                  # read next data line of ASCII table
-    [phi1,Phi,phi2] = Eulers=toRadians*np.array(map(\
-                      float,table.data[column:column+datainfo['vector']['len']]))
-    S = [ sum( [applyEulers(phi1,Phi,phi2,normalize( \
-          slipnormal[options.lattice][slipsystem]))[i]*options.stressnormal[i] for i in range(3)] ) * \
-          sum( [applyEulers(phi1,Phi,phi2,normalize( \
-          slipdirection[options.lattice][slipsystem]))[i]*options.forcedirection[i] for i in range(3)] ) \
-          for slipsystem in range(Nslipsystems[options.lattice]) ]
-    table.data_append(S)
-    if options.traceplane:
-      trace = [np.cross(options.traceplane,applyEulers(phi1,Phi,phi2,normalize(slipnormal[options.lattice][slipsystem]))) \
-               for slipsystem in range(Nslipsystems[options.lattice]) ]
-      if options.rank == 0:
-        table.data_append('\t'.join(map(lambda x:'%f\t%f\t%f'%(x[0],x[1],x[2]),trace)))
-      elif options.rank > 0:
-        SabsSorted = sorted([(abs(S[i]),i) for i in range(len(S))])
-        table.data_append('\t'.join(map(str,trace[SabsSorted[-options.rank][1]])) + '\t%i'%(1+SabsSorted[-options.rank][1]))
+    if   inputtype == 'eulers':
+      o = damask.Orientation(Eulers = np.array(map(float,table.data[column:column+3]))*toRadians,)
+    elif inputtype == 'matrix':
+      o = damask.Orientation(matrix = np.array(map(float,table.data[column:column+9])).reshape(3,3).transpose(),)
+    elif inputtype == 'frame':
+      o = damask.Orientation(matrix = np.array(map(float,table.data[column[0]:column[0]+3] + \
+                                                         table.data[column[1]:column[1]+3] + \
+                                                         table.data[column[2]:column[2]+3])).reshape(3,3),)
+    elif inputtype == 'quaternion':
+      o = damask.Orientation(quaternion = np.array(map(float,table.data[column:column+4])),)
+
+    rotForce  = o.quaternion.conjugated() * force
+    rotNormal = o.quaternion.conjugated() * normal
+    table.data_append(np.abs(np.sum(c_direction*rotForce,axis=1) * np.sum(c_normal*rotNormal,axis=1)))
     outputAlive = table.data_write()                                        # output processed line

 # ------------------------------------------ output finalization -----------------------------------

-  table.close()                                                             # close input ASCII table (works for stdin)
+  table.close()                                                             # close ASCII tables
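Note on the rewritten Schmid-factor computation above: the new code rotates the loading direction (and stress plane normal) into the crystal frame via the conjugated orientation quaternion and projects them onto all slip systems at once. A minimal standalone sketch of that projection, assuming plain NumPy with a rotation matrix R in place of damask.Orientation (function and variable names here are illustrative, not DAMASK's API):

    import numpy as np

    def schmid_factors(R, c_direction, c_normal,
                       force=np.array([0.,0.,1.]), normal=None):
        """m = |(d . f')(n . s')| per slip system, where f' and s' are the
        loading direction and stress plane normal rotated into the crystal
        frame; c_direction/c_normal are (N,3) arrays of unit vectors."""
        if normal is None: normal = force             # uniaxial default: normal along force
        rot_force  = R.T.dot(force)                   # sample -> crystal frame
        rot_normal = R.T.dot(normal)
        return np.abs(c_direction.dot(rot_force) * c_normal.dot(rot_normal))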
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask

@@ -25,7 +25,7 @@ parser.add_option('-t','--tensor',

 (options,filenames) = parser.parse_args()

-if options.tensor == None:
+if options.tensor is None:
   parser.error('no data column specified.')

 # --- loop over input files -------------------------------------------------------------------------
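The `== None` to `is None` change, which recurs throughout these scripts, follows PEP 8: None is a singleton, so an identity test is cheaper and immune to overloaded equality. A small illustration:

    class Weird(object):
        def __eq__(self, other): return True          # claims equality with anything

    w = Weird()
    print(w == None)                                  # True  -- misleading
    print(w is None)                                  # False -- identity test is safe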
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask

@@ -10,7 +10,7 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])

 def operator(stretch,strain,eigenvalues):
-  ''' Albrecht Bertram: Elasticity and Plasticity of Large Deformations An Introduction (3rd Edition, 2012), p. 102 '''
+  """Albrecht Bertram: Elasticity and Plasticity of Large Deformations An Introduction (3rd Edition, 2012), p. 102"""
   return {
     'V#ln':    np.log(eigenvalues)                                 ,
     'U#ln':    np.log(eigenvalues)                                 ,

@@ -75,7 +75,7 @@ if options.logarithmic: strains.append('ln')
 if options.biot:        strains.append('Biot')
 if options.green:       strains.append('Green')

-if options.defgrad == None:
+if options.defgrad is None:
   parser.error('no data column specified.')

 # --- loop over input files -------------------------------------------------------------------------

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 from optparse import OptionParser
 import damask

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 import scipy.ndimage
 from optparse import OptionParser
@@ -22,7 +22,7 @@ Average each data block of size 'packing' into single values thus reducing the f
 parser.add_option('-c','--coordinates',
                   dest = 'coords',
                   type = 'string', metavar = 'string',
-                  help = 'column heading for coordinates [%default]')
+                  help = 'column label of coordinates [%default]')
 parser.add_option('-p','--packing',
                   dest = 'packing',
                   type = 'int', nargs = 3, metavar = 'int int int',

@@ -39,7 +39,7 @@ parser.add_option('-s', '--size',
                   dest = 'size',
                   type = 'float', nargs = 3, metavar = 'float float float',
                   help = 'size in x,y,z [autodetect]')
-parser.set_defaults(coords  = 'ipinitialcoord',
+parser.set_defaults(coords  = 'pos',
                     packing = (2,2,2),
                     shift   = (0,0,0),
                     grid    = (0,0,0),

@@ -59,11 +59,10 @@ if any(shift != 0): prefix += 'shift{:+}{:+}{:+}_'.format(*shift)
 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              outname = os.path.join(os.path.dirname(name),
-                                                     prefix+os.path.basename(name)) if name else name,
-                              buffered = False)
+  try:    table = damask.ASCIItable(name = name,
+                                    outname = os.path.join(os.path.dirname(name),
+                                                           prefix+os.path.basename(name)) if name else name,
+                                    buffered = False)
   except: continue
   damask.util.report(scriptName,name)

@@ -75,7 +74,6 @@ for name in filenames:

   errors  = []
   remarks = []
-  colCoord = None

   if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
   else: colCoord = table.label_index(options.coords)

@@ -86,7 +84,6 @@ for name in filenames:
     table.close(dismiss = True)
     continue

-
 # ------------------------------------------ assemble header ---------------------------------------

   table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))

@@ -101,10 +98,10 @@ for name in filenames:
     mincorner = np.array(map(min,coords))
     maxcorner = np.array(map(max,coords))
     grid      = np.array(map(len,coords),'i')
     size      = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)           # size from edge to edge = dim * n/(n-1)
-    size      = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                # spacing for grid==1 equal to smallest among other spacings
+    size      = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))                # spacing for grid==1 set to smallest among other spacings
     delta     = size/np.maximum(np.ones(3,'d'), grid)
     origin    = mincorner - 0.5*delta                                                       # shift from cell center to corner

   else:
     grid = np.array(options.grid,'i')
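The size/grid autodetection shown in this hunk derives a cell-centered regular grid purely from the coordinate columns. A condensed sketch of the same logic with illustrative data (not the script itself):

    import numpy as np

    xyz = np.mgrid[0:4, 0:3, 0:1].reshape(3,-1).T.astype(float)  # cell-center coordinates

    coords    = [np.unique(xyz[:,i]) for i in range(3)]
    mincorner = np.array([c.min() for c in coords])
    maxcorner = np.array([c.max() for c in coords])
    grid      = np.array([len(c) for c in coords],'i')
    size      = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner) # dim * n/(n-1)
    size      = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))      # degenerate axes
    delta     = size/np.maximum(np.ones(3,'d'), grid)                             # cell spacing
    origin    = mincorner - 0.5*delta                                             # cell center -> corner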
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask

@@ -28,7 +28,7 @@ parser.add_option('-l','--label',

 (options,filenames) = parser.parse_args()

-if options.label == None:
+if options.label is None:
   parser.error('no grouping column specified.')

@@ -37,10 +37,14 @@ if options.label == None:
 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              outname = options.label+'_averaged_'+name if name else name,
-                              buffered = False)
+  damask.util.croak(name)
+
+  try: table = damask.ASCIItable(name = name,
+                                 outname = os.path.join(
+                                                        os.path.split(name)[0],
+                                                        options.label+'_averaged_'+os.path.split(name)[1]
+                                                        ) if name else name,
+                                 buffered = False)
   except: continue
   damask.util.report(scriptName,name)
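The new outname places the averaged table next to its input instead of in the current working directory. The pattern in isolation (path and label made up for the example):

    import os

    name, label = '/data/run1/results.txt', 'grain'
    outname = os.path.join(os.path.split(name)[0],
                           label + '_averaged_' + os.path.split(name)[1])
    print(outname)                                    # /data/run1/grain_averaged_results.txt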
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask

@@ -77,11 +77,11 @@ minmax = np.array([np.array(options.xrange),
 grid   = np.zeros(options.bins,'f')
 result = np.zeros((options.bins[0],options.bins[1],3),'f')

-if options.data == None: parser.error('no data columns specified.')
+if options.data is None: parser.error('no data columns specified.')

 labels = options.data

-if options.weight != None: labels += [options.weight]                       # prevent character splitting of single string value
+if options.weight is not None: labels += [options.weight]                   # prevent character splitting of single string value

 # --- loop over input files -------------------------------------------------------------------------

@@ -124,7 +124,7 @@ for name in filenames:
     x = int(options.bins[0]*(table.data[i,0]-minmax[0,0])/delta[0])
     y = int(options.bins[1]*(table.data[i,1]-minmax[1,0])/delta[1])
     if x >= 0 and x < options.bins[0] and y >= 0 and y < options.bins[1]:
-      grid[x,y] += 1. if options.weight == None else table.data[i,2]        # count (weighted) occurrences
+      grid[x,y] += 1. if options.weight is None else table.data[i,2]        # count (weighted) occurrences

 if options.normCol:
   for x in xrange(options.bins[0]):
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask

@@ -19,36 +19,38 @@ to resolution*packing.

 """, version = scriptID)

-parser.add_option('-c','--coordinates', dest='coords', metavar='string',
-                  help='column heading for coordinates [%default]')
-parser.add_option('-p','--packing', dest='packing', type='int', nargs=3, metavar='int int int',
-                  help='dimension of packed group [%default]')
-parser.add_option('-g','--grid', dest='resolution', type='int', nargs=3, metavar='int int int',
-                  help='resolution in x,y,z [autodetect]')
-parser.add_option('-s','--size', dest='dimension', type='float', nargs=3, metavar='int int int',
-                  help='dimension in x,y,z [autodetect]')
-parser.set_defaults(coords = 'ipinitialcoord')
-parser.set_defaults(packing = (2,2,2))
-parser.set_defaults(grid = (0,0,0))
-parser.set_defaults(size = (0.0,0.0,0.0))
+parser.add_option('-c','--coordinates',
+                  dest = 'coords', metavar = 'string',
+                  help = 'column label of coordinates [%default]')
+parser.add_option('-p','--packing',
+                  dest = 'packing', type = 'int', nargs = 3, metavar = 'int int int',
+                  help = 'dimension of packed group [%default]')
+parser.add_option('-g','--grid',
+                  dest = 'resolution', type = 'int', nargs = 3, metavar = 'int int int',
+                  help = 'resolution in x,y,z [autodetect]')
+parser.add_option('-s','--size',
+                  dest = 'dimension', type = 'float', nargs = 3, metavar = 'int int int',
+                  help = 'dimension in x,y,z [autodetect]')
+parser.set_defaults(coords  = 'pos',
+                    packing = (2,2,2),
+                    grid    = (0,0,0),
+                    size    = (0.0,0.0,0.0),
+                   )

 (options,filenames) = parser.parse_args()


 options.packing = np.array(options.packing)
-prefix = 'blowUp%ix%ix%i_'%(options.packing[0],options.packing[1],options.packing[2])
+prefix = 'blowUp{}x{}x{}_'.format(*options.packing)

 # --- loop over input files -------------------------------------------------------------------------

 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              outname = os.path.join(os.path.dirname(name),
-                                                     prefix+ \
-                                                     os.path.basename(name)) if name else name,
-                              buffered = False)
+  try:    table = damask.ASCIItable(name = name,
+                                    outname = os.path.join(os.path.dirname(name),
+                                                           prefix+os.path.basename(name)) if name else name,
+                                    buffered = False)
   except: continue
   damask.util.report(scriptName,name)
@@ -58,39 +60,41 @@ for name in filenames:

 # ------------------------------------------ sanity checks ----------------------------------------

-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
-  else: coordCol = table.label_index(options.coords)
-
-# ------------------------------------------ assemble header --------------------------------------
-
-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
+  errors  = []
+  remarks = []
+
+  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
+  else: colCoord = table.label_index(options.coords)
+
+  colElem = table.label_index('elem')
+
+  if remarks != []: damask.util.croak(remarks)
+  if errors  != []:
+    damask.util.croak(errors)
+    table.close(dismiss = True)
+    continue

 # --------------- figure out size and grid ---------------------------------------------------------

-  table.data_readArray()
-
-  coords = [{},{},{}]
-  for i in xrange(len(table.data)):
-    for j in xrange(3):
-      coords[j][str(table.data[i,coordCol+j])] = True
-  grid = np.array(map(len,coords),'i')
-  size = grid/np.maximum(np.ones(3,'d'),grid-1.0)* \
-         np.array([max(map(float,coords[0].keys()))-min(map(float,coords[0].keys())),\
-                   max(map(float,coords[1].keys()))-min(map(float,coords[1].keys())),\
-                   max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
-                  ],'d')                                                    # size from bounding box, corrected for cell-centeredness
-
-  size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))       # spacing for grid==1 equal to smallest among other spacings
+  table.data_readArray(options.coords)
+
+  coords = [np.unique(table.data[:,i]) for i in xrange(3)]
+  mincorner = np.array(map(min,coords))
+  maxcorner = np.array(map(max,coords))
+  grid = np.array(map(len,coords),'i')
+  size = grid/np.maximum(np.ones(3,'d'), grid-1.0) * (maxcorner-mincorner)  # size from edge to edge = dim * n/(n-1)
+  size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))       # spacing for grid==1 set to smallest among other spacings

   packing = np.array(options.packing,'i')
   outSize = grid*packing

-# ------------------------------------------ assemble header ---------------------------------------
+# ------------------------------------------ assemble header --------------------------------------

+  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
   table.head_write()

 # ------------------------------------------ process data -------------------------------------------

   table.data_rewind()
   data = np.zeros(outSize.tolist()+[len(table.labels)])
   p = np.zeros(3,'i')

@@ -102,15 +106,15 @@ for name in filenames:
     table.data_read()
     data[d[0]:d[0]+packing[0],
          d[1]:d[1]+packing[1],
          d[2]:d[2]+packing[2],
          : ] = np.tile(np.array(table.data_asFloat(),'d'),packing.tolist()+[1]) # tile to match blowUp voxel size
   elementSize = size/grid/packing
   elem = 1
   for c in xrange(outSize[2]):
     for b in xrange(outSize[1]):
       for a in xrange(outSize[0]):
-        data[a,b,c,coordCol:coordCol+3] = [a+0.5,b+0.5,c+0.5]*elementSize
-        data[a,b,c,table.label_index('elem')] = elem
+        data[a,b,c,colCoord:colCoord+3] = [a+0.5,b+0.5,c+0.5]*elementSize
+        if colElem != -1: data[a,b,c,colElem] = elem
         table.data = data[a,b,c,:].tolist()
         outputAlive = table.data_write()                                    # output processed line
         elem += 1
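The blow-up itself is a plain tiling: each input row is copied into a packing[0] x packing[1] x packing[2] block of output voxels, and only the coordinate (and element id) entries are overwritten afterwards. The tiling step in isolation:

    import numpy as np

    packing = np.array([2,2,2],'i')
    row = np.array([0.5, 0.5, 0.5, 42.0])             # one table row: coordinates + datum
    block = np.tile(row, packing.tolist()+[1])        # shape (2,2,2,4): row repeated per voxel
    print(block.shape)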
@@ -1,7 +1,8 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,re,sys,string,fnmatch,math,random
+import os,re,sys
+import math                                                                 # noqa
 import numpy as np
 from optparse import OptionParser
 import damask

@@ -42,7 +43,7 @@ parser.set_defaults(condition = '',

 (options,filenames) = parser.parse_args()

-if options.labels == None or options.formulae == None:
+if options.labels is None or options.formulae is None:
   parser.error('no formulae specified.')
 if len(options.labels) != len(options.formulae):
   parser.error('number of labels ({}) and formulae ({}) do not match.'.format(len(options.labels),len(options.formulae)))

@@ -1,7 +1,8 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,re,sys,fnmatch,math,random
+import os,re,sys,fnmatch
+import math                                                                 # noqa
 import numpy as np
 from optparse import OptionParser
 import damask

@@ -34,7 +35,7 @@ Filter rows according to condition and columns by either white or black listing.

 Examples:
 Every odd row if x coordinate is positive -- " #ip.x# >= 0.0 and #_row_#%2 == 1 ).
-All rows where label 'foo' equals 'bar' -- " #foo# == \"bar\" "
+All rows where label 'foo' equals 'bar' -- " #s#foo# == 'bar' "

 """, version = scriptID)
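The corrected usage example above embeds `#...#` column references in the `--condition` expression, with `#s#...#` marking string-valued columns. Roughly, such references are substituted by the row's values before the expression is evaluated; a generic sketch of that idea (not DAMASK's actual implementation, row contents made up):

    import re

    row = {'foo': 'bar', 'ip.x': 1.0, '_row_': 3}
    condition = "#s#foo# == 'bar' and #ip.x# >= 0.0"

    expr = re.sub(r'#s#(.+?)#', lambda m: repr(str(row[m.group(1)])), condition)
    expr = re.sub(r'#(.+?)#',   lambda m: repr(row[m.group(1)]),      expr)
    print(eval(expr))                                 # True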
@@ -79,14 +80,14 @@ for name in filenames:
   positions = []

   for position,label in enumerate(table.labels):
-    if (options.whitelist == None  or     any([ position in table.label_indexrange(needle) \
+    if (options.whitelist is None  or     any([ position in table.label_indexrange(needle) \
                                                 or fnmatch.fnmatch(label,needle) for needle in options.whitelist])) \
-    and (options.blacklist == None or not any([ position in table.label_indexrange(needle) \
+    and (options.blacklist is None or not any([ position in table.label_indexrange(needle) \
                                                 or fnmatch.fnmatch(label,needle) for needle in options.blacklist])): # a label to keep?
       labels.append(label)                                                  # remember name...
       positions.append(position)                                            # ...and position

-  if len(labels) > 0 and options.whitelist != None and options.blacklist == None:     # check whether reordering is possible
+  if len(labels) > 0 and options.whitelist is not None and options.blacklist is None: # check whether reordering is possible
     whitelistitem = np.zeros(len(labels),dtype=int)
     for i,label in enumerate(labels):                                       # check each selected label
       match = [ positions[i] in table.label_indexrange(needle) \

@@ -118,7 +119,7 @@ for name in filenames:

 # ------------------------------------------ assemble header ---------------------------------------

-  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))               # read ASCII header info
+  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
   table.labels_clear()
   table.labels_append(np.array(labels)[order])                              # update with new label set
   table.head_write()
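The keep/drop decision in this hunk boils down to two fnmatch tests per label. Stripped to its core (label lists are illustrative):

    import fnmatch

    labels    = ['1_pos','2_pos','3_pos','phase','texture']
    whitelist = ['*_pos']                             # keep these ...
    blacklist = ['2_*']                               # ... unless matched here

    keep = [l for l in labels
            if     (whitelist is None or     any(fnmatch.fnmatch(l,n) for n in whitelist))
               and (blacklist is None or not any(fnmatch.fnmatch(l,n) for n in blacklist))]
    print(keep)                                       # ['1_pos', '3_pos']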
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 no BOM -*-
+
+import os,sys
+import numpy as np
+from optparse import OptionParser
+import damask
+
+scriptName = os.path.splitext(os.path.basename(__file__))[0]
+scriptID   = ' '.join([scriptName,damask.version])
+
+# --------------------------------------------------------------------
+#                                MAIN
+# --------------------------------------------------------------------
+
+parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
+Generate histogram of N bins in given data range.
+
+""", version = scriptID)
+
+parser.add_option('-d','--data',
+                  dest = 'data',
+                  type = 'string', metavar = 'string',
+                  help = 'column heading for data')
+parser.add_option('-w','--weights',
+                  dest = 'weights',
+                  type = 'string', metavar = 'string',
+                  help = 'column heading for weights')
+parser.add_option('--range',
+                  dest = 'range',
+                  type = 'float', nargs = 2, metavar = 'float float',
+                  help = 'data range of histogram [min - max]')
+parser.add_option('-N',
+                  dest = 'N',
+                  type = 'int', metavar = 'int',
+                  help = 'number of bins')
+parser.add_option('--density',
+                  dest = 'density',
+                  action = 'store_true',
+                  help = 'report probability density')
+parser.add_option('--logarithmic',
+                  dest = 'log',
+                  action = 'store_true',
+                  help = 'logarithmically spaced bins')
+parser.set_defaults(data    = None,
+                    weights = None,
+                    range   = None,
+                    N       = None,
+                    density = False,
+                    log     = False,
+                   )
+
+(options,filenames) = parser.parse_args()
+
+if not options.data: parser.error('no data specified.')
+if not options.N:    parser.error('no bin number specified.')
+
+if options.log:
+  def forward(x):
+    return np.log(x)
+  def reverse(x):
+    return np.exp(x)
+else:
+  def forward(x):
+    return x
+  def reverse(x):
+    return x
+
+
+# --- loop over input files ------------------------------------------------------------------------
+
+if filenames == []: filenames = [None]
+
+for name in filenames:
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False,
+                                    readonly = True)
+  except: continue
+  damask.util.report(scriptName,name)
+
+# ------------------------------------------ read header ------------------------------------------
+
+  table.head_read()
+
+# ------------------------------------------ sanity checks ----------------------------------------
+
+  errors  = []
+  remarks = []
+
+  if table.label_dimension(options.data) != 1: errors.append('data {} are not scalar.'.format(options.data))
+  if options.weights and \
+     table.label_dimension(options.data) != 1: errors.append('weights {} are not scalar.'.format(options.weights))
+
+  if remarks != []: damask.util.croak(remarks)
+  if errors  != []:
+    damask.util.croak(errors)
+    table.close(dismiss = True)
+    continue
+
+# --------------- read data ----------------------------------------------------------------
+
+  table.data_readArray([options.data,options.weights])
+
+# --------------- auto range ---------------------------------------------------------------
+
+  if options.range is None:
+    rangeMin,rangeMax = min(table.data[:,0]),max(table.data[:,0])
+  else:
+    rangeMin,rangeMax = min(options.range),max(options.range)
+
+# --------------- bin data ----------------------------------------------------------------
+
+  count,edges = np.histogram(table.data[:,0],
+                             bins = reverse(forward(rangeMin) + np.arange(options.N+1) *
+                                            (forward(rangeMax)-forward(rangeMin))/options.N),
+                             range = (rangeMin,rangeMax),
+                             weights = None if options.weights is None else table.data[:,1],
+                             density = options.density,
+                            )
+  bincenter = reverse(forward(rangeMin) + (0.5+np.arange(options.N)) *
+                      (forward(rangeMax)-forward(rangeMin))/options.N)                      # determine center of bins
+
+# ------------------------------------------ assemble header ---------------------------------------
+
+  table.info_clear()
+  table.info_append([scriptID + '\t' + ' '.join(sys.argv[1:]),
+                     scriptID + ':\t' +
+                     'data range {} -- {}'.format(rangeMin,rangeMax) +
+                     (' (log)' if options.log else ''),
+                    ])
+  table.labels_clear()
+  table.labels_append(['bincenter','count'])
+  table.head_write()
+
+# ------------------------------------------ output result -----------------------------------------
+
+  table.data = np.squeeze(np.dstack((bincenter,count)))
+  table.data_writeArray()
+
+# ------------------------------------------ output finalization -----------------------------------
+
+  table.close()                                                                             # close ASCII tables
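The new script gets logarithmic binning by histogramming in a transformed coordinate: edges are spaced uniformly in forward(x) = log(x) and mapped back through reverse(x) = exp(x). The same trick in isolation (random sample data):

    import numpy as np

    data = np.random.lognormal(size=1000)
    N, lo, hi = 20, data.min(), data.max()

    forward, reverse = np.log, np.exp                 # identity functions would give linear bins
    edges     = reverse(forward(lo) + np.arange(N+1)*(forward(hi)-forward(lo))/N)
    count, _  = np.histogram(data, bins=edges)
    bincenter = reverse(forward(lo) + (0.5+np.arange(N))*(forward(hi)-forward(lo))/N)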
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 from PIL import Image

@@ -115,7 +115,7 @@ for name in filenames:
   try:
     table = damask.ASCIItable(name = name,
                               buffered = False,
-                              labeled = options.label != None,
+                              labeled = options.label is not None,
                               readonly = True)
   except: continue
   damask.util.report(scriptName,name)

@@ -131,15 +131,15 @@ for name in filenames:
     damask.util.croak('column {} not found.'.format(options.label))
     table.close(dismiss = True)                                             # close ASCIItable and remove empty file
     continue
-
 # convert data to values between 0 and 1 and arrange according to given options
+
   if options.dimension != []: table.data = table.data.reshape(options.dimension[1],options.dimension[0])
   if options.abs:             table.data = np.abs(table.data)
   if options.log:             table.data = np.log10(table.data);options.range = np.log10(options.range)
   if options.flipLR:          table.data = np.fliplr(table.data)
   if options.flipUD:          table.data = np.flipud(table.data)

-  mask = np.logical_or(table.data == options.gap, np.isnan(table.data)) if options.gap else np.logical_not(np.isnan(table.data)) # mask gap and NaN (if gap present)
+  mask = np.logical_or(table.data == options.gap, np.isnan(table.data))\
+       if options.gap else np.logical_not(np.isnan(table.data))             # mask gap and NaN (if gap present)
   if np.all(np.array(options.range) == 0.0):
     options.range = [table.data[mask].min(),
                      table.data[mask].max()]

@@ -176,7 +176,7 @@ for name in filenames:

   im.save(sys.stdout if not name else
           os.path.splitext(name)[0]+ \
-          ('' if options.label == None else '_'+options.label)+ \
+          ('' if options.label is None else '_'+options.label)+ \
           '.png',
           format = "PNG")
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 from PIL import Image, ImageDraw

@@ -112,7 +112,7 @@ for name in filenames:
   try:
     table = damask.ASCIItable(name = name,
                               buffered = False,
-                              labeled = options.label != None,
+                              labeled = options.label is not None,
                               readonly = True)
   except: continue
   table.report_name(scriptName,name)

@@ -161,9 +161,10 @@ for name in filenames:
                     ])                                                      # find x-y bounding box for given z layer

   nodes -= boundingBox[0].repeat(np.prod(options.dimension+1)).reshape([3]+list(options.dimension+1))
-  nodes *= (options.pixelsize*options.dimension/options.size).repeat(np.prod(options.dimension+1)).reshape([3]+list(options.dimension+1))
-  imagesize = (options.pixelsize*(boundingBox[1]-boundingBox[0])*options.dimension\
-               /options.size)[:2].astype('i')                               # determine image size from number of cells in overall bounding box
+  nodes *= (options.pixelsize*options.dimension/options.size).repeat(np.prod(options.dimension+1)).\
+                                                              reshape([3]+list(options.dimension+1))
+  imagesize = (options.pixelsize*(boundingBox[1]-boundingBox[0])*            # determine image size from number of
+               options.dimension/options.size)[:2].astype('i')               # cells in overall bounding box
   im = Image.new('RGBA',imagesize)
   draw = ImageDraw.Draw(im)
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 from PIL import Image

@@ -80,7 +80,7 @@ for name in filenames:
   try:
     table = damask.ASCIItable(name = name,
                               buffered = False,
-                              labeled = options.label != None,
+                              labeled = options.label is not None,
                               readonly = True)
   except: continue
   damask.util.report(scriptName,name)

@@ -98,14 +98,13 @@ for name in filenames:
     errors.append('column{} {} not found'.format('s' if len(missing_labels) > 1 else '',
                                                  ', '.join(missing_labels)))
   if table.label_dimension(options.label) != 3:
-    errors.append('column {} has wrong dimension'.format(options.label))
+    errors.append('column {} does not have dimension'.format(options.label))

   if errors != []:
     damask.util.croak(errors)
     table.close(dismiss = True)                                             # close ASCII table file handles and delete output file
     continue
-
 # convert data to shape and arrange according to given options
+
   if options.dimension != []: table.data = table.data.reshape(options.dimension[1],options.dimension[0],3)
   if options.flipLR:          table.data = np.fliplr(table.data)
   if options.flipUD:          table.data = np.flipud(table.data)
@@ -55,7 +55,7 @@ else:

 sys.path.append(damask.solver.Marc().libraryPath('../../'))
 try:
-  from py_post import *
+  import py_post
 except:
   print('error: no valid Mentat release found')
   sys.exit(-1)

@@ -63,7 +63,7 @@ except:

 # --------------------------- open results file and initialize mesh ----------

-p = post_open(filename+'.t16')
+p = py_post.post_open(filename+'.t16')
 p.moveto(0)
 Nnodes = p.nodes()
 Nincrements = p.increments() - 1                                            # t16 contains one "virtual" increment (at 0)
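Replacing the star import keeps the Marc post-processing API behind an explicit namespace, so names like post_open can no longer shadow or be shadowed by locals. The same idea with a standard module:

    import math                                       # instead of: from math import *

    print(math.log10(100.0))                          # origin of log10 is explicit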
@@ -114,7 +114,7 @@ for incCount,position in enumerate(locations): # walk through locations

   p.moveto(position+1)                                                      # wind to correct position

 # --- get displacements

   node_displacement = [[0,0,0] for i in range(Nnodes)]
   for n in range(Nnodes):

@@ -124,10 +124,11 @@ for incCount,position in enumerate(locations): # walk through locations
   cellnode_displacement = [[c[i][n] for i in range(3)] for n in range(Ncellnodes)]


 # --- append displacements to corresponding files

   for geomtype in options.type:
-    outFilename = eval('"'+eval("'%%s_%%s_inc%%0%ii.vtk'%(math.log10(max(increments+[1]))+1)")+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
+    outFilename = eval('"'+eval("'%%s_%%s_inc%%0%ii.vtk'%(math.log10(max(increments+[1]))+1)")\
+                  +'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
     print outFilename
     shutil.copyfile('%s_%s.vtk'%(filename,geomtype),outFilename)
@@ -10,12 +10,8 @@ scriptID = ' '.join([scriptName,damask.version])

 # -----------------------------
 def ParseOutputFormat(filename,homogID,crystID,phaseID):
-#
-# parse .output* files in order to get a list of outputs
-# -----------------------------
-
-  myID = {
-          'Homogenization': homogID,
+  """parse .output* files in order to get a list of outputs"""
+  myID = {'Homogenization': homogID,
           'Crystallite':    crystID,
           'Constitutive':   phaseID,
          }

@@ -61,7 +57,7 @@ def ParseOutputFormat(filename,homogID,crystID,phaseID):
     elif length > 0:
       format[what]['outputs'].append([output,length])

-  if not '_id' in format[what]['specials']:
+  if '_id' not in format[what]['specials']:
     print "\nsection '%s' not found in <%s>"%(myID[what], what)
     print '\n'.join(map(lambda x:'  [%s]'%x, format[what]['specials']['brothers']))

@@ -70,15 +66,15 @@ def ParseOutputFormat(filename,homogID,crystID,phaseID):

 # -----------------------------
 def ParsePostfile(p,filename, outputFormat, legacyFormat):
-#
-# parse postfile in order to get position and labels of outputs
-# needs "outputFormat" for mapping of output names to postfile output indices
-# -----------------------------
-
+  """
+  parse postfile in order to get position and labels of outputs
+
+  needs "outputFormat" for mapping of output names to postfile output indices
+  """
   startVar = {True:  'GrainCount',
               False: 'HomogenizationCount'}

 # --- build statistics

   stat = { \
           'IndexOfLabel': {}, \

@@ -95,7 +91,7 @@ def ParsePostfile(p,filename, outputFormat, legacyFormat):
           'LabelOfElementalTensor': [None]*p.element_tensors(), \
          }

 # --- find labels

   for labelIndex in range(stat['NumberOfNodalScalars']):
     label = p.node_scalar_label(labelIndex)

@@ -119,9 +115,9 @@ def ParsePostfile(p,filename, outputFormat, legacyFormat):
   startIndex = stat['IndexOfLabel'][startVar[legacyFormat]]
   stat['LabelOfElementalScalar'][startIndex] = startVar[legacyFormat]

 # We now have to find a mapping for each output label as defined in the .output* files to the output position in the post file
 # Since we know where the user defined outputs start ("startIndex"), we can simply assign increasing indices to the labels
 # given in the .output* file

   offset = 1
   if legacyFormat:

@@ -177,10 +173,7 @@ def ParsePostfile(p,filename, outputFormat, legacyFormat):

 # -----------------------------
 def GetIncrementLocations(p,Nincrements,options):
-#
-# get mapping between positions in postfile and increment number
-# -----------------------------
-
+  """get mapping between positions in postfile and increment number"""
   incAtPosition = {}
   positionOfInc = {}

@@ -209,7 +202,6 @@ def GetIncrementLocations(p,Nincrements,options):

 # -----------------------------
 def SummarizePostfile(stat,where=sys.stdout):
-# -----------------------------

   where.write('\n\n')
   where.write('title:\t%s'%stat['Title'] + '\n\n')

@@ -217,16 +209,18 @@ def SummarizePostfile(stat,where=sys.stdout):
   where.write('increments:\t%i'%(stat['NumberOfIncrements']) + '\n\n')
   where.write('nodes:\t%i'%stat['NumberOfNodes'] + '\n\n')
   where.write('elements:\t%i'%stat['NumberOfElements'] + '\n\n')
-  where.write('nodal scalars:\t%i'%stat['NumberOfNodalScalars'] + '\n\n  ' + '\n  '.join(stat['LabelOfNodalScalar']) + '\n\n')
-  where.write('elemental scalars:\t%i'%stat['NumberOfElementalScalars'] + '\n\n  ' + '\n  '.join(stat['LabelOfElementalScalar']) + '\n\n')
-  where.write('elemental tensors:\t%i'%stat['NumberOfElementalTensors'] + '\n\n  ' + '\n  '.join(stat['LabelOfElementalTensor']) + '\n\n')
+  where.write('nodal scalars:\t%i'%stat['NumberOfNodalScalars'] + '\n\n  '\
+              +'\n  '.join(stat['LabelOfNodalScalar']) + '\n\n')
+  where.write('elemental scalars:\t%i'%stat['NumberOfElementalScalars'] + '\n\n  '\
+              + '\n  '.join(stat['LabelOfElementalScalar']) + '\n\n')
+  where.write('elemental tensors:\t%i'%stat['NumberOfElementalTensors'] + '\n\n  '\
+              + '\n  '.join(stat['LabelOfElementalTensor']) + '\n\n')

   return True


 # -----------------------------
 def SummarizeOutputfile(format,where=sys.stdout):
-# -----------------------------

   where.write('\nUser Defined Outputs')
   for what in format.keys():

@@ -239,7 +233,6 @@ def SummarizeOutputfile(format,where=sys.stdout):

 # -----------------------------
 def writeHeader(myfile,stat,geomtype):
-# -----------------------------

   myfile.write('2\theader\n')
   myfile.write(string.replace('$Id$','\n','\\n')+

@@ -316,7 +309,7 @@ if not os.path.exists(filename+'.t16'):

 sys.path.append(damask.solver.Marc().libraryPath('../../'))
 try:
-  from py_post import *
+  import py_post
 except:
   print('error: no valid Mentat release found')
   sys.exit(-1)

@@ -336,14 +329,14 @@ if damask.core.mesh.mesh_init_postprocessing(filename+'.mesh'):

 # --- check if ip data available for all elements; if not, then .t19 file is required

-p = post_open(filename+'.t16')
+p = py_post.post_open(filename+'.t16')
 asciiFile = False
 p.moveto(1)
 for e in range(p.elements()):
   if not damask.core.mesh.mesh_get_nodeAtIP(str(p.element(e).type),1):
     if os.path.exists(filename+'.t19'):
       p.close()
-      p = post_open(filename+'.t19')
+      p = py_post.post_open(filename+'.t19')
       asciiFile = True
       break
@ -383,18 +376,20 @@ time_start = time.time()
|
||||||
for incCount,position in enumerate(locations): # walk through locations
|
for incCount,position in enumerate(locations): # walk through locations
|
||||||
p.moveto(position+1) # wind to correct position
|
p.moveto(position+1) # wind to correct position
|
||||||
time_delta = (float(len(locations)) / float(incCount+1) - 1.0) * (time.time() - time_start)
|
time_delta = (float(len(locations)) / float(incCount+1) - 1.0) * (time.time() - time_start)
|
||||||
sys.stdout.write("\r(%02i:%02i:%02i) processing increment %i of %i..."%(time_delta//3600,time_delta%3600//60,time_delta%60,incCount+1,len(locations)))
|
sys.stdout.write("\r(%02i:%02i:%02i) processing increment %i of %i..."\
|
||||||
|
%(time_delta//3600,time_delta%3600//60,time_delta%60,incCount+1,len(locations)))
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
# --- write header
|
# --- write header
|
||||||
|
|
||||||
outFilename = {}
|
outFilename = {}
|
||||||
for geomtype in options.type:
|
for geomtype in options.type:
|
||||||
outFilename[geomtype] = eval('"'+eval("'%%s_%%s_inc%%0%ii.txt'%(math.log10(max(increments+[1]))+1)")+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
|
outFilename[geomtype] = eval('"'+eval("'%%s_%%s_inc%%0%ii.txt'%(math.log10(max(increments+[1]))+1)")\
|
||||||
|
+'"%(dirname + os.sep + os.path.split(filename)[1],geomtype,increments[incCount])')
|
||||||
with open(outFilename[geomtype],'w') as myfile:
|
with open(outFilename[geomtype],'w') as myfile:
|
||||||
writeHeader(myfile,stat,geomtype)
|
writeHeader(myfile,stat,geomtype)
|
||||||
|
|
||||||
# --- write node based data
|
# --- write node based data
|
||||||
|
|
||||||
if geomtype == 'nodebased':
|
if geomtype == 'nodebased':
|
||||||
for n in range(stat['NumberOfNodes']):
|
for n in range(stat['NumberOfNodes']):
|
||||||
|
@ -403,7 +398,7 @@ for incCount,position in enumerate(locations): # walk through locations
|
||||||
myfile.write('\t'+str(p.node_scalar(n,l)))
|
myfile.write('\t'+str(p.node_scalar(n,l)))
|
||||||
myfile.write('\n')
|
myfile.write('\n')
|
||||||
|
|
||||||
# --- write ip based data
|
# --- write ip based data
|
||||||
|
|
||||||
elif geomtype == 'ipbased':
|
elif geomtype == 'ipbased':
|
||||||
for e in range(stat['NumberOfElements']):
|
for e in range(stat['NumberOfElements']):
|
||||||
|
@ -424,5 +419,3 @@ for incCount,position in enumerate(locations): # walk through locations
|
||||||
|
|
||||||
p.close()
|
p.close()
|
||||||
sys.stdout.write("\n")
|
sys.stdout.write("\n")
|
||||||
|
|
||||||
# --------------------------- DONE --------------------------------
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os, sys, string
|
import os,sys
|
||||||
import damask
|
import damask
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
|
|
||||||
|
@ -14,9 +14,9 @@ def outMentat(cmd,locals):
|
||||||
exec(cmd[3:])
|
exec(cmd[3:])
|
||||||
elif cmd[0:3] == '(?)':
|
elif cmd[0:3] == '(?)':
|
||||||
cmd = eval(cmd[3:])
|
cmd = eval(cmd[3:])
|
||||||
py_send(cmd)
|
py_mentat.py_send(cmd)
|
||||||
else:
|
else:
|
||||||
py_send(cmd)
|
py_mentat.py_send(cmd)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
|
@ -59,7 +59,8 @@ def colorMap(colors,baseIdx=32):
|
||||||
# MAIN FUNCTION STARTS HERE
|
# MAIN FUNCTION STARTS HERE
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
|
|
||||||
parser = OptionParser(option_class=damask.extendableOption, usage="%prog [options] predefinedScheme | (lower_h,s,l upper_h,s,l)", description = """
|
parser = OptionParser(option_class=damask.extendableOption,
|
||||||
|
usage="%prog [options] predefinedScheme | (lower_h,s,l upper_h,s,l)", description = """
|
||||||
Changes the color map in MSC.Mentat.
|
Changes the color map in MSC.Mentat.
|
||||||
|
|
||||||
Interpolates colors between "lower_hsl" and "upper_hsl".
|
Interpolates colors between "lower_hsl" and "upper_hsl".
|
||||||
|
@ -121,13 +122,12 @@ if options.palettef:
|
||||||
elif options.palette:
|
elif options.palette:
|
||||||
for theColor in theMap.export(format='list',steps=options.colorcount):
|
for theColor in theMap.export(format='list',steps=options.colorcount):
|
||||||
print '\t'.join(map(lambda x: str(int(255*x)),theColor))
|
print '\t'.join(map(lambda x: str(int(255*x)),theColor))
|
||||||
else:
|
else: # connect to Mentat and change colorMap
|
||||||
### connect to Mentat and change colorMap
|
|
||||||
sys.path.append(damask.solver.Marc().libraryPath('../../'))
|
sys.path.append(damask.solver.Marc().libraryPath('../../'))
|
||||||
try:
|
try:
|
||||||
from py_mentat import *
|
import py_mentat
|
||||||
print 'waiting to connect...'
|
print 'waiting to connect...'
|
||||||
py_connect('',options.port)
|
py_mentat.py_connect('',options.port)
|
||||||
print 'connected...'
|
print 'connected...'
|
||||||
mentat = True
|
mentat = True
|
||||||
except:
|
except:
|
||||||
|
@ -138,7 +138,7 @@ else:
|
||||||
cmds = colorMap(theMap.export(format='list',steps=options.colorcount),options.baseIdx)
|
cmds = colorMap(theMap.export(format='list',steps=options.colorcount),options.baseIdx)
|
||||||
if mentat:
|
if mentat:
|
||||||
output(['*show_table']+cmds+['*show_model *redraw'],outputLocals,'Mentat')
|
output(['*show_table']+cmds+['*show_model *redraw'],outputLocals,'Mentat')
|
||||||
py_disconnect()
|
py_mentat.py_disconnect()
|
||||||
|
|
||||||
if options.verbose:
|
if options.verbose:
|
||||||
output(cmds,outputLocals,'Stdout')
|
output(cmds,outputLocals,'Stdout')
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import string,sys,os
|
import sys,os
|
||||||
import damask
|
import damask
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
|
|
||||||
|
@ -9,7 +9,7 @@ scriptName = os.path.splitext(os.path.basename(__file__))[0]
|
||||||
scriptID = ' '.join([scriptName,damask.version])
|
scriptID = ' '.join([scriptName,damask.version])
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
# --------------------------------------------------------------------
|
||||||
# MAIN
|
# MAIN
|
||||||
# --------------------------------------------------------------------
|
# --------------------------------------------------------------------
|
||||||
#Borland, D., & Taylor, R. M. (2007). Rainbow Color Map (Still) Considered Harmful. Computer Graphics and Applications, IEEE, 27(2), 14--17.
|
#Borland, D., & Taylor, R. M. (2007). Rainbow Color Map (Still) Considered Harmful. Computer Graphics and Applications, IEEE, 27(2), 14--17.
|
||||||
#Moreland, K. (2009). Diverging Color Maps for Scientific Visualization. In Proc. 5th Int. Symp. Visual Computing (pp. 92--103).
|
#Moreland, K. (2009). Diverging Color Maps for Scientific Visualization. In Proc. 5th Int. Symp. Visual Computing (pp. 92--103).
|
||||||
|
@ -62,8 +62,10 @@ if options.trim[0] < -1.0 or \
|
||||||
parser.error('invalid trim range (-1 +1).')
|
parser.error('invalid trim range (-1 +1).')
|
||||||
|
|
||||||
|
|
||||||
name = options.format if options.basename == None else options.basename
|
name = options.format if options.basename is None\
|
||||||
output = sys.stdout if options.basename == None else open(os.path.basename(options.basename)+extensions[outtypes.index(options.format)],'w')
|
else options.basename
|
||||||
|
output = sys.stdout if options.basename is None\
|
||||||
|
else open(os.path.basename(options.basename)+extensions[outtypes.index(options.format)],'w')
|
||||||
|
|
||||||
colorLeft = damask.Color(options.colormodel.upper(), list(options.left))
|
colorLeft = damask.Color(options.colormodel.upper(), list(options.left))
|
||||||
colorRight = damask.Color(options.colormodel.upper(), list(options.right))
|
colorRight = damask.Color(options.colormodel.upper(), list(options.right))
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,string
|
import os,sys
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
import damask
|
import damask
|
||||||
|
@ -79,7 +79,7 @@ for name in filenames:
|
||||||
|
|
||||||
# ------------------------------------------ assemble header ---------------------------------------
|
# ------------------------------------------ assemble header ---------------------------------------
|
||||||
|
|
||||||
randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed == None else options.randomSeed # random seed per file
|
randomSeed = int(os.urandom(4).encode('hex'), 16) if options.randomSeed is None else options.randomSeed # random seed per file
|
||||||
np.random.seed(randomSeed)
|
np.random.seed(randomSeed)
|
||||||
|
|
||||||
table.info_append([scriptID + '\t' + ' '.join(sys.argv[1:]),
|
table.info_append([scriptID + '\t' + ' '.join(sys.argv[1:]),
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: UTF-8 no BOM -*-
|
# -*- coding: UTF-8 no BOM -*-
|
||||||
|
|
||||||
import os,sys,math,re,threading,time,struct,string
|
import os,sys,math,re,time,struct,string
|
||||||
import damask
|
import damask
|
||||||
from optparse import OptionParser, OptionGroup
|
from optparse import OptionParser, OptionGroup
|
||||||
|
|
||||||
|
@ -17,7 +17,6 @@ fileExtensions = { \
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
class vector: # mimic py_post node object
|
class vector: # mimic py_post node object
|
||||||
# -----------------------------
|
|
||||||
x,y,z = [None,None,None]
|
x,y,z = [None,None,None]
|
||||||
|
|
||||||
def __init__(self,coords):
|
def __init__(self,coords):
|
||||||
|
@ -27,7 +26,6 @@ class vector: # mimic py_post node object
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
class element: # mimic py_post element object
|
class element: # mimic py_post element object
|
||||||
# -----------------------------
|
|
||||||
items = []
|
items = []
|
||||||
type = None
|
type = None
|
||||||
|
|
||||||
|
@ -37,7 +35,6 @@ class element: # mimic py_post element object
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
class elemental_scalar: # mimic py_post element_scalar object
|
class elemental_scalar: # mimic py_post element_scalar object
|
||||||
# -----------------------------
|
|
||||||
id = None
|
id = None
|
||||||
value = None
|
value = None
|
||||||
|
|
||||||
|
@ -48,7 +45,6 @@ class elemental_scalar: # mimic py_post element_scalar object
|
||||||
|
|
||||||
# -----------------------------
|
# -----------------------------
|
||||||
class MPIEspectral_result: # mimic py_post result object
|
class MPIEspectral_result: # mimic py_post result object
|
||||||
# -----------------------------
|
|
||||||
|
|
||||||
file = None
|
file = None
|
||||||
dataOffset = 0
|
dataOffset = 0
|
||||||
|
@ -68,7 +64,8 @@ class MPIEspectral_result: # mimic py_post result object
|
||||||
increment = 0
|
increment = 0
|
||||||
startingIncrement = 0
|
startingIncrement = 0
|
||||||
position = 0
|
position = 0
|
||||||
time = 0.0 # this is a dummy at the moment, we need to parse the load file and figure out what time a particular increment corresponds to
|
# this is a dummy at the moment, we need to parse the load file and figure out what time a particular increment corresponds to
|
||||||
|
time = 0.0
|
||||||
N_nodes = 0
|
N_nodes = 0
|
||||||
N_node_scalars = 0
|
N_node_scalars = 0
|
||||||
N_elements = 0
|
N_elements = 0
|
||||||
|
@ -87,40 +84,40 @@ class MPIEspectral_result: # mimic py_post result object
|
||||||
self.dataOffset += 7
|
self.dataOffset += 7
|
||||||
#search first for the new keywords with ':', if not found try to find the old ones
|
#search first for the new keywords with ':', if not found try to find the old ones
|
||||||
self.theTitle = self._keyedString('load:')
|
self.theTitle = self._keyedString('load:')
|
||||||
if self.theTitle == None:
|
if self.theTitle is None:
|
||||||
self.theTitle = self._keyedString('load')
|
self.theTitle = self._keyedString('load')
|
||||||
|
|
||||||
self.wd = self._keyedString('workingdir:')
|
self.wd = self._keyedString('workingdir:')
|
||||||
if self.wd == None:
|
if self.wd is None:
|
||||||
self.wd = self._keyedString('workingdir')
|
self.wd = self._keyedString('workingdir')
|
||||||
|
|
||||||
self.geometry = self._keyedString('geometry:')
|
self.geometry = self._keyedString('geometry:')
|
||||||
if self.geometry == None:
|
if self.geometry is None:
|
||||||
self.geometry = self._keyedString('geometry')
|
self.geometry = self._keyedString('geometry')
|
||||||
|
|
||||||
self.N_loadcases = self._keyedPackedArray('loadcases:',count=1,type='i')[0]
|
self.N_loadcases = self._keyedPackedArray('loadcases:',count=1,type='i')[0]
|
||||||
if self.N_loadcases == None:
|
if self.N_loadcases is None:
|
||||||
self.N_loadcases = self._keyedPackedArray('loadcases',count=1,type='i')[0]
|
self.N_loadcases = self._keyedPackedArray('loadcases',count=1,type='i')[0]
|
||||||
|
|
||||||
self._frequencies = self._keyedPackedArray('frequencies:',count=self.N_loadcases,type='i')
|
self._frequencies = self._keyedPackedArray('frequencies:',count=self.N_loadcases,type='i')
|
||||||
if all ( i == None for i in self._frequencies):
|
if all ( i is None for i in self._frequencies):
|
||||||
self._frequencies = self._keyedPackedArray('frequencies',count=self.N_loadcases,type='i')
|
self._frequencies = self._keyedPackedArray('frequencies',count=self.N_loadcases,type='i')
|
||||||
|
|
||||||
self._increments = self._keyedPackedArray('increments:',count=self.N_loadcases,type='i')
|
self._increments = self._keyedPackedArray('increments:',count=self.N_loadcases,type='i')
|
||||||
if all (i == None for i in self._increments):
|
if all (i is None for i in self._increments):
|
||||||
self._increments = self._keyedPackedArray('increments',count=self.N_loadcases,type='i')
|
self._increments = self._keyedPackedArray('increments',count=self.N_loadcases,type='i')
|
||||||
|
|
||||||
self.startingIncrement = self._keyedPackedArray('startingIncrement:',count=1,type='i')[0]
|
self.startingIncrement = self._keyedPackedArray('startingIncrement:',count=1,type='i')[0]
|
||||||
if self.startingIncrement == None:
|
if self.startingIncrement is None:
|
||||||
self.startingIncrement = self._keyedPackedArray('startingIncrement',count=1,type='i')[0]
|
self.startingIncrement = self._keyedPackedArray('startingIncrement',count=1,type='i')[0]
|
||||||
|
|
||||||
|
|
||||||
self._times = self._keyedPackedArray('times:',count=self.N_loadcases,type='d')
|
self._times = self._keyedPackedArray('times:',count=self.N_loadcases,type='d')
|
||||||
if all (i == None for i in self._times):
|
if all (i is None for i in self._times):
|
||||||
self._times = self._keyedPackedArray('times',count=self.N_loadcases,type='d')
|
self._times = self._keyedPackedArray('times',count=self.N_loadcases,type='d')
|
||||||
|
|
||||||
self._logscales = self._keyedPackedArray('logscales:',count=self.N_loadcases,type='i')
|
self._logscales = self._keyedPackedArray('logscales:',count=self.N_loadcases,type='i')
|
||||||
if all (i == None for i in self._logscales):
|
if all (i is None for i in self._logscales):
|
||||||
self._logscales = self._keyedPackedArray('logscales',count=self.N_loadcases,type='i')
|
self._logscales = self._keyedPackedArray('logscales',count=self.N_loadcases,type='i')
|
||||||
|
|
||||||
self.size = self._keyedPackedArray('size:',count=3,type='d')
|
self.size = self._keyedPackedArray('size:',count=3,type='d')
|
||||||
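
The sweep from == None to is None in this constructor is more than style: None is a singleton, so the identity test is both what PEP 8 prescribes and immune to objects whose __eq__ is permissive. A minimal sketch of the failure mode the change guards against (the Chatty class is hypothetical):

class Chatty(object):
    """Hypothetical type whose __eq__ answers True to everything."""
    def __eq__(self, other):
        return True

c = Chatty()
print(c == None)   # True  -- equality is hijacked by __eq__
print(c is None)   # False -- identity cannot be overridden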
@ -135,7 +132,7 @@ class MPIEspectral_result:   # mimic py_post result object
     self.N_elements = self.grid[0] * self.grid[1] * self.grid[2]

     self.N_element_scalars = self._keyedPackedArray('materialpoint_sizeResults:',count=1,type='i')[0]
-    if self.N_element_scalars == None:
+    if self.N_element_scalars is None:
       self.N_element_scalars = self._keyedPackedArray('materialpoint_sizeResults',count=1,type='i')[0]

     self.N_positions = (self.filesize-self.dataOffset)/(self.N_elements*self.N_element_scalars*8)

@ -156,8 +153,7 @@ class MPIEspectral_result:   # mimic py_post result object
       print '\n**\n* Unexpected file size. Incomplete simulation or file corrupted!\n**'

   def __str__(self):
+    """Summary of results file"""

     return '\n'.join([
       'workdir: %s'%self.wd,
       'geometry: %s'%self.geometry,

@ -181,13 +177,14 @@ class MPIEspectral_result:   # mimic py_post result object
     filepos=0                                                # start at the beginning
     while name != identifier and filepos < self.dataOffset:  # stop searching when found or when reached end of header
       self.file.seek(filepos)
-      dataLen=struct.unpack('i',self.file.read(4))[0]        # read the starting tag in front of the keyword (Fortran indicates start and end of writing by a 4 byte tag indicating the length of the following data)
-      name = self.file.read(len(identifier))                 # anticipate identifier
-      start=filepos+(4+len(identifier))                      # this is the position where the values for the found key are stored
-      filepos=filepos+(4+dataLen+4)                          # forward to next keyword
+# read the starting tag in front of the keyword (Fortran indicates start and end of writing by a 4 byte tag indicating the length of the following data)
+      dataLen=struct.unpack('i',self.file.read(4))[0]
+      name = self.file.read(len(identifier))                 # anticipate identifier
+      start=filepos+(4+len(identifier))                      # position of the values for the found key
+      filepos=filepos+(4+dataLen+4)                          # forward to next keyword

     if name==identifier:                                     # found the correct name
       key['pos']  = start                                    # save position
       key['name'] = name
     return key

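The relocated comment in the header scan above documents the on-disk layout the parser walks: Fortran unformatted sequential output brackets every record with a 4-byte length tag, so reading a header is "read tag, read payload, skip the matching end tag". A minimal sketch of that walk, under the same assumptions the script makes (native-endian 4-byte 'i' tags, binary mode; the file name is hypothetical):

import struct

def records(f):
    """Yield (offset, payload) for each Fortran unformatted sequential record."""
    while True:
        head = f.read(4)
        if len(head) < 4: return              # end of file
        dataLen = struct.unpack('i',head)[0]  # leading length tag
        payload = f.read(dataLen)
        f.seek(4,1)                           # skip the matching trailing tag
        yield f.tell()-dataLen-4, payload     # offset where the payload started

with open('results.spectralOut','rb') as f:
    for pos,rec in records(f):
        print(pos,len(rec))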
@ -195,7 +192,7 @@ class MPIEspectral_result:   # mimic py_post result object
     bytecount = {'d': 8,'i': 4}
     values = [default]*count
     key = self.locateKeyValue(identifier)
-    if key['name'] == identifier and key['pos'] != None:
+    if key['name'] == identifier and key['pos'] is not None:
       self.file.seek(key['pos'])
       for i in range(count):
         values[i] = struct.unpack(type,self.file.read(bytecount[type]))[0]

@ -286,8 +283,6 @@ class MPIEspectral_result:   # mimic py_post result object
     if not options.legacy:
       incStart =  self.dataOffset \
                +  self.position*8*self.N_elements*self.N_element_scalars
-      # header & footer + extra header and footer for 4 byte int range (Fortran)
-      # values
       where = (e*self.N_element_scalars + idx)*8
       try:
         self.file.seek(incStart+where)

@ -299,15 +294,15 @@ class MPIEspectral_result:   # mimic py_post result object

     else:
       self.fourByteLimit = 2**31 -1 -8
+      # header & footer + extra header and footer for 4 byte int range (Fortran)
+      # values
       incStart =  self.dataOffset \
                +  self.position*8*( 1 + self.N_elements*self.N_element_scalars*8//self.fourByteLimit \
                                      + self.N_elements*self.N_element_scalars)
-      # header & footer + extra header and footer for 4 byte int range (Fortran)
-      # values

       where = (e*self.N_element_scalars + idx)*8
       try:
         if where%self.fourByteLimit + 8 >= self.fourByteLimit:  # danger of reading into fortran record footer at 4 byte limit
           data=''
           for i in xrange(8):
             self.file.seek(incStart+where+(where//self.fourByteLimit)*8+4)

@ -330,50 +325,9 @@ class MPIEspectral_result:   # mimic py_post result object
   def element_tensors(self):
     return self.N_element_tensors

-# -----------------------------
-class backgroundMessage(threading.Thread):
-# -----------------------------
-
-  def __init__(self):
-    threading.Thread.__init__(self)
-    self.message = ''
-    self.new_message = ''
-    self.counter = 0
-    self.symbols = ['- ', '\ ', '| ', '/ ',]
-    self.waittime = 0.5
-
-  def __quit__(self):
-    length = len(self.message) + len(self.symbols[self.counter])
-    sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length)
-    sys.stderr.write('')
-
-  def run(self):
-    while not threading.enumerate()[0]._Thread__stopped:
-      time.sleep(self.waittime)
-      self.update_message()
-    self.__quit__()
-
-  def set_message(self, new_message):
-    self.new_message = new_message
-    self.print_message()
-
-  def print_message(self):
-    length = len(self.message) + len(self.symbols[self.counter])
-    sys.stderr.write(chr(8)*length + ' '*length + chr(8)*length)     # delete former message
-    sys.stderr.write(self.symbols[self.counter] + self.new_message)  # print new message
-    self.message = self.new_message
-
-  def update_message(self):
-    self.counter = (self.counter + 1)%len(self.symbols)
-    self.print_message()
-
-
 # -----------------------------
 def ipCoords(elemType, nodalCoordinates):
-#
-# returns IP coordinates for a given element
-# -----------------------------
+  """returns IP coordinates for a given element"""

   nodeWeightsPerNode = {
             7: [ [27.0,  9.0,  3.0,  9.0,  9.0,  3.0,  1.0,  3.0],
                  [ 9.0, 27.0,  9.0,  3.0,  3.0,  9.0,  3.0,  1.0],

@ -422,10 +376,7 @@ def ipCoords(elemType, nodalCoordinates):

 # -----------------------------
 def ipIDs(elemType):
-#
-# returns IP numbers for given element type
-# -----------------------------
+  """returns IP numbers for given element type"""

   ipPerNode = {
             7: [ 1, 2, 4, 3, 5, 6, 8, 7 ],
            57: [ 1, 2, 4, 3, 5, 6, 8, 7 ],

@ -441,9 +392,7 @@ def ipIDs(elemType):

 # -----------------------------
 def substituteLocation(string, mesh, coords):
-#
-# do variable interpolation in group and filter strings
-# -----------------------------
+  """do variable interpolation in group and filter strings"""
   substitute = string
   substitute = substitute.replace('elem', str(mesh[0]))
   substitute = substitute.replace('node', str(mesh[1]))

@ -458,10 +407,7 @@ def substituteLocation(string, mesh, coords):

 # -----------------------------
 def heading(glue,parts):
-#
-# joins pieces from parts by glue. second to last entry in pieces tells multiplicity
-# -----------------------------
+  """joins pieces from parts by glue. second to last entry in pieces tells multiplicity"""

   header = []
   for pieces in parts:
     if pieces[-2] == 0:

@ -473,12 +419,12 @@ def heading(glue,parts):

 # -----------------------------
 def mapIncremental(label, mapping, N, base, new):
-#
-# applies the function defined by "mapping"
-# (can be either 'min','max','avg', 'sum', or user specified)
-# to a list of data
-# -----------------------------
-
+  """
+  applies the function defined by "mapping"
+
+  (can be either 'min','max','avg', 'sum', or user specified)
+  to a list of data
+  """
   theMap = { 'min': lambda n,b,a: a if n==0 else min(b,a),
              'max': lambda n,b,a: a if n==0 else max(b,a),
              'avg': lambda n,b,a: (n*b+a)/(n+1),
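
The new docstring of mapIncremental pins down its contract: fold the newest value a into a running statistic b that already covers n values. The 'avg' branch, for example, is the standard incremental mean (n*b+a)/(n+1), which needs no stored history. A quick worked check of that update rule:

avg = lambda n,b,a: (n*b+a)/(n+1.0)   # n values so far, running mean b, new value a

mean = 0.0
for n,value in enumerate([2.0,4.0,6.0]):
    mean = avg(n,mean,value)
print(mean)                           # 4.0 == (2+4+6)/3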
@ -504,10 +450,7 @@ def mapIncremental(label, mapping, N, base, new):

 # -----------------------------
 def OpenPostfile(name,type,nodal = False):
-#
-# open postfile with extrapolation mode "translate"
-# -----------------------------
+  """open postfile with extrapolation mode 'translate'"""

   p = {\
        'spectral': MPIEspectral_result,\
        'marc':     post_open,\

@ -520,10 +463,7 @@ def OpenPostfile(name,type,nodal = False):

 # -----------------------------
 def ParseOutputFormat(filename,what,me):
-#
-# parse .output* files in order to get a list of outputs
-# -----------------------------
+  """parse .output* files in order to get a list of outputs"""

   content = []
   format = {'outputs':{},'specials':{'brothers':[]}}
   for prefix in ['']+map(str,range(1,17)):

@ -567,13 +507,11 @@ def ParseOutputFormat(filename,what,me):

 # -----------------------------
 def ParsePostfile(p,filename, outputFormat):
-#
-# parse postfile in order to get position and labels of outputs
-# needs "outputFormat" for mapping of output names to postfile output indices
-# -----------------------------
-
-# --- build statistics
-
+  """
+  parse postfile in order to get position and labels of outputs
+
+  needs "outputFormat" for mapping of output names to postfile output indices
+  """
   stat = { \
   'IndexOfLabel': {}, \
   'Title': p.title(), \

@ -589,7 +527,7 @@ def ParsePostfile(p,filename, outputFormat):
   'LabelOfElementalTensor': [None]*p.element_tensors(), \
   }

# --- find labels

   for labelIndex in range(stat['NumberOfNodalScalars']):
     label = p.node_scalar_label(labelIndex)

@ -613,9 +551,9 @@ def ParsePostfile(p,filename, outputFormat):
     startIndex = stat['IndexOfLabel']['HomogenizationCount']
     stat['LabelOfElementalScalar'][startIndex] = 'HomogenizationCount'

# We now have to find a mapping for each output label as defined in the .output* files to the output position in the post file
# Since we know where the user defined outputs start ("startIndex"), we can simply assign increasing indices to the labels
# given in the .output* file

   offset = 1
   for (name,N) in outputFormat['Homogenization']['outputs']:

@ -663,7 +601,6 @@ def ParsePostfile(p,filename, outputFormat):

 # -----------------------------
 def SummarizePostfile(stat,where=sys.stdout,format='marc'):
-# -----------------------------

   where.write('\n\n')
   where.write('title:\t%s'%stat['Title'] + '\n\n')

@ -671,9 +608,12 @@ def SummarizePostfile(stat,where=sys.stdout,format='marc'):
   where.write('increments:\t%i'%(stat['NumberOfIncrements']) + '\n\n')
   where.write('nodes:\t%i'%stat['NumberOfNodes'] + '\n\n')
   where.write('elements:\t%i'%stat['NumberOfElements'] + '\n\n')
-  where.write('nodal scalars:\t%i'%stat['NumberOfNodalScalars'] + '\n\n ' + '\n '.join(stat['LabelOfNodalScalar']) + '\n\n')
-  where.write('elemental scalars:\t%i'%stat['NumberOfElementalScalars'] + '\n\n ' + '\n '.join(stat['LabelOfElementalScalar']) + '\n\n')
-  where.write('elemental tensors:\t%i'%stat['NumberOfElementalTensors'] + '\n\n ' + '\n '.join(stat['LabelOfElementalTensor']) + '\n\n')
+  where.write('nodal scalars:\t%i'%stat['NumberOfNodalScalars'] + '\n\n '\
+            +'\n '.join(stat['LabelOfNodalScalar']) + '\n\n')
+  where.write('elemental scalars:\t%i'%stat['NumberOfElementalScalars'] + '\n\n '\
+            + '\n '.join(stat['LabelOfElementalScalar']) + '\n\n')
+  where.write('elemental tensors:\t%i'%stat['NumberOfElementalTensors'] + '\n\n '\
+            + '\n '.join(stat['LabelOfElementalTensor']) + '\n\n')

   return True

@ -799,14 +739,14 @@ if not os.path.exists(files[0]):

# --- figure out filetype

-if options.filetype == None:
+if options.filetype is None:
   ext = os.path.splitext(files[0])[1]
   for theType in fileExtensions.keys():
     if ext in fileExtensions[theType]:
       options.filetype = theType
       break

-if options.filetype != None:  options.filetype = options.filetype.lower()
+if options.filetype is not None:  options.filetype = options.filetype.lower()

 if options.filetype == 'marc':  offset_pos = 1
 else:                           offset_pos = 0

@ -822,7 +762,7 @@ if options.filetype == 'marc':
   sys.path.append(damask.solver.Marc().libraryPath('../../'))

   try:
-    from py_post import *
+    from py_post import post_open
   except:
     print('error: no valid Mentat release found')
     sys.exit(-1)

@ -834,7 +774,7 @@ if options.constitutiveResult and not options.phase:
   parser.print_help()
   parser.error('constitutive results require phase...')

-if options.nodalScalar and ( options.elemScalar or options.elemTensor
+if options.nodalScalar and ( options.elemScalar or options.elemTensor\
                           or options.homogenizationResult or options.crystalliteResult or options.constitutiveResult ):
   parser.print_help()
   parser.error('not allowed to mix nodal with elemental results...')

@ -851,7 +791,7 @@ options.sep.reverse()

# --- start background messaging

-bg = backgroundMessage()
+bg = damask.util.backgroundMessage()
 bg.start()

# --- parse .output and .t16 files

@ -874,7 +814,7 @@ bg.set_message('parsing .output files...')

 for what in me:
   outputFormat[what] = ParseOutputFormat(filename, what, me[what])
-  if not '_id' in outputFormat[what]['specials']:
+  if '_id' not in outputFormat[what]['specials']:
     print "\nsection '%s' not found in <%s>"%(me[what], what)
     print '\n'.join(map(lambda x:'  [%s]'%x, outputFormat[what]['specials']['brothers']))

@ -886,15 +826,18 @@ if options.filetype == 'marc':
   stat['NumberOfIncrements'] -= 1  # t16 contains one "virtual" increment (at 0)

# --- sanity check for output variables
-# for mentat variables (nodalScalar,elemScalar,elemTensor) we simply have to check whether the label is found in the stat[indexOfLabel] dictionary
-# for user defined variables (homogenizationResult,crystalliteResult,constitutiveResult) we have to check the corresponding outputFormat, since the namescheme in stat['IndexOfLabel'] is different
+# for mentat variables (nodalScalar,elemScalar,elemTensor) we simply have to check whether the label
+# is found in the stat[indexOfLabel] dictionary for user defined variables (homogenizationResult,
+# crystalliteResult,constitutiveResult) we have to check the corresponding outputFormat, since the
+# namescheme in stat['IndexOfLabel'] is different

 for opt in ['nodalScalar','elemScalar','elemTensor','homogenizationResult','crystalliteResult','constitutiveResult']:
   if eval('options.%s'%opt):
     for label in eval('options.%s'%opt):
       if (opt in ['nodalScalar','elemScalar','elemTensor'] and label not in stat['IndexOfLabel'] and label not in ['elements',]) \
          or (opt in ['homogenizationResult','crystalliteResult','constitutiveResult'] \
-             and (not outputFormat[opt[:-6].capitalize()]['outputs'] or not label in zip(*outputFormat[opt[:-6].capitalize()]['outputs'])[0])):
+             and (not outputFormat[opt[:-6].capitalize()]['outputs'] \
+                  or label not in zip(*outputFormat[opt[:-6].capitalize()]['outputs'])[0])):
         parser.error('%s "%s" unknown...'%(opt,label))


@ -952,15 +895,14 @@ if options.nodalScalar:
       myIpID = 0
       myGrainID = 0

-      # --- filter valid locations
-      filter = substituteLocation(options.filter, [myElemID,myNodeID,myIpID,myGrainID], myNodeCoordinates)  # generates an expression that is only true for the locations specified by options.filter
+      # generate an expression that is only true for the locations specified by options.filter
+      filter = substituteLocation(options.filter, [myElemID,myNodeID,myIpID,myGrainID], myNodeCoordinates)
       if filter != '' and not eval(filter):  # for all filter expressions that are not true:...
         continue                             # ... ignore this data point and continue with next

       # --- group data locations
-      grp = substituteLocation('#'.join(options.sep), [myElemID,myNodeID,myIpID,myGrainID], myNodeCoordinates)  # generates a unique key for a group of separated data based on the separation criterium for the location
+      # generate a unique key for a group of separated data based on the separation criterium for the location
+      grp = substituteLocation('#'.join(options.sep), [myElemID,myNodeID,myIpID,myGrainID], myNodeCoordinates)

       if grp not in index:                   # create a new group if not yet present
         index[grp] = groupCount

@ -983,26 +925,28 @@ else:
     if e%1000 == 0:
       bg.set_message('scan elem %i...'%e)
     myElemID = p.element_id(e)
-    myIpCoordinates = ipCoords(p.element(e).type, map(lambda node: [node.x, node.y, node.z], map(p.node, map(p.node_sequence, p.element(e).items))))
+    myIpCoordinates = ipCoords(p.element(e).type, map(lambda node: [node.x, node.y, node.z],
+                                                      map(p.node, map(p.node_sequence, p.element(e).items))))
     myIpIDs = ipIDs(p.element(e).type)
     Nips = len(myIpIDs)
     myNodeIDs = p.element(e).items[:Nips]
     for n in range(Nips):
       myIpID = myIpIDs[n]
       myNodeID = myNodeIDs[n]
-      for g in range(('GrainCount' in stat['IndexOfLabel'] and int(p.element_scalar(e, stat['IndexOfLabel']['GrainCount'])[0].value))
-                     or 1):
+      for g in range(('GrainCount' in stat['IndexOfLabel']\
+                      and int(p.element_scalar(e, stat['IndexOfLabel']['GrainCount'])[0].value))\
+                      or 1):
         myGrainID = g + 1

         # --- filter valid locations
-        filter = substituteLocation(options.filter, [myElemID,myNodeID,myIpID,myGrainID], myIpCoordinates[n])  # generates an expression that is only true for the locations specified by options.filter
+        # generates an expression that is only true for the locations specified by options.filter
+        filter = substituteLocation(options.filter, [myElemID,myNodeID,myIpID,myGrainID], myIpCoordinates[n])
         if filter != '' and not eval(filter):  # for all filter expressions that are not true:...
           continue                             # ... ignore this data point and continue with next

         # --- group data locations
-        grp = substituteLocation('#'.join(options.sep), [myElemID,myNodeID,myIpID,myGrainID], myIpCoordinates[n])  # generates a unique key for a group of separated data based on the separation criterium for the location
+        # generates a unique key for a group of separated data based on the separation criterium for the location
+        grp = substituteLocation('#'.join(options.sep), [myElemID,myNodeID,myIpID,myGrainID], myIpCoordinates[n])

         if grp not in index:                   # create a new group if not yet present
           index[grp] = groupCount

@ -1059,11 +1003,8 @@ fileOpen = False
 assembleHeader = True
 header = []
 standard = ['inc'] + \
-           {True: ['time'],
-            False:[]}[options.time] + \
-           ['elem','node','ip','grain'] + \
-           {True: ['1_nodeinitialcoord','2_nodeinitialcoord','3_nodeinitialcoord'],
-            False:['1_ipinitialcoord','2_ipinitialcoord','3_ipinitialcoord']}[options.nodalScalar != []]
+           ['time'] if options.time else [] + \
+           ['elem','node','ip','grain','1_pos','2_pos','3_pos']

# --------------------------- loop over positions --------------------------------

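One caution on the rewritten standard column list: Python's conditional expression binds looser than +, so ['inc'] + ['time'] if options.time else [] + [...] parses as (['inc'] + ['time']) if options.time else ([] + [...]), and one of the two halves is always dropped. If the ternary style is wanted, it needs its own parentheses; a minimal sketch of the safe form (the options class is a stand-in for the parsed option object):

class options: time = True            # stand-in for the parsed options object

standard = ['inc'] + \
           (['time'] if options.time else []) + \
           ['elem','node','ip','grain','1_pos','2_pos','3_pos']
print(standard)                       # ['inc', 'time', 'elem', 'node', 'ip', 'grain', '1_pos', '2_pos', '3_pos']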
@ -1104,7 +1045,8 @@ for incCount,position in enumerate(locations):      # walk through locations
     if fileOpen:
       file.close()
       fileOpen = False
-    outFilename = eval('"'+eval("'%%s_inc%%0%ii%%s.txt'%(math.log10(max(increments+[1]))+1)")+'"%(dirname + os.sep + options.prefix + os.path.split(filename)[1],increments[incCount],options.suffix)')
+    outFilename = eval('"'+eval("'%%s_inc%%0%ii%%s.txt'%(math.log10(max(increments+[1]))+1)")\
+                      +'"%(dirname + os.sep + options.prefix + os.path.split(filename)[1],increments[incCount],options.suffix)')
   else:
     outFilename = '%s.txt'%(dirname + os.sep + options.prefix + os.path.split(filename)[1] + options.suffix)

@ -1128,7 +1070,8 @@ for incCount,position in enumerate(locations):      # walk through locations
     member += 1
     if member%1000 == 0:
       time_delta = ((len(locations)*memberCount)/float(member+incCount*memberCount)-1.0)*(time.time()-time_start)
-      bg.set_message('(%02i:%02i:%02i) processing point %i of %i from increment %i (position %i)...'%(time_delta//3600,time_delta%3600//60,time_delta%60,member,memberCount,increments[incCount],position))
+      bg.set_message('(%02i:%02i:%02i) processing point %i of %i from increment %i (position %i)...'
+                     %(time_delta//3600,time_delta%3600//60,time_delta%60,member,memberCount,increments[incCount],position))

     newby = []  # current member's data

@ -1140,7 +1083,9 @@ for incCount,position in enumerate(locations):      # walk through locations
         else:
           length = 1
           content = [ p.node_scalar(p.node_sequence(n),stat['IndexOfLabel'][label]) ]
-        if assembleHeader: header += heading('_',[[component,''.join( label.split() )] for component in range(int(length>1),length+int(length>1))])
+        if assembleHeader:
+          header += heading('_',[[component,''.join( label.split() )]
+                                 for component in range(int(length>1),length+int(length>1))])
         newby.append({'label':label,
                       'len':length,
                       'content':content })

@ -1156,7 +1101,8 @@ for incCount,position in enumerate(locations):      # walk through locations
       if options.elemTensor:
         for label in options.elemTensor:
           if assembleHeader:
-            header += heading('.',[[''.join( label.split() ),component] for component in ['intensity','t11','t22','t33','t12','t23','t13']])
+            header += heading('.',[[''.join( label.split() ),component]
+                                   for component in ['intensity','t11','t22','t33','t12','t23','t13']])
           myTensor = p.element_tensor(p.element_sequence(e),stat['IndexOfLabel'][label])[n_local]
           newby.append({'label':label,
                         'len':7,

@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys,re
 import damask
 from optparse import OptionParser

@ -32,14 +32,17 @@ parser.set_defaults(label = [],

 (options,filenames) = parser.parse_args()

+pattern = [re.compile('^()(.+)$'),       # label pattern for scalar
+           re.compile('^(\d+_)?(.+)$'),  # label pattern for multidimension
+          ]

# --- loop over input files -------------------------------------------------------------------------

 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False)
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False)
   except: continue
   damask.util.report(scriptName,name)

@ -63,8 +66,9 @@ for name in filenames:
   for i,index in enumerate(indices):
     if index == -1: remarks.append('label {} not present...'.format(options.label[i]))
     else:
+      m = pattern[dimensions[i]>1].match(table.labels[index])                                    # isolate label name
       for j in xrange(dimensions[i]):
-        table.labels[index+j] = table.labels[index+j].replace(options.label[i],options.substitute[i])
+        table.labels[index+j] = table.labels[index+j].replace(m.group(2),options.substitute[i])  # replace name with substitute

   if remarks != []: damask.util.croak(remarks)
   if errors  != []:
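The new pattern pair encodes how table labels are parsed: scalar labels are taken whole, while multidimensional labels carry an index prefix such as 1_ that must survive the substitution; group 2 of ^(\d+_)?(.+)$ is the bare name that gets replaced. A short demonstration of the intended matching (the sample labels are illustrative):

import re

pattern = [re.compile('^()(.+)$'),       # scalar: whole label is the name
           re.compile('^(\d+_)?(.+)$'),  # multidimensional: optional '<i>_' prefix
          ]

for label,dim in [('pos',1),('2_eulerangles',3)]:
    m = pattern[dim>1].match(label)
    print(m.group(2))                    # 'pos', 'eulerangles' -- the prefix is left intact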
@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,math
+import os,sys,math
 import numpy as np
 from optparse import OptionParser
 import damask

@ -41,7 +41,7 @@ parser.set_defaults(rotation = (0.,1.,1.,1.),

 (options,filenames) = parser.parse_args()

-if options.vector == None and options.tensor == None:
+if options.vector is None and options.tensor is None:
   parser.error('no data column specified.')

 toRadians = math.pi/180.0 if options.degrees else 1.0  # rescale degrees to radians

@ -107,8 +107,7 @@ for name in filenames:
     for column in items[datatype]['column']:  # loop over all requested labels
       table.data[column:column+items[datatype]['dim']] = \
         np.dot(R,np.dot(np.array(map(float,table.data[column:column+items[datatype]['dim']])).\
-                        reshape(items[datatype]['shape']),R.transpose())).\
-                        reshape(items[datatype]['dim'])
+                        reshape(items[datatype]['shape']),R.transpose())).reshape(items[datatype]['dim'])

     outputAlive = table.data_write()          # output processed line

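The joined-up chain above applies a passive rotation to every requested tensor column block, T' = R T R^T, one table row at a time. A minimal numpy sketch of the same operation on a single flattened 3x3 tensor (the rotation about z is an arbitrary stand-in):

import numpy as np

angle = np.radians(30.0)
R = np.array([[ np.cos(angle), -np.sin(angle), 0.0],    # rotation about the z axis
              [ np.sin(angle),  np.cos(angle), 0.0],
              [ 0.0,            0.0,           1.0]])

row = [1.0,0.0,0.0, 0.0,2.0,0.0, 0.0,0.0,3.0]           # one table row: flattened tensor
T = np.array(row).reshape(3,3)
rotated = np.dot(R,np.dot(T,R.transpose())).reshape(9)  # T' = R T R^T, flattened back
print(rotated)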
@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string
+import os,sys
 import numpy as np
 from optparse import OptionParser
 import damask

@ -36,7 +36,7 @@ parser.set_defaults(key = [],

 (options,filenames) = parser.parse_args()

-if options.keys == None:
+if options.keys is None:
   parser.error('No sorting column(s) specified.')

 options.keys.reverse()  # numpy sorts with most significant column as last

@ -144,16 +144,12 @@ for file in files:

   posOffset = (shift+[0.5,0.5,0.5])*dimension/resolution
   elementSize = dimension/resolution*packing
-  elem = 1
   for c in xrange(downSized[2]):
     for b in xrange(downSized[1]):
       for a in xrange(downSized[0]):
         datavar[a,b,c,locationCol:locationCol+3] = posOffset + [a,b,c]*elementSize
-        datavar[a,b,c,elemCol] = elem
         table.data = datavar[a,b,c,:].tolist()
         table.data_write()  # output processed line
-        elem += 1


# ------------------------------------------ output result ---------------------------------------

@ -4,17 +4,15 @@
 import os,string,math,sys
 import numpy as np
 from optparse import OptionParser
-from vtk import *
+import vtk
+import damask

 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])

 # -----------------------------
 def getHeader(filename,sizeFastIndex,sizeSlowIndex,stepsize):
-# -----------------------------
-# returns header for ang file
-# step size in micrometer
+  """returns header for ang file step size in micrometer"""

   return '\n'.join([ \
     '# TEM_PIXperUM          1.000000', \
     '# x-star                1.000000', \

@ -50,10 +48,7 @@ def getHeader(filename,sizeFastIndex,sizeSlowIndex,stepsize):

 # -----------------------------
 def positiveRadians(angle):
-# -----------------------------
-# returns positive angle in radians
-# gets angle in degrees
+  """returns positive angle in radians from angle in degrees"""

   angle = math.radians(float(angle))
   while angle < 0.0:
     angle += 2.0 * math.pi

@ -63,14 +58,16 @@ def positiveRadians(angle):

 # -----------------------------
 def getDataLine(angles,x,y,validData=True):
-# -----------------------------
-# returns string of one line in ang file
-# convention in ang file: y coordinate comes first and is fastest index
-# positions in micrometer
+  """
+  returns string of one line in ang file
+
+  convention in ang file: y coordinate comes first and is fastest index
+  positions in micrometer
+  """
   info = {True:  (9999.9, 1.0, 0,99999,0.0),
           False: (  -1.0,-1.0,-1,   -1,1.0)}
-  return '%9.5f %9.5f %9.5f %12.5f %12.5f %6.1f %6.3f %2i %6i %6.3f \n'%(tuple(map(positiveRadians,angles))+(y*1e6,x*1e6)+info[validData])
+  return '%9.5f %9.5f %9.5f %12.5f %12.5f %6.1f %6.3f %2i %6i %6.3f \n'\
+         %(tuple(map(positiveRadians,angles))+(y*1e6,x*1e6)+info[validData])



@ -155,10 +152,9 @@ if options.hexagonal:

 for filename in filenames:

-  # Read the source file
-
   if options.verbose: sys.stdout.write("\nREADING VTK FILE\n")
-  reader = vtkUnstructuredGridReader()
+  # Read the source file
+  reader = vtk.vtkUnstructuredGridReader()
   reader.SetFileName(filename)
   reader.ReadAllScalarsOn()
   reader.ReadAllVectorsOn()

@ -166,7 +162,7 @@ for filename in filenames:
   undeformedMesh = reader.GetOutput()


   # Get euler angles from cell data

   if options.verbose: sys.stdout.write("\nGETTING EULER ANGLES\n")
   angles = {}

@ -177,14 +173,14 @@ for filename in filenames:
       if options.verbose: sys.stdout.write("  found scalar with name %s\n"%scalarName)
   if len(angles) < 3:  # found data for all three euler angles?
     for label in options.eulerLabel:
-      if not label in angles.keys():
+      if label not in angles.keys():
         parser.error('Could not find scalar data with name %s'%label)


   # Get deformed mesh

   if options.verbose: sys.stdout.write("\nDEFORM MESH\n")
-  warpVector = vtkWarpVector()
+  warpVector = vtk.vtkWarpVector()
   undeformedMesh.GetPointData().SetActiveVectors(options.dispLabel)
   warpVector.SetInput(undeformedMesh)
   warpVector.Update()

@ -197,29 +193,29 @@ for filename in filenames:
     sys.stdout.write("  z (% .8f % .8f)\n"%(box[4],box[5]))


   # Get cell centers of deformed mesh (position of ips)

   if options.verbose: sys.stdout.write("\nGETTING CELL CENTERS OF DEFORMED MESH\n")
-  cellCenter = vtkCellCenters()
+  cellCenter = vtk.vtkCellCenters()
   cellCenter.SetVertexCells(0)  # do not generate vertex cells, just points
   cellCenter.SetInput(deformedMesh)
   cellCenter.Update()
   meshIPs = cellCenter.GetOutput()


   # Get outer surface of deformed mesh

   if options.verbose: sys.stdout.write("\nGETTING OUTER SURFACE OF DEFORMED MESH\n")
-  surfaceFilter = vtkDataSetSurfaceFilter()
+  surfaceFilter = vtk.vtkDataSetSurfaceFilter()
   surfaceFilter.SetInput(deformedMesh)
   surfaceFilter.Update()
   surface = surfaceFilter.GetOutput()


   # Get coordinate system for ang files
   # z-vector is normal to slices
   # x-vector corresponds to the up-direction
   # "R" rotates coordinates from the mesh system into the TSL system

   if options.verbose: sys.stdout.write("\nGETTING COORDINATE SYSTEM FOR ANG FILES\n")
|
||||||
z = np.array(options.normal,dtype='float')
|
z = np.array(options.normal,dtype='float')
|
||||||
|
@ -235,7 +231,7 @@ for filename in filenames:
|
||||||
sys.stdout.write(" z (% .8f % .8f % .8f)\n"%tuple(z))
|
sys.stdout.write(" z (% .8f % .8f % .8f)\n"%tuple(z))
|
||||||
|
|
||||||
|
|
||||||
# Get bounding box in rotated system (x,y,z)
|
# Get bounding box in rotated system (x,y,z)
|
||||||
|
|
||||||
if options.verbose: sys.stdout.write("\nGETTING BOUNDING BOX IN ROTATED SYSTEM\n")
|
if options.verbose: sys.stdout.write("\nGETTING BOUNDING BOX IN ROTATED SYSTEM\n")
|
||||||
rotatedbox = [[np.inf,-np.inf] for i in range(3)] # bounding box in rotated TSL system
|
rotatedbox = [[np.inf,-np.inf] for i in range(3)] # bounding box in rotated TSL system
|
||||||
|
@ -254,8 +250,8 @@ for filename in filenames:
|
||||||
sys.stdout.write(" z (% .8f % .8f)\n"%tuple(rotatedbox[2]))
|
sys.stdout.write(" z (% .8f % .8f)\n"%tuple(rotatedbox[2]))
|
||||||
|
|
||||||
|
|
||||||
# Correct bounding box so that a multiplicity of the resolution fits into it
|
# Correct bounding box so that a multiplicity of the resolution fits into it
|
||||||
# and get number of points and extent in each (rotated) axis direction
|
# and get number of points and extent in each (rotated) axis direction
|
||||||
|
|
||||||
if options.verbose: sys.stdout.write("\nCORRECTING EXTENT OF BOUNDING BOX IN ROTATED SYSTEM\n")
|
if options.verbose: sys.stdout.write("\nCORRECTING EXTENT OF BOUNDING BOX IN ROTATED SYSTEM\n")
|
||||||
correction = []
|
correction = []
|
||||||
|
@ -284,12 +280,12 @@ for filename in filenames:
|
||||||
sys.stdout.write(" z (% .8f % .8f)\n"%tuple(rotatedbox[2]))
|
sys.stdout.write(" z (% .8f % .8f)\n"%tuple(rotatedbox[2]))
|
||||||
|
|
||||||
|
|
||||||
# Generate new regular point grid for ang files
|
# Generate new regular point grid for ang files
|
||||||
# Use "polydata" object with points as single vertices
|
# Use "polydata" object with points as single vertices
|
||||||
# beware of TSL convention: y direction is fastest index
|
# beware of TSL convention: y direction is fastest index
|
||||||
|
|
||||||
if options.verbose: sys.stdout.write("\nGENERATING POINTS FOR POINT GRID")
|
if options.verbose: sys.stdout.write("\nGENERATING POINTS FOR POINT GRID")
|
||||||
points = vtkPoints()
|
points = vtk.vtkPoints()
|
||||||
for k in xrange(Npoints[2]):
|
for k in xrange(Npoints[2]):
|
||||||
for j in xrange(Npoints[0]):
|
for j in xrange(Npoints[0]):
|
||||||
for i in xrange(Npoints[1]): # y is fastest index
|
for i in xrange(Npoints[1]): # y is fastest index
|
||||||
|
@ -309,9 +305,9 @@ for filename in filenames:
|
||||||
sys.stdout.write(" grid resolution: %.8f\n"%options.resolution)
|
sys.stdout.write(" grid resolution: %.8f\n"%options.resolution)
|
||||||
|
|
||||||
if options.verbose: sys.stdout.write("\nGENERATING VERTICES FOR POINT GRID")
|
if options.verbose: sys.stdout.write("\nGENERATING VERTICES FOR POINT GRID")
|
||||||
vertices = vtkCellArray()
|
vertices = vtk.vtkCellArray()
|
||||||
for i in xrange(totalNpoints):
|
for i in xrange(totalNpoints):
|
||||||
vertex = vtkVertex()
|
vertex = vtk.vtkVertex()
|
||||||
vertex.GetPointIds().SetId(0,i) # each vertex consists of exactly one (index 0) point with ID "i"
|
vertex.GetPointIds().SetId(0,i) # each vertex consists of exactly one (index 0) point with ID "i"
|
||||||
vertices.InsertNextCell(vertex)
|
vertices.InsertNextCell(vertex)
|
||||||
if options.verbose:
|
if options.verbose:
|
||||||
|
@ -319,34 +315,35 @@ for filename in filenames:
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
|
|
||||||
if options.verbose: sys.stdout.write("\n\nGENERATING POINT GRID\n")
|
if options.verbose: sys.stdout.write("\n\nGENERATING POINT GRID\n")
|
||||||
pointgrid = vtkPolyData()
|
pointgrid = vtk.vtkPolyData()
|
||||||
pointgrid.SetPoints(points)
|
pointgrid.SetPoints(points)
|
||||||
pointgrid.SetVerts(vertices)
|
pointgrid.SetVerts(vertices)
|
||||||
pointgrid.Update()
|
pointgrid.Update()
|
||||||
|
|
||||||
|
|
||||||
# Find out which points reside inside mesh geometry
|
# Find out which points reside inside mesh geometry
|
||||||
|
|
||||||
if options.verbose: sys.stdout.write("\nIDENTIFYING POINTS INSIDE MESH GEOMETRY\n")
|
if options.verbose: sys.stdout.write("\nIDENTIFYING POINTS INSIDE MESH GEOMETRY\n")
|
||||||
enclosedPoints = vtkSelectEnclosedPoints()
|
enclosedPoints = vtk.vtkSelectEnclosedPoints()
|
||||||
enclosedPoints.SetSurface(surface)
|
enclosedPoints.SetSurface(surface)
|
||||||
enclosedPoints.SetInput(pointgrid)
|
enclosedPoints.SetInput(pointgrid)
|
||||||
enclosedPoints.Update()
|
enclosedPoints.Update()
|
||||||
|
|
||||||
|
|
||||||
# Build kdtree from mesh IPs and match mesh IPs to point grid
|
# Build kdtree from mesh IPs and match mesh IPs to point grid
|
||||||
|
|
||||||
if options.verbose: sys.stdout.write("\nBUILDING MAPPING OF GRID POINTS")
|
if options.verbose: sys.stdout.write("\nBUILDING MAPPING OF GRID POINTS")
|
||||||
kdTree = vtkKdTree()
|
kdTree = vtk.vtkKdTree()
|
||||||
kdTree.BuildLocatorFromPoints(meshIPs.GetPoints())
|
kdTree.BuildLocatorFromPoints(meshIPs.GetPoints())
|
||||||
gridToMesh = []
|
gridToMesh = []
|
||||||
ids = vtkIdList()
|
ids = vtk.vtkIdList()
|
||||||
NenclosedPoints = 0
|
NenclosedPoints = 0
|
||||||
for i in range(pointgrid.GetNumberOfPoints()):
|
for i in range(pointgrid.GetNumberOfPoints()):
|
||||||
gridToMesh.append([])
|
gridToMesh.append([])
|
||||||
if enclosedPoints.IsInside(i):
|
if enclosedPoints.IsInside(i):
|
||||||
NenclosedPoints += 1
|
NenclosedPoints += 1
|
||||||
kdTree.FindClosestNPoints(options.interpolation,pointgrid.GetPoint(i),ids) # here one could use faster(?) "FindClosestPoint" if only first nearest neighbor required
|
# here one could use faster(?) "FindClosestPoint" if only first nearest neighbor required
|
||||||
|
kdTree.FindClosestNPoints(options.interpolation,pointgrid.GetPoint(i),ids)
|
||||||
for j in range(ids.GetNumberOfIds()):
|
for j in range(ids.GetNumberOfIds()):
|
||||||
gridToMesh[-1].extend([ids.GetId(j)])
|
gridToMesh[-1].extend([ids.GetId(j)])
|
||||||
if options.verbose:
|
if options.verbose:
|
||||||
|
@ -358,7 +355,7 @@ for filename in filenames:
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# ITERATE OVER SLICES AND CREATE ANG FILE
|
# ITERATE OVER SLICES AND CREATE ANG FILE
|
||||||
|
|
||||||
if options.verbose:
|
if options.verbose:
|
||||||
sys.stdout.write("\nWRITING OUT ANG FILES\n")
|
sys.stdout.write("\nWRITING OUT ANG FILES\n")
|
||||||
|
@ -404,13 +401,13 @@ for filename in filenames:
|
||||||
angfile.write(getDataLine(interpolatedPhi,x,y,enclosedPoints.IsInside(i)))
|
angfile.write(getDataLine(interpolatedPhi,x,y,enclosedPoints.IsInside(i)))
|
||||||
|
|
||||||
|
|
||||||
# Visualize slices
|
# Visualize slices
|
||||||
|
|
||||||
if options.visualize:
|
if options.visualize:
|
||||||
meshMapper = vtkDataSetMapper()
|
meshMapper = vtk.vtkDataSetMapper()
|
||||||
meshMapper.SetInput(surface)
|
meshMapper.SetInput(surface)
|
||||||
meshMapper.ScalarVisibilityOff() # do not use scalar data for coloring
|
meshMapper.ScalarVisibilityOff() # do not use scalar data for coloring
|
||||||
meshActor = vtkActor()
|
meshActor = vtk.vtkActor()
|
||||||
meshActor.SetMapper(meshMapper)
|
meshActor.SetMapper(meshMapper)
|
||||||
meshActor.GetProperty().SetOpacity(0.2)
|
meshActor.GetProperty().SetOpacity(0.2)
|
||||||
meshActor.GetProperty().SetColor(1.0,1.0,0)
|
meshActor.GetProperty().SetColor(1.0,1.0,0)
|
||||||
|
@ -418,43 +415,43 @@ for filename in filenames:
|
||||||
# meshActor.GetProperty().SetEdgeColor(1,1,0.5)
|
# meshActor.GetProperty().SetEdgeColor(1,1,0.5)
|
||||||
# meshActor.GetProperty().EdgeVisibilityOn()
|
# meshActor.GetProperty().EdgeVisibilityOn()
|
||||||
|
|
||||||
boxpoints = vtkPoints()
|
boxpoints = vtk.vtkPoints()
|
||||||
for n in range(8):
|
for n in range(8):
|
||||||
P = [rotatedbox[0][(n/1)%2],
|
P = [rotatedbox[0][(n/1)%2],
|
||||||
rotatedbox[1][(n/2)%2],
|
rotatedbox[1][(n/2)%2],
|
||||||
rotatedbox[2][(n/4)%2]]
|
rotatedbox[2][(n/4)%2]]
|
||||||
boxpoints.InsertNextPoint(list(np.dot(R.T,np.array(P))))
|
boxpoints.InsertNextPoint(list(np.dot(R.T,np.array(P))))
|
||||||
box = vtkHexahedron()
|
box = vtk.vtkHexahedron()
|
||||||
for n,i in enumerate([0,1,3,2,4,5,7,6]):
|
for n,i in enumerate([0,1,3,2,4,5,7,6]):
|
||||||
box.GetPointIds().SetId(n,i)
|
box.GetPointIds().SetId(n,i)
|
||||||
boxgrid = vtkUnstructuredGrid()
|
boxgrid = vtk.vtkUnstructuredGrid()
|
||||||
boxgrid.SetPoints(boxpoints)
|
boxgrid.SetPoints(boxpoints)
|
||||||
boxgrid.InsertNextCell(box.GetCellType(), box.GetPointIds())
|
boxgrid.InsertNextCell(box.GetCellType(), box.GetPointIds())
|
||||||
boxsurfaceFilter = vtkDataSetSurfaceFilter()
|
boxsurfaceFilter = vtk.vtkDataSetSurfaceFilter()
|
||||||
boxsurfaceFilter.SetInput(boxgrid)
|
boxsurfaceFilter.SetInput(boxgrid)
|
||||||
boxsurfaceFilter.Update()
|
boxsurfaceFilter.Update()
|
||||||
boxsurface = boxsurfaceFilter.GetOutput()
|
boxsurface = boxsurfaceFilter.GetOutput()
|
||||||
|
|
||||||
boxMapper = vtkDataSetMapper()
|
boxMapper = vtk.vtkDataSetMapper()
|
||||||
boxMapper.SetInput(boxsurface)
|
boxMapper.SetInput(boxsurface)
|
||||||
boxActor = vtkActor()
|
boxActor = vtk.vtkActor()
|
||||||
boxActor.SetMapper(boxMapper)
|
boxActor.SetMapper(boxMapper)
|
||||||
boxActor.GetProperty().SetLineWidth(2.0)
|
boxActor.GetProperty().SetLineWidth(2.0)
|
||||||
boxActor.GetProperty().SetRepresentationToWireframe()
|
boxActor.GetProperty().SetRepresentationToWireframe()
|
||||||
|
|
||||||
gridMapper = vtkDataSetMapper()
|
gridMapper = vtk.vtkDataSetMapper()
|
||||||
gridMapper.SetInput(pointgrid)
|
gridMapper.SetInput(pointgrid)
|
||||||
gridActor = vtkActor()
|
gridActor = vtk.vtkActor()
|
||||||
gridActor.SetMapper(gridMapper)
|
gridActor.SetMapper(gridMapper)
|
||||||
gridActor.GetProperty().SetColor(0,0,0)
|
gridActor.GetProperty().SetColor(0,0,0)
|
||||||
gridActor.GetProperty().SetPointSize(3)
|
gridActor.GetProperty().SetPointSize(3)
|
||||||
|
|
||||||
|
|
||||||
renderer = vtkRenderer()
|
renderer = vtk.vtkRenderer()
|
||||||
renderWindow = vtkRenderWindow()
|
renderWindow = vtk.vtkRenderWindow()
|
||||||
renderWindow.FullScreenOn()
|
renderWindow.FullScreenOn()
|
||||||
renderWindow.AddRenderer(renderer)
|
renderWindow.AddRenderer(renderer)
|
||||||
renderWindowInteractor = vtkRenderWindowInteractor()
|
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
|
||||||
renderWindowInteractor.SetRenderWindow(renderWindow)
|
renderWindowInteractor.SetRenderWindow(renderWindow)
|
||||||
renderer.AddActor(meshActor)
|
renderer.AddActor(meshActor)
|
||||||
renderer.AddActor(boxActor)
|
renderer.AddActor(boxActor)
|
||||||
|
@ -462,6 +459,6 @@ for filename in filenames:
|
||||||
renderer.SetBackground(1,1,1)
|
renderer.SetBackground(1,1,1)
|
||||||
|
|
||||||
renderWindow.Render()
|
renderWindow.Render()
|
||||||
renderWindowInteractor.SetInteractorStyle(vtkInteractorStyleTrackballCamera())
|
renderWindowInteractor.SetInteractorStyle(vtk.vtkInteractorStyleTrackballCamera())
|
||||||
renderWindowInteractor.Start()
|
renderWindowInteractor.Start()
|
||||||
|
|
||||||
|
|
|
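The grid-to-mesh mapping above pairs every enclosed grid point with its nearest integration points through a k-d tree. A minimal standalone sketch of the same VTK pattern, assuming a VTK Python build of this vintage (the sample coordinates are illustrative):

    import vtk

    # build a locator over an existing vtkPoints collection (here: three sample points)
    points = vtk.vtkPoints()
    for p in [(0.,0.,0.), (1.,0.,0.), (0.,1.,0.)]:
      points.InsertNextPoint(p)

    kdTree = vtk.vtkKdTree()
    kdTree.BuildLocatorFromPoints(points)

    # query the N closest points; results come back through a vtkIdList
    ids = vtk.vtkIdList()
    kdTree.FindClosestNPoints(2, (0.1, 0.1, 0.), ids)
    neighbors = [ids.GetId(j) for j in range(ids.GetNumberOfIds())]
    print(neighbors)                          # indices into the original point set, e.g. [0, 1]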
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,string,glob,re
+import os,glob,re
 import damask
 from optparse import OptionParser

@@ -10,7 +10,6 @@ scriptID = ' '.join([scriptName,damask.version])

 # -----------------------------
 def findTag(filename,tag):
-# -----------------------------

   with open(filename,'r') as myfile:
     mypattern = re.compile(str(tag))
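The hunk above cuts off inside findTag. A plausible completion of such a helper, assuming it simply reports whether the regex tag occurs anywhere in the file (the return convention is an assumption, not taken from the commit):

    import re

    def findTag(filename, tag):
      # scan the file line by line for a regex match of "tag"
      with open(filename, 'r') as myfile:
        mypattern = re.compile(str(tag))
        for line in myfile:
          if mypattern.search(line):          # assumption: first match decides
            return True
      return False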
@@ -1,8 +1,9 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,vtk
+import os,vtk
 import damask
+from collections import defaultdict
 from optparse import OptionParser

 scriptName = os.path.splitext(os.path.basename(__file__))[0]

@@ -17,125 +18,157 @@ Add scalar and RGB tuples from ASCIItable to existing VTK point cloud (.vtp).

 """, version = scriptID)

-parser.add_option('-v', '--vtk', dest='vtk', \
+parser.add_option(      '--vtk',
+                  dest = 'vtk',
+                  type = 'string', metavar = 'string',
                   help = 'VTK file name')
+parser.add_option(      '--inplace',
+                  dest = 'inplace',
+                  action = 'store_true',
+                  help = 'modify VTK file in-place')
+parser.add_option('-r', '--render',
+                  dest = 'render',
+                  action = 'store_true',
+                  help = 'open output in VTK render window')
 parser.add_option('-s', '--scalar', dest='scalar', action='extend', \
                   help = 'scalar values')
+parser.add_option('-v', '--vector',
+                  dest = 'vector',
+                  action = 'extend', metavar = '<string LIST>',
+                  help = 'vector value label(s)')
 parser.add_option('-c', '--color', dest='color', action='extend', \
                   help = 'RGB color tuples')

-parser.set_defaults(scalar = [])
-parser.set_defaults(color = [])
+parser.set_defaults(scalar = [],
+                    vector = [],
+                    color = [],
+                    inplace = False,
+                    render = False,
+                   )

 (options, filenames) = parser.parse_args()

-datainfo = {                                                     # list of requested labels per datatype
-             'scalar':     {'len':1,
-                            'label':[]},
-             'color':      {'len':3,
-                            'label':[]},
-           }
-
-if not os.path.exists(options.vtk):
-  parser.error('VTK file does not exist'); sys.exit()
+if not options.vtk:                 parser.error('No VTK file specified.')
+if not os.path.exists(options.vtk): parser.error('VTK file does not exist.')

 reader = vtk.vtkXMLPolyDataReader()
 reader.SetFileName(options.vtk)
 reader.Update()
 Npoints   = reader.GetNumberOfPoints()
 Ncells    = reader.GetNumberOfCells()
 Nvertices = reader.GetNumberOfVerts()
 Polydata  = reader.GetOutput()

 if Npoints != Ncells or Npoints != Nvertices:
-  parser.error('Number of points, cells, and vertices in VTK differ from each other'); sys.exit()
-if options.scalar != None:  datainfo['scalar']['label'] += options.scalar
-if options.color  != None:  datainfo['color']['label']  += options.color
+  parser.error('Number of points, cells, and vertices in VTK differ from each other.')

-# ------------------------------------------ setup file handles ---------------------------------------
+damask.util.croak('{}: {} points, {} vertices, and {} cells...'.format(options.vtk,Npoints,Nvertices,Ncells))

-files = []
-if filenames == []:
-  files.append({'name':'STDIN', 'input':sys.stdin, 'output':sys.stdout, 'croak':sys.stderr})
-else:
-  for name in filenames:
-    if os.path.exists(name):
-      files.append({'name':name, 'input':open(name), 'output':sys.stderr, 'croak':sys.stderr})
+# --- loop over input files -------------------------------------------------------------------------

-#--- loop over input files ------------------------------------------------------------------------
-for file in files:
-  if file['name'] != 'STDIN': file['croak'].write('\033[1m'+scriptName+'\033[0m: '+file['name']+'\n')
-  else: file['croak'].write('\033[1m'+scriptName+'\033[0m\n')
-
-  table = damask.ASCIItable(file['input'],file['output'],False)  # make unbuffered ASCII_table
-  table.head_read()                                              # read ASCII header info
+if filenames == []: filenames = [None]

-# --------------- figure out columns to process
-  active = {}
-  column = {}
-  array  = {}
+for name in filenames:
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False,
+                                    readonly = True)
+  except: continue
+  damask.util.report(scriptName, name)

-  for datatype,info in datainfo.items():
-    for label in info['label']:
-      foundIt = False
-      for key in ['1_'+label,label]:
-        if key in table.labels:
-          foundIt = True
-          if datatype not in active: active[datatype] = []
-          if datatype not in column: column[datatype] = {}
-          if datatype not in array:  array[datatype]  = {}
-          active[datatype].append(label)
-          column[datatype][label] = table.labels.index(key)      # remember columns of requested data
-          if datatype == 'scalar':
-            array[datatype][label] = vtk.vtkDoubleArray()
-            array[datatype][label].SetNumberOfComponents(1)
-            array[datatype][label].SetName(label)
-          elif datatype == 'color':
-            array[datatype][label] = vtk.vtkUnsignedCharArray()
-            array[datatype][label].SetNumberOfComponents(3)
-            array[datatype][label].SetName(label)
-      if not foundIt:
-        file['croak'].write('column %s not found...\n'%label)
+# --- interpret header ----------------------------------------------------------------------------
+
+  table.head_read()
+
+  remarks = []
+  errors  = []
+  VTKarray = {}
+  active = defaultdict(list)
+
+  for datatype,dimension,label in [['scalar',1,options.scalar],
+                                   ['vector',3,options.vector],
+                                   ['color',3,options.color],
+                                  ]:
+    for i,dim in enumerate(table.label_dimension(label)):
+      me = label[i]
+      if dim == -1:         remarks.append('{} "{}" not found...'.format(datatype,me))
+      elif dim > dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
+      else:
+        remarks.append('adding {} "{}"...'.format(datatype,me))
+        active[datatype].append(me)
+
+        if   datatype in ['scalar','vector']: VTKarray[me] = vtk.vtkDoubleArray()
+        elif datatype == 'color':             VTKarray[me] = vtk.vtkUnsignedCharArray()
+
+        VTKarray[me].SetNumberOfComponents(dimension)
+        VTKarray[me].SetName(label[i])
+
+  if remarks != []: damask.util.croak(remarks)
+  if errors  != []:
+    damask.util.croak(errors)
+    table.close(dismiss = True)
+    continue

 # ------------------------------------------ process data ---------------------------------------

   while table.data_read():                                       # read next data line of ASCII table

     for datatype,labels in active.items():                       # loop over scalar,color
-      for label in labels:                                       # loop over all requested items
-        theData = table.data[column[datatype][label]:\
-                             column[datatype][label]+datainfo[datatype]['len']]  # read strings
-        if datatype == 'color':
-          theData = map(lambda x: int(255.*float(x)),theData)
-          array[datatype][label].InsertNextTuple3(theData[0],theData[1],theData[2],)
-        elif datatype == 'scalar':
-          array[datatype][label].InsertNextValue(float(theData[0]))
+      for me in labels:                                          # loop over all requested items
+        theData = [table.data[i] for i in table.label_indexrange(me)]  # read strings
+        if   datatype == 'color':  VTKarray[me].InsertNextTuple3(*map(lambda x: int(255.*float(x)),theData))
+        elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
+        elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))

   table.input_close()                                            # close input ASCII table

 # ------------------------------------------ add data ---------------------------------------

   for datatype,labels in active.items():                         # loop over scalar,color
     if datatype == 'color':
-      Polydata.GetPointData().SetScalars(array[datatype][labels[0]])
-      Polydata.GetCellData().SetScalars(array[datatype][labels[0]])
-    for label in labels:                                         # loop over all requested items
-      Polydata.GetPointData().AddArray(array[datatype][label])
-      Polydata.GetCellData().AddArray(array[datatype][label])
+      Polydata.GetPointData().SetScalars(VTKarray[active['color'][0]])
+      Polydata.GetCellData().SetScalars(VTKarray[active['color'][0]])
+    for me in labels:                                            # loop over all requested items
+      Polydata.GetPointData().AddArray(VTKarray[me])
+      Polydata.GetCellData().AddArray(VTKarray[me])

   Polydata.Modified()
-  if vtk.VTK_MAJOR_VERSION <= 5:
-    Polydata.Update()
+  if vtk.VTK_MAJOR_VERSION <= 5: Polydata.Update()

 # ------------------------------------------ output result ---------------------------------------

   writer = vtk.vtkXMLPolyDataWriter()
   writer.SetDataModeToBinary()
   writer.SetCompressorTypeToZLib()
-  writer.SetFileName(os.path.splitext(options.vtk)[0]+'_added.vtp')
-  if vtk.VTK_MAJOR_VERSION <= 5:
-    writer.SetInput(Polydata)
-  else:
-    writer.SetInputData(Polydata)
-  writer.Write()
+  writer.SetFileName(os.path.splitext(options.vtk)[0]+('.vtp' if options.inplace else '_added.vtp'))
+  if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(Polydata)
+  else:                          writer.SetInputData(Polydata)
+  writer.Write()
+
+# ------------------------------------------ render result ---------------------------------------
+
+if options.render:
+  mapper = vtk.vtkDataSetMapper()
+  mapper.SetInputData(Polydata)
+  actor = vtk.vtkActor()
+  actor.SetMapper(mapper)
+
+# Create the graphics structure. The renderer renders into the
+# render window. The render window interactor captures mouse events
+# and will perform appropriate camera or actor manipulation
+# depending on the nature of the events.
+
+  ren = vtk.vtkRenderer()
+
+  renWin = vtk.vtkRenderWindow()
+  renWin.AddRenderer(ren)
+
+  ren.AddActor(actor)
+  ren.SetBackground(1, 1, 1)
+  renWin.SetSize(200, 200)
+
+  iren = vtk.vtkRenderWindowInteractor()
+  iren.SetRenderWindow(renWin)
+
+  iren.Initialize()
+  renWin.Render()
+  iren.Start()
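Several scripts in this commit branch on vtk.VTK_MAJOR_VERSION because VTK 6 replaced the pipeline call SetInput() with SetInputData(). A small compatibility helper in the same spirit (the helper name is made up for illustration; only the two VTK calls are taken from the commit):

    import vtk

    def set_input(filter_or_writer, data):
      # VTK <= 5 connected data with SetInput(); VTK >= 6 uses SetInputData()
      if vtk.VTK_MAJOR_VERSION <= 5:
        filter_or_writer.SetInput(data)
      else:
        filter_or_writer.SetInputData(data)

    writer = vtk.vtkXMLPolyDataWriter()
    set_input(writer, vtk.vtkPolyData())      # works on either VTK generation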
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,vtk
+import os,vtk
 import damask
 from collections import defaultdict
 from optparse import OptionParser

@@ -30,10 +30,6 @@ parser.add_option('-r', '--render',
                   dest = 'render',
                   action = 'store_true',
                   help = 'open output in VTK render window')
-parser.add_option('-m', '--mode',
-                  dest = 'mode',
-                  type = 'choice', metavar = 'string', choices = ['cell', 'point'],
-                  help = 'cell-centered or point-centered data')
 parser.add_option('-s', '--scalar',
                   dest = 'scalar',
                   action = 'extend', metavar = '<string LIST>',

@@ -56,7 +52,6 @@ parser.set_defaults(scalar = [],

 (options, filenames) = parser.parse_args()

-if not options.mode:                parser.error('No data mode specified.')
 if not options.vtk:                 parser.error('No VTK file specified.')
 if not os.path.exists(options.vtk): parser.error('VTK file does not exist.')

@@ -83,9 +78,9 @@ damask.util.croak('{}: {} points and {} cells...'.format(options.vtk,Npoints,Ncells))
 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False, readonly = True)
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False,
+                                    readonly = True)
   except: continue
   damask.util.report(scriptName, name)

@@ -124,8 +119,11 @@ for name in filenames:

 # ------------------------------------------ process data ---------------------------------------

+  datacount = 0
+
   while table.data_read():                                       # read next data line of ASCII table

+    datacount += 1                                               # count data lines
     for datatype,labels in active.items():                       # loop over scalar,color
       for me in labels:                                          # loop over all requested items
         theData = [table.data[i] for i in table.label_indexrange(me)]  # read strings

@@ -133,15 +131,25 @@ for name in filenames:
         elif datatype == 'vector': VTKarray[me].InsertNextTuple3(*map(float,theData))
         elif datatype == 'scalar': VTKarray[me].InsertNextValue(float(theData[0]))

+  table.close()                                                  # close input ASCII table
+
 # ------------------------------------------ add data ---------------------------------------

+  if   datacount == Npoints: mode = 'point'
+  elif datacount == Ncells:  mode = 'cell'
+  else:
+    damask.util.croak('Data count is incompatible with grid...')
+    continue
+
+  damask.util.croak('{} mode...'.format(mode))
+
   for datatype,labels in active.items():                         # loop over scalar,color
     if datatype == 'color':
-      if   options.mode == 'cell':  rGrid.GetCellData().SetScalars(VTKarray[active['color'][0]])
-      elif options.mode == 'point': rGrid.GetPointData().SetScalars(VTKarray[active['color'][0]])
+      if   mode == 'cell':  rGrid.GetCellData().SetScalars(VTKarray[active['color'][0]])
+      elif mode == 'point': rGrid.GetPointData().SetScalars(VTKarray[active['color'][0]])
     for me in labels:                                            # loop over all requested items
-      if   options.mode == 'cell':  rGrid.GetCellData().AddArray(VTKarray[me])
-      elif options.mode == 'point': rGrid.GetPointData().AddArray(VTKarray[me])
+      if   mode == 'cell':  rGrid.GetCellData().AddArray(VTKarray[me])
+      elif mode == 'point': rGrid.GetPointData().AddArray(VTKarray[me])

   rGrid.Modified()
   if vtk.VTK_MAJOR_VERSION <= 5: rGrid.Update()

@@ -151,7 +159,7 @@ for name in filenames:
   writer = vtk.vtkXMLRectilinearGridWriter()
   writer.SetDataModeToBinary()
   writer.SetCompressorTypeToZLib()
-  writer.SetFileName(os.path.splitext(options.vtk)[0]+(''     if options.inplace else '_added.vtr'))
+  writer.SetFileName(os.path.splitext(options.vtk)[0]+('.vtr' if options.inplace else '_added.vtr'))
   if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(rGrid)
   else:                          writer.SetInputData(rGrid)
   writer.Write()

@@ -164,10 +172,10 @@ if options.render:
   actor = vtk.vtkActor()
   actor.SetMapper(mapper)

 # Create the graphics structure. The renderer renders into the
 # render window. The render window interactor captures mouse events
 # and will perform appropriate camera or actor manipulation
 # depending on the nature of the events.

   ren = vtk.vtkRenderer()

@@ -181,9 +189,6 @@ if options.render:
   iren = vtk.vtkRenderWindowInteractor()
   iren.SetRenderWindow(renWin)

-#ren.ResetCamera()
-#ren.GetActiveCamera().Zoom(1.5)
-
   iren.Initialize()
   renWin.Render()
   iren.Start()
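With the --mode flag gone, the script now infers whether the table carries point or cell data by counting its data lines against the grid. The decision reduces to a pair of comparisons; a self-contained sketch (the counts are illustrative):

    def infer_mode(datacount, Npoints, Ncells):
      # one table row per grid point -> point data; one row per cell -> cell data
      if datacount == Npoints: return 'point'
      if datacount == Ncells:  return 'cell'
      return None                             # incompatible with the grid

    # a 3x3x3 point lattice has 27 points and 8 cells
    print(infer_mode(27, 27, 8))              # 'point'
    print(infer_mode( 8, 27, 8))              # 'cell'
    print(infer_mode(10, 27, 8))              # None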
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,vtk
+import os,sys,vtk
 import damask
 from collections import defaultdict
 from optparse import OptionParser

@@ -32,7 +32,7 @@ parser.set_defaults(scalar = [],

 (options, filenames) = parser.parse_args()

-if options.vtk == None or not os.path.exists(options.vtk):
+if options.vtk is None or not os.path.exists(options.vtk):
   parser.error('VTK file does not exist')

 if os.path.splitext(options.vtk)[1] == '.vtu':
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,vtk
+import os,sys,vtk
 import numpy as np
 import damask
 from optparse import OptionParser

@@ -18,12 +18,12 @@ Produce a VTK point cloud dataset based on coordinates given in an ASCIItable.

 """, version = scriptID)

-parser.add_option('-d', '--deformed',
-                  dest = 'deformed',
+parser.add_option('-c', '--coordinates',
+                  dest = 'pos',
                   type = 'string', metavar = 'string',
-                  help = 'deformed coordinate label [%default]')
+                  help = 'coordinate label [%default]')

-parser.set_defaults(deformed = 'ipdeformedcoord'
+parser.set_defaults(pos = 'pos'
                    )

 (options, filenames) = parser.parse_args()

@@ -46,9 +46,9 @@ for name in filenames:

   errors  = []
   remarks = []
-  coordDim = table.label_dimension(options.deformed)
-  if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.deformed))
-  elif coordDim < 3:         remarks.append('appending {} dimensions to coordinates "{}"...'.format(3-coordDim,options.deformed))
+  coordDim = table.label_dimension(options.pos)
+  if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.pos))
+  elif coordDim < 3:         remarks.append('appending {} dimensions to coordinates "{}"...'.format(3-coordDim,options.pos))

   if remarks != []: damask.util.croak(remarks)
   if errors  != []:

@@ -58,7 +58,7 @@ for name in filenames:

 # ------------------------------------------ process data ---------------------------------------

-  table.data_readArray(options.deformed)
+  table.data_readArray(options.pos)
   if len(table.data.shape) < 2: table.data.shape += (1,)         # expand to 2D shape
   if table.data.shape[1] < 3:
     table.data = np.hstack((table.data,

@@ -86,8 +86,8 @@ for name in filenames:
     (directory,filename) = os.path.split(name)
     writer.SetDataModeToBinary()
     writer.SetCompressorTypeToZLib()
-    writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]
+    writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]\
                                               +'.'+writer.GetDefaultFileExtension()))
   else:
     writer = vtk.vtkDataSetWriter()
     writer.WriteToOutputStringOn()

@@ -96,6 +96,6 @@ for name in filenames:
   if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(Polydata)
   else:                          writer.SetInputData(Polydata)
   writer.Write()
-  if name == None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
+  if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

   table.close()
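The point-cloud scripts pad 1D or 2D coordinate columns up to three components before handing them to VTK. The same np.hstack idiom in isolation, with illustrative data:

    import numpy as np

    data = np.array([[0.0, 1.0],
                     [2.0, 3.0]])                       # two points with x,y only
    if len(data.shape) < 2: data.shape += (1,)          # expand to 2D shape
    if data.shape[1] < 3:
      data = np.hstack((data,
                        np.zeros((data.shape[0],
                                  3-data.shape[1]),dtype='f')))  # fill coords up to 3D with zeros
    print(data)                                         # [[0. 1. 0.], [2. 3. 0.]]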
@@ -24,11 +24,11 @@ parser.add_option('-m', '--mode',
                   type = 'choice', choices = ['cell','point'],
                   help = 'cell-centered or point-centered coordinates ')
 parser.add_option('-c', '--coordinates',
-                  dest = 'position',
+                  dest = 'coords',
                   type = 'string', metavar = 'string',
                   help = 'coordinate label [%default]')
-parser.set_defaults(position ='ipinitialcoord',
-                    mode ='cell'
+parser.set_defaults(coords = 'pos',
+                    mode   = 'cell'
                    )

 (options, filenames) = parser.parse_args()

@@ -38,9 +38,9 @@ parser.set_defaults(position ='ipinitialcoord',
 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False, readonly = True)
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False,
+                                    readonly = True)
   except: continue
   damask.util.report(scriptName,name)

@@ -48,10 +48,13 @@ for name in filenames:

   table.head_read()

-  errors = []
-  if table.label_dimension(options.position) != 3:
-    errors.append('coordinates {} are not a vector.'.format(options.position))
+  remarks = []
+  errors  = []
+  coordDim = table.label_dimension(options.coords)
+  if not 3 >= coordDim >= 1: errors.append('coordinates "{}" need to have one, two, or three dimensions.'.format(options.coords))
+  elif coordDim < 3:         remarks.append('appending {} dimensions to coordinates "{}"...'.format(3-coordDim,options.coords))

+  if remarks != []: damask.util.croak(remarks)
   if errors  != []:
     damask.util.croak(errors)
     table.close(dismiss=True)

@@ -59,7 +62,12 @@ for name in filenames:

 # --------------- figure out size and grid ---------------------------------------------------------

-  table.data_readArray(options.position)
+  table.data_readArray(options.coords)
+  if len(table.data.shape) < 2: table.data.shape += (1,)         # expand to 2D shape
+  if table.data.shape[1] < 3:
+    table.data = np.hstack((table.data,
+                            np.zeros((table.data.shape[0],
+                                      3-table.data.shape[1]),dtype='f')))  # fill coords up to 3D with zeros

   coords = [np.unique(table.data[:,i]) for i in xrange(3)]
   if options.mode == 'cell':

@@ -101,7 +109,7 @@ for name in filenames:
     writer.SetDataModeToBinary()
     writer.SetCompressorTypeToZLib()
     writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0] \
-                                              +'_{}({})'.format(options.position, options.mode) \
+                                              +'_{}({})'.format(options.coords, options.mode) \
                                               +'.'+writer.GetDefaultFileExtension()))
   else:
     writer = vtk.vtkDataSetWriter()

@@ -111,6 +119,6 @@ for name in filenames:
   if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(rGrid)
   else:                          writer.SetInputData(rGrid)
   writer.Write()
-  if name == None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
+  if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

   table.close()
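The rectilinear-grid script recovers the grid axes from the flat coordinate table by taking the unique values of each column. A sketch of that reconstruction, assuming regularly arranged cell-centered input (the sample lattice is illustrative):

    import numpy as np

    # flat list of cell-center coordinates on a 2 x 2 x 1 grid
    data = np.array([[0.5, 0.5, 0.5],
                     [1.5, 0.5, 0.5],
                     [0.5, 1.5, 0.5],
                     [1.5, 1.5, 0.5]])

    coords = [np.unique(data[:,i]) for i in range(3)]   # axis ticks per direction
    grid   = np.array([len(c) for c in coords])         # number of positions per axis
    print(grid)                                         # [2 2 1]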
@@ -1,10 +1,10 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,shutil
+import os,sys,shutil
 import damask
 from optparse import OptionParser
-from vtk import *
+import vtk

 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])

@@ -44,18 +44,16 @@ for filename in filenames:

 for filename in filenames:

-# Read the source file
-
   sys.stdout.write('read file "%s" ...'%filename)
   sys.stdout.flush()
   suffix = os.path.splitext(filename)[1]
   if suffix == '.vtk':
-    reader = vtkUnstructuredGridReader()
+    reader = vtk.vtkUnstructuredGridReader()
     reader.ReadAllScalarsOn()
     reader.ReadAllVectorsOn()
     reader.ReadAllTensorsOn()
   elif suffix == '.vtu':
-    reader = vtkXMLUnstructuredGridReader()
+    reader = vtk.vtkXMLUnstructuredGridReader()
   else:
     parser.error('filetype "%s" not supported'%suffix)
   reader.SetFileName(filename)

@@ -65,7 +63,7 @@ for filename in filenames:
   sys.stdout.flush()


 # Read the scalar data

   scalarData = {}
   scalarsToBeRemoved = []

@@ -83,19 +81,18 @@ for filename in filenames:
       scalarsToBeRemoved.append(scalarName)
   for scalarName in scalarsToBeRemoved:
     uGrid.GetCellData().RemoveArray(scalarName)
-  # uGrid.UpdateData()
   sys.stdout.write('\rread scalar data done\n')
   sys.stdout.flush()


 # Convert the scalar data to vector data

   NscalarData = len(scalarData)
   for n,label in enumerate(scalarData):
     sys.stdout.write("\rconvert to vector data %d%%" %(100*n/NscalarData))
     sys.stdout.flush()
     Nvalues = scalarData[label][0].GetNumberOfTuples()
-    vectorData = vtkDoubleArray()
+    vectorData = vtk.vtkDoubleArray()
     vectorData.SetName(label)
     vectorData.SetNumberOfComponents(3)                          # set this before NumberOfTuples !!!
     vectorData.SetNumberOfTuples(Nvalues)

@@ -103,16 +100,15 @@ for filename in filenames:
     for j in range(3):
       vectorData.SetComponent(i,j,scalarData[label][j].GetValue(i))
     uGrid.GetCellData().AddArray(vectorData)
-    # uGrid.GetCellData().SetActiveVectors(label)
   sys.stdout.write('\rconvert to vector data done\n')


 # Write to new vtk file

   outfilename = os.path.splitext(filename)[0]+'.vtu'
   sys.stdout.write('write to file "%s" ...'%outfilename)
   sys.stdout.flush()
-  writer = vtkXMLUnstructuredGridWriter()
+  writer = vtk.vtkXMLUnstructuredGridWriter()
   writer.SetFileName(outfilename+'_tmp')
   writer.SetDataModeToAscii()
   writer.SetInput(uGrid)
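The converter above gathers per-component scalar arrays and rebuilds them as one three-component vtkDoubleArray; as its inline comment stresses, the component count must be set before the tuple count. A condensed sketch with made-up component values:

    import vtk

    # three scalar components for two tuples (values are illustrative)
    components = [[1.0, 4.0], [2.0, 5.0], [3.0, 6.0]]

    vectorData = vtk.vtkDoubleArray()
    vectorData.SetName('v')
    vectorData.SetNumberOfComponents(3)       # set this before NumberOfTuples !!!
    vectorData.SetNumberOfTuples(2)
    for i in range(2):
      for j in range(3):
        vectorData.SetComponent(i,j,components[j][i])
    print(vectorData.GetTuple3(0))            # (1.0, 2.0, 3.0)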
@@ -48,9 +48,10 @@ for name in filenames:

 # --------------- interprete header -----------------------------------------------------------------
   table.head_read()
   errors=[]
-  if table.label_dimension(options.deformed) != 3: errors.append('columns "{}" have dimension {}'.format(options.deformed,
-                                                                                                         table.label_dimension(options.deformed)))
-  if table.label_dimension(options.coords) != 3: errors.append('coordinates {} are not a vector.'.format(options.coords))
+  if table.label_dimension(options.deformed) != 3:
+    errors.append('columns "{}" have dimension {}'.format(options.deformed,table.label_dimension(options.deformed)))
+  if table.label_dimension(options.coords) != 3:
+    errors.append('coordinates {} are not a vector.'.format(options.coords))

   table.data_readArray([options.coords,options.deformed])

@@ -66,7 +67,7 @@ for name in filenames:
                    max(map(float,coords[2].keys()))-min(map(float,coords[2].keys())),\
                   ],'d')                                         # size from bounding box, corrected for cell-centeredness

-  size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))  # spacing for grid==1 equal to smallest among other spacings
+  size = np.where(grid > 1, size, min(size[grid > 1]/grid[grid > 1]))  # spacing for grid==1 set to smallest among other spacings

 # ------------------------------------------ process data ---------------------------------------
 hexPoints = np.array([[-1,-1,-1],

@@ -101,8 +102,8 @@ for name in filenames:
     (directory,filename) = os.path.split(name)
     writer.SetDataModeToBinary()
     writer.SetCompressorTypeToZLib()
-    writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]
+    writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]\
                                               +'.'+writer.GetDefaultFileExtension()))
   else:
     writer = vtk.vtkDataSetWriter()
     writer.WriteToOutputStringOn()

@@ -111,7 +112,7 @@ for name in filenames:
   if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(uGrid)
   else:                          writer.SetInputData(uGrid)
   writer.Write()
-  if name == None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
+  if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

   table.close()                                                  # close input ASCII table
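The size correction above guards against a degenerate axis: where the grid has only one layer, the bounding-box extent is zero, so the smallest spacing among the remaining axes is substituted. The same np.where idiom in isolation (numbers are illustrative):

    import numpy as np

    grid = np.array([4, 4, 1])
    size = np.array([2.0, 2.0, 0.0])                    # zero extent along z
    size = np.where(grid > 1, size,
                    min(size[grid > 1]/grid[grid > 1])) # grid==1 gets the smallest spacing
    print(size)                                         # [2.  2.  0.5]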
@@ -1,7 +1,7 @@
 #!/usr/bin/python
 # -*- coding: UTF-8 no BOM -*-

-import os,string,sys,re
+import os,sys
 from optparse import OptionParser
 import numpy as np
 import damask

@@ -73,7 +73,7 @@ for file in files:
   for iPhi1 in range(nPhi1):
     for iPHI in range(nPHI):
       for iPhi2 in range(nPhi2):
-        ODF[iPhi1,iPHI,iPhi2] = float(line.split()[3])*0.125     # extract intensity (in column 4) and weight by 1/8 (since we convert from the 8 corners to the center later on)
+        ODF[iPhi1,iPHI,iPhi2] = float(line.split()[3])*0.125     # extract intensity (in column 4) and weight by 1/8
         line = file['input'].readline()

 for iPhi1 in range(nPhi1-1):
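The 0.125 weight anticipates the corner-to-center conversion mentioned in the original comment: each cell-centered ODF value averages the intensities of its eight surrounding corners, so every corner contributes one eighth. A toy version of that averaging (values are illustrative):

    import numpy as np

    corners = np.arange(8.0).reshape(2,2,2)   # intensities at the 8 corners of one cell
    center = 0.125*(corners[0,0,0] + corners[0,0,1] + corners[0,1,0] + corners[0,1,1] +
                    corners[1,0,0] + corners[1,0,1] + corners[1,1,0] + corners[1,1,1])
    print(center)                             # 3.5 -- the mean of the corner values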
@@ -1,16 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-'''
-Writes meaningful labels to the Abaqus input file (*.inp)
-based on the files
-<modelname_jobname>.output<Homogenization/Crystallite/Constitutive>
-that are written during the first run of the model.
-See Abaqus Keyword Reference Manual (AKRM) *DEPVAR for details.
-Original script: marc_addUserOutput.py modified by Benjamin Bode
-'''
-
-import sys,os,re,string
+import sys,os,re
 from optparse import OptionParser
 import damask

@@ -19,7 +10,6 @@ scriptID = ' '.join([scriptName,damask.version])

 # -----------------------------
 def ParseOutputFormat(filename,what,me):
-# -----------------------------
   format = {'outputs':{},'specials':{'brothers':[]}}

   outputmetafile = filename+'.output'+what

@@ -120,7 +110,7 @@ for file in files:

   for what in me:
     outputFormat[what] = ParseOutputFormat(formatFile,what,me[what])
-    if not '_id' in outputFormat[what]['specials']:
+    if '_id' not in outputFormat[what]['specials']:
       print "'%s' not found in <%s>"%(me[what],what)
       print '\n'.join(map(lambda x:'  '+x,outputFormat[what]['specials']['brothers']))
       sys.exit(1)

@@ -164,19 +154,14 @@ for file in files:
     if m:
       lastSection = thisSection
       thisSection = m.group(1)
-      #Abaqus keyword can be upper or lower case
-      if (lastSection.upper() == '*DEPVAR' and thisSection.upper() == '*USER'):
-        #Abaqus SDVs are named SDV1...SDVn if no specific name is given
-        #Abaqus needs total number of SDVs in the line after *Depvar keyword
+      if (lastSection.upper() == '*DEPVAR' and thisSection.upper() == '*USER'):  #Abaqus keyword can be upper or lower case
         if options.number > 0:
-          #number of SDVs
-          output.write('%i\n'%options.number)
+          output.write('%i\n'%options.number)                    #Abaqus needs total number of SDVs in the line after *Depvar keyword
         else:
-          #number of SDVs
           output.write('%i\n'%len(UserVars))
-          #index,output variable key,output variable description
         for i in range(len(UserVars)):
-          output.write('%i,"%i%s","%i%s"\n'%(i+1,0,UserVars[i],0,UserVars[i]))
+          output.write('%i,"%i%s","%i%s"\n'%(i+1,0,UserVars[i],0,UserVars[i]))   #index,output variable key,output variable description
     if (thisSection.upper() != '*DEPVAR' or not re.match('\s*\d',line)):
       output.write(line)
   output.close()
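For orientation, the rewritten loop fills the line after the Abaqus *Depvar keyword with the SDV count and then one index/key/description row per variable. A stripped-down sketch of that write pattern, reusing the commit's own format string (the SDV labels and output file name are placeholders):

    UserVars = ['resistance_slip', 'shearrate_slip']    # placeholder SDV labels

    with open('job_sdv.inp', 'w') as output:            # hypothetical output file
      output.write('%i\n'%len(UserVars))                # total number of SDVs after *Depvar
      for i in range(len(UserVars)):
        # index, output variable key, output variable description
        output.write('%i,"%i%s","%i%s"\n'%(i+1,0,UserVars[i],0,UserVars[i]))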
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,math,string
+import os,sys,math
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -114,7 +114,7 @@ for name in filenames:

   microstructure = microstructure.reshape(info['grid'],order='F')

-  if options.dimension != None:
+  if options.dimension is not None:
     mask = (np.array(options.dimension) < 0).astype(float)                                         # zero where positive dimension, otherwise one
     dim = abs(np.array(options.dimension))                                                         # dimensions of primitive body
     pos = np.zeros(3,dtype='float')
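For a hypothetical -d/--dimension input with one negative axis, the two context lines evaluate as follows:

    import numpy as np

    dimension = (16, -8, 4)                         # hypothetical primitive dimensions
    mask = (np.array(dimension) < 0).astype(float)  # -> [ 0.  1.  0.]; flags the negated axes
    dim  = abs(np.array(dimension))                 # -> [16  8  4]; magnitudes used as body size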
@@ -134,10 +134,9 @@ for name in filenames:

 # --- report ---------------------------------------------------------------------------------------

-  remarks = []
-  if ( newInfo['microstructures'] != info['microstructures']): remarks.append('--> microstructures: %i'%newInfo['microstructures'])
-  if remarks != []: damask.util.croak(remarks)
+  if ( newInfo['microstructures'] != info['microstructures']):
+    damask.util.croak('--> microstructures: %i'%newInfo['microstructures'])

 #--- write header ---------------------------------------------------------------------------------
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,math,string
+import os,sys,math
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -28,24 +28,32 @@ parser.add_option('-o', '--offset',
                   help = 'a,b,c offset from old to new origin of grid [%default]')
 parser.add_option('-f', '--fill',
                   dest = 'fill',
-                  type = 'int', metavar = 'int',
+                  type = 'float', metavar = 'float',
                   help = '(background) canvas grain index. "0" selects maximum microstructure index + 1 [%default]')
+parser.add_option('--float',
+                  dest = 'real',
+                  action = 'store_true',
+                  help = 'input data is float [%default]')

 parser.set_defaults(grid = ['0','0','0'],
                     offset = (0,0,0),
                     fill = 0,
+                    real = False,
                    )

 (options, filenames) = parser.parse_args()

+datatype = 'f' if options.real else 'i'
+

 # --- loop over input files -------------------------------------------------------------------------

 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False, labeled = False)
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False,
+                                    labeled = False)
   except: continue
   damask.util.report(scriptName,name)
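Distilled, the new float handling: the canvas dtype follows the flag, and the "0 selects maximum microstructure index + 1" convention now applies only to integer data. A minimal sketch — canvas_fill is a hypothetical condensation of the hunk's logic, not a function from the script:

    import numpy as np

    def canvas_fill(microstructure, fill, real):
        # float data takes the fill value literally (including 0.0);
        # integer data keeps the "0 means max index + 1" convention
        background = fill if real or fill > 0 else microstructure.max() + 1
        canvas = np.zeros((4, 4), 'f' if real else 'i')
        canvas.fill(background)
        return canvas

    m = np.array([[1, 2], [2, 3]])
    print(canvas_fill(m, 0, real=False)[0, 0])   # 4   -> max index + 1
    print(canvas_fill(m, 0, real=True)[0, 0])    # 0.0 -> the literal fill value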
@@ -71,7 +79,7 @@ for name in filenames:

 # --- read data ------------------------------------------------------------------------------------

-  microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F')          # read microstructure
+  microstructure = table.microstructure_read(info['grid'],datatype).reshape(info['grid'],order='F') # read microstructure

 # --- do work ------------------------------------------------------------------------------------
@@ -81,11 +89,12 @@ for name in filenames:
                'microstructures': 0,
               }

-  newInfo['grid'] = np.array([int(o*float(n.translate(None,'xX'))) if n[-1].lower() == 'x' else int(n) for o,n in zip(info['grid'],options.grid)],'i')
+  newInfo['grid'] = np.array([int(o*float(n.translate(None,'xX'))) if n[-1].lower() == 'x'\
+                                                                  else int(n) for o,n in zip(info['grid'],options.grid)],'i')
   newInfo['grid'] = np.where(newInfo['grid'] > 0, newInfo['grid'],info['grid'])

-  microstructure_cropped = np.zeros(newInfo['grid'],'i')
-  microstructure_cropped.fill(options.fill if options.fill > 0 else microstructure.max()+1)
+  microstructure_cropped = np.zeros(newInfo['grid'],datatype)
+  microstructure_cropped.fill(options.fill if options.real or options.fill > 0 else microstructure.max()+1)
   xindex = list(set(xrange(options.offset[0],options.offset[0]+newInfo['grid'][0])) & \
                 set(xrange(info['grid'][0])))
   yindex = list(set(xrange(options.offset[1],options.offset[1]+newInfo['grid'][1])) & \
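The grid specification the rewrapped comprehension implements: a plain integer sets the new grid size directly, an 'x'-suffixed factor scales the old size, and non-positive results keep the old grid. A standalone Python 2 sketch (new_grid is a hypothetical name; str.translate(None, ...) is the Python 2 form the script uses):

    import numpy as np

    def new_grid(old, spec):
        g = np.array([int(o*float(n.translate(None,'xX'))) if n[-1].lower() == 'x'
                      else int(n) for o, n in zip(old, spec)], 'i')
        return np.where(g > 0, g, old)                # zeros fall back to the old size

    print new_grid((16, 16, 16), ('32', '2x', '0'))   # [32 32 16]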
@@ -143,7 +152,7 @@ for name in filenames:
     "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=newInfo['origin']),
     "homogenization\t{homog}".format(homog=info['homogenization']),
     "microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
     extra_header
     ])
   table.labels_clear()
   table.head_write()
@@ -151,9 +160,9 @@ for name in filenames:

 # --- write microstructure information ------------------------------------------------------------

-  formatwidth = int(math.floor(math.log10(microstructure_cropped.max())+1))
+  format = '%g' if options.real else '%{}i'.format(int(math.floor(math.log10(microstructure_cropped.max())+1)))
   table.data = microstructure_cropped.reshape((newInfo['grid'][0],newInfo['grid'][1]*newInfo['grid'][2]),order='F').transpose()
-  table.data_writeArray('%%%ii'%(formatwidth),delimiter=' ')
+  table.data_writeArray(format,delimiter=' ')

 # --- output finalization --------------------------------------------------------------------------
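What the replaced line computes, sketched (data_format is a hypothetical helper; the log10 requires a positive maximum, as in the original):

    import math

    def data_format(maximum, real):
        # '%g' for float data, otherwise an integer field wide enough for the largest index
        return '%g' if real else '%{}i'.format(int(math.floor(math.log10(maximum)+1)))

    print data_format(42, False)    # '%2i' -> two-character columns fit indices up to 99
    print data_format(3.14, True)   # '%g'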
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,vtk
+import os,sys,vtk
 import numpy as np
 from optparse import OptionParser
 import damask
@@ -54,12 +54,25 @@ for name in filenames:
   errors = []
   if np.any(info['grid'] < 1):    errors.append('invalid grid a b c.')
   if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')

+#--- read microstructure information --------------------------------------------------------------
+
+  if options.data:
+    microstructure,ok = table.microstructure_read(info['grid'],strict = True)                      # read microstructure
+
+    if ok:
+      structure = vtk.vtkIntArray()
+      structure.SetName('Microstructures')
+      for idx in microstructure: structure.InsertNextValue(idx)
+
+    else: errors.append('mismatch between data and grid dimension.')
+
   if errors != []:
     damask.util.croak(errors)
     table.close(dismiss = True)
     continue

-# --- generate VTK rectilinear grid --------------------------------------------------------------------------------
+# --- generate VTK rectilinear grid ---------------------------------------------------------------

   grid = vtk.vtkRectilinearGrid()
   grid.SetDimensions([x+1 for x in info['grid']])
@@ -72,18 +85,8 @@ for name in filenames:
     elif i == 1: grid.SetYCoordinates(temp)
     elif i == 2: grid.SetZCoordinates(temp)

-#--- read microstructure information --------------------------------------------------------------
-
-  if options.data:
-    microstructure = table.microstructure_read(info['grid'])                                       # read microstructure
-
-    structure = vtk.vtkIntArray()
-    structure.SetName('Microstructures')
-
-    for idx in microstructure:
-      structure.InsertNextValue(idx)
-
-    grid.GetCellData().AddArray(structure)
+  if options.data: grid.GetCellData().AddArray(structure)

 # --- write data -----------------------------------------------------------------------------------
   if name:
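For orientation, a minimal self-contained sketch of the rectilinear-grid construction this hunk restructures (illustrative sizes; point dimensions exceed cell counts by one per axis, which is what the x+1 above accounts for):

    import vtk

    grid = vtk.vtkRectilinearGrid()
    grid.SetDimensions(3, 3, 2)                     # points for 2 x 2 x 1 cells

    xy = vtk.vtkDoubleArray()
    for v in (0.0, 0.5, 1.0): xy.InsertNextValue(v)
    z = vtk.vtkDoubleArray()
    for v in (0.0, 1.0): z.InsertNextValue(v)
    grid.SetXCoordinates(xy)
    grid.SetYCoordinates(xy)
    grid.SetZCoordinates(z)

    structure = vtk.vtkIntArray()                   # one microstructure index per cell
    structure.SetName('Microstructures')
    for idx in (1, 1, 2, 2): structure.InsertNextValue(idx)
    grid.GetCellData().AddArray(structure)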
@@ -91,8 +94,7 @@ for name in filenames:
     (directory,filename) = os.path.split(name)
     writer.SetDataModeToBinary()
     writer.SetCompressorTypeToZLib()
-    writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]
-                                    +'.'+writer.GetDefaultFileExtension()))
+    writer.SetFileName(os.path.join(directory,os.path.splitext(filename)[0]+'.'+writer.GetDefaultFileExtension()))
   else:
     writer = vtk.vtkDataSetWriter()
     writer.WriteToOutputStringOn()
@@ -101,6 +103,6 @@ for name in filenames:
   if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
   else:                          writer.SetInputData(grid)
   writer.Write()
-  if name == None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])
+  if name is None: sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])

   table.close()
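When no file name is given, the script streams legacy-format output to stdout instead of writing a file; a self-contained sketch of that branch, using only the writer calls visible in the two hunks above (the grid here stands in for the one built earlier):

    import sys, vtk

    grid = vtk.vtkRectilinearGrid()                 # placeholder for the assembled grid

    writer = vtk.vtkDataSetWriter()                 # legacy-format writer of the nameless branch
    writer.WriteToOutputStringOn()                  # keep output in memory rather than a file
    if vtk.VTK_MAJOR_VERSION <= 5: writer.SetInput(grid)
    else:                          writer.SetInputData(grid)
    writer.Write()
    sys.stdout.write(writer.GetOutputString()[0:writer.GetOutputStringLength()])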
@@ -1,20 +1,17 @@
 #!/usr/bin/env python
 # -*- coding: UTF-8 no BOM -*-

-import os,sys,string,math
+import os,sys,math
 import numpy as np
 import damask
 from scipy import ndimage
 from optparse import OptionParser
-from collections import defaultdict

 scriptName = os.path.splitext(os.path.basename(__file__))[0]
 scriptID   = ' '.join([scriptName,damask.version])

 def mostFrequent(arr):
-  d = defaultdict(int)
-  for i in arr: d[i] += 1
-  return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)[0][0]                             # return value of most frequent microstructure
+  return np.argmax(np.bincount(arr))


 #--------------------------------------------------------------------------------------------------
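The one-line mostFrequent works because np.bincount tallies the occurrences of every non-negative integer value and argmax picks the most common — equivalent to the removed dictionary tally for non-negative integer microstructure indices, though ties may resolve differently:

    import numpy as np
    from collections import defaultdict

    def most_frequent_old(arr):                     # the removed implementation
        d = defaultdict(int)
        for i in arr: d[i] += 1
        return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)[0][0]

    def most_frequent_new(arr):                     # the replacement
        return np.argmax(np.bincount(arr))

    arr = [1, 3, 3, 2, 3, 1]
    print most_frequent_old(arr), most_frequent_new(arr)   # 3 3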
@@ -43,10 +40,9 @@ parser.set_defaults(stencil = 3,

 if filenames == []: filenames = [None]

 for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False,
-                              labeled = False)
+  try:    table = damask.ASCIItable(name = name,
+                                    buffered = False,
+                                    labeled = False)
   except: continue
   damask.util.report(scriptName,name)
@@ -72,7 +68,7 @@ for name in filenames:

 # --- read data ------------------------------------------------------------------------------------

   microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F')         # read microstructure

 # --- do work ------------------------------------------------------------------------------------
Some files were not shown because too many files have changed in this diff.