fixed local merge conflict

Eureka Pai 2017-08-29 17:09:18 -04:00
commit f743a00944
32 changed files with 907 additions and 867 deletions

DAMASK_prerequisites.sh (new executable file, 99 lines)

@@ -0,0 +1,99 @@
#!/usr/bin/env bash
OUTFILE="system_report.txt"
echo generating $OUTFILE
date +"%m-%d-%y" > $OUTFILE          # start the report with the date it was generated
# redirect STDOUT and STDERR to logfile
# https://stackoverflow.com/questions/11229385/redirect-all-output-in-a-bash-script-when-using-set-x
exec > $OUTFILE 2>&1
# directory, file is not a symlink by definition
# https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
DAMASK_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
echo ==============================================================================================
echo DAMASK settings
echo ==============================================================================================
echo DAMASK_ROOT:
echo $DAMASK_ROOT
echo
echo Version:
cat VERSION
echo
echo Settings in CONFIG:
cat CONFIG
echo
echo ==============================================================================================
echo System
echo ==============================================================================================
uname -a
echo
echo ==============================================================================================
echo Python
echo ==============================================================================================
DEFAULT_PYTHON=python2.7
for executable in python python2 python3 python2.7; do
  if [[ "$(which $executable)x" != "x" ]]; then
    echo $executable version: $($executable --version 2>&1)
  else
    echo $executable does not exist
  fi
done
echo Location of $DEFAULT_PYTHON: $(ls -la $(which $DEFAULT_PYTHON))
echo
for module in numpy scipy; do
  echo ----------------------------------------------------------------------------------------------
  echo $module
  echo ----------------------------------------------------------------------------------------------
  $DEFAULT_PYTHON -c "import $module; \
                      print('Version: {}'.format($module.__version__)); \
                      print('Location: {}'.format($module.__file__))"
done
echo ----------------------------------------------------------------------------------------------
echo vtk
echo ----------------------------------------------------------------------------------------------
$DEFAULT_PYTHON -c "import vtk; \
print('Version: {}'.format(vtk.vtkVersion.GetVTKVersion())); \
print('Location: {}'.format(vtk.__file__))"
echo ----------------------------------------------------------------------------------------------
echo h5py
echo ----------------------------------------------------------------------------------------------
$DEFAULT_PYTHON -c "import h5py; \
print('Version: {}'.format(h5py.version.version)); \
print('Location: {}'.format(h5py.__file__))"
echo
echo ==============================================================================================
echo GCC
echo ==============================================================================================
for executable in gcc g++ gfortran; do
  if [[ "$(which $executable)x" != "x" ]]; then
    echo $(which $executable) version: $($executable --version 2>&1)
  else
    echo $executable does not exist
  fi
done
echo
echo ==============================================================================================
echo Intel Compiler Suite
echo ==============================================================================================
for executable in icc icpc ifort; do
  if [[ "$(which $executable)x" != "x" ]]; then
    echo $(which $executable) version: $($executable --version 2>&1)
  else
    echo $executable does not exist
  fi
done
echo
echo ==============================================================================================
echo MPI Wrappers
echo ==============================================================================================
for executable in mpicc mpiCC mpicxx mpifort mpif90 mpif77; do
  if [[ "$(which $executable)x" != "x" ]]; then
    echo $(which $executable) version: $($executable --show 2>&1)
  else
    echo $executable does not exist
  fi
done

@@ -1 +1 @@
-Subproject commit 19a53f6229603aeafb2466b58679a1cd04fc0142
+Subproject commit 5e97c9ac2fa4f7748a1bc040c15889623a3afd1f

@@ -1 +1 @@
-v2.0.1-805-gdc3eda3
+v2.0.1-901-gaee90f7

env/DAMASK.csh (vendored, 14 lines changed)

@@ -26,15 +26,11 @@ if ( "x$DAMASK_NUM_THREADS" == "x" ) then
  set DAMASK_NUM_THREADS=1
endif
-# according to http://software.intel.com/en-us/forums/topic/501500
-# this seems to make sense for the stack size
-if ( `which free` != "free: Command not found." ) then
-  set freeMem=`free -k | grep -E '(Mem|Speicher):' | awk '{print $4;}'`
-  set heap=` expr $freeMem / 2`
-  set stack=`expr $freeMem / $DAMASK_NUM_THREADS / 2`
-  # http://superuser.com/questions/220059/what-parameters-has-ulimit
-  limit datasize  $heap   # maximum heap size (kB)
-  limit stacksize $stack  # maximum stack size (kB)
+# currently, there is no information that unlimited causes problems
+# still, http://software.intel.com/en-us/forums/topic/501500 suggest to fix it
+# http://superuser.com/questions/220059/what-parameters-has-ulimit
+limit datasize  unlimited   # maximum heap size (kB)
+limit stacksize unlimited   # maximum stack size (kB)
endif
if ( `limit | grep memoryuse` != "" ) then
  limit memoryuse  unlimited  # maximum physical memory size

env/DAMASK.sh (vendored, 39 lines changed)

@@ -5,9 +5,12 @@ function canonicalPath {
  python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" $1
}
+function blink {
+  echo -e "\033[2;5m$1\033[0m"
+}
if [ "$OSTYPE" == "linux-gnu" ] || [ "$OSTYPE" == 'linux' ]; then
- DAMASK_ROOT=$(canonicalPath "$(dirname $BASH_SOURCE)")
+ DAMASK_ROOT=$(dirname $BASH_SOURCE)
else
  [[ "${BASH_SOURCE::1}" == "/" ]] && BASE="" || BASE="$(pwd)/"
  STAT=$(stat "$(dirname $BASE$BASH_SOURCE)")
@@ -16,9 +19,10 @@ fi
# transition compatibility (renamed $DAMASK_ROOT/DAMASK_env.sh to $DAMASK_ROOT/env/DAMASK.sh)
if [ ${BASH_SOURCE##*/} == "DAMASK.sh" ]; then
- DAMASK_ROOT=$(canonicalPath "$DAMASK_ROOT/..")
+ DAMASK_ROOT="$DAMASK_ROOT/.."
fi
+DAMASK_ROOT=$(canonicalPath $DAMASK_ROOT)
# shorthand command to change to DAMASK_ROOT directory
eval "function DAMASK_root() { cd $DAMASK_ROOT; }"
@@ -33,27 +37,21 @@ unset -f set
# add DAMASK_BIN if present
[ "x$DAMASK_BIN" != "x" ] && PATH=$DAMASK_BIN:$PATH
-SOLVER=$(which DAMASK_spectral || true 2>/dev/null)
-[ "x$SOLVER" == "x" ] && SOLVER='Not found!'
-PROCESSING=$(which postResults || true 2>/dev/null)
-[ "x$PROCESSING" == "x" ] && PROCESSING='Not found!'
+SOLVER=$(type -p DAMASK_spectral || true 2>/dev/null)
+[ "x$SOLVER" == "x" ] && SOLVER=$(blink 'Not found!')
+PROCESSING=$(type -p postResults || true 2>/dev/null)
+[ "x$PROCESSING" == "x" ] && PROCESSING=$(blink 'Not found!')
[ "x$DAMASK_NUM_THREADS" == "x" ] && DAMASK_NUM_THREADS=1
-# according to http://software.intel.com/en-us/forums/topic/501500
-# this seems to make sense for the stack size
-FREE=$(type -p free 2>/dev/null)
-if [ "x$FREE" != "x" ]; then
-  freeMem=$(free -k | grep -E '(Mem|Speicher):' | awk '{print $4;}')
-  # http://superuser.com/questions/220059/what-parameters-has-ulimit
-  ulimit -d unlimited 2>/dev/null \
-    || ulimit -d $(expr $freeMem / 2) 2>/dev/null                       # maximum heap size (kB)
-  ulimit -s unlimited 2>/dev/null \
-    || ulimit -s $(expr $freeMem / $DAMASK_NUM_THREADS / 2) 2>/dev/null # maximum stack size (kB)
-fi
-ulimit -v unlimited 2>/dev/null  # maximum virtual memory size
-ulimit -m unlimited 2>/dev/null  # maximum physical memory size
+# currently, there is no information that unlimited causes problems
+# still, http://software.intel.com/en-us/forums/topic/501500 suggest to fix it
+# http://superuser.com/questions/220059/what-parameters-has-ulimit
+ulimit -d unlimited 2>/dev/null  # maximum heap size (kB)
+ulimit -s unlimited 2>/dev/null  # maximum stack size (kB)
+ulimit -v unlimited 2>/dev/null  # maximum virtual memory size
+ulimit -m unlimited 2>/dev/null  # maximum physical memory size
# disable output in case of scp
if [ ! -z "$PS1" ]; then
@@ -72,7 +70,8 @@ if [ ! -z "$PS1" ]; then
    [[ $(canonicalPath "$PETSC_DIR") == $PETSC_DIR ]] \
      || echo " ~~> "$(canonicalPath "$PETSC_DIR")
  fi
- echo "MSC.Marc/Mentat $MSC_ROOT"
+ echo -n "MSC.Marc/Mentat "
+ [ -d $MSC_ROOT ] && echo $MSC_ROOT || blink $MSC_ROOT
  echo
  echo -n "heap size "
  [[ "$(ulimit -d)" == "unlimited" ]] \

env/DAMASK.zsh (vendored, 36 lines changed)

@@ -1,6 +1,10 @@
# sets up an environment for DAMASK on zsh
# usage: source DAMASK.zsh
+function canonicalPath {
+  python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" $1
+}
# transition compatibility (renamed $DAMASK_ROOT/DAMASK_env.zsh to $DAMASK_ROOT/env/DAMASK.zsh)
if [ ${0:t:r} = 'DAMASK' ]; then
  DAMASK_ROOT=${0:a:h}'/..'
@@ -21,21 +25,21 @@ unset -f set
# add DAMASK_BIN if present
[ "x$DAMASK_BIN" != "x" ] && PATH=$DAMASK_BIN:$PATH
-SOLVER=`which DAMASK_spectral || True 2>/dev/null`
-PROCESSING=`which postResults || True 2>/dev/null`
-[ "x$DAMASK_NUM_THREADS" = "x" ] && DAMASK_NUM_THREADS=1
-# according to http://software.intel.com/en-us/forums/topic/501500
-# this seems to make sense for the stack size
-if [ "`which free 2>/dev/null`" != "free not found" ]; then
-  freeMem=`free -k | grep -E '(Mem|Speicher):' | awk '{print $4;}'`
-  # http://superuser.com/questions/220059/what-parameters-has-ulimit
-  #ulimit -d `expr $freeMem / 2` 2>/dev/null                        # maximum heap size (kB)
-  ulimit -s `expr $freeMem / $DAMASK_NUM_THREADS / 2` 2>/dev/null   # maximum stack size (kB)
-fi
-ulimit -v unlimited 2>/dev/null  # maximum virtual memory size
-ulimit -m unlimited 2>/dev/null  # maximum physical memory size
+SOLVER=$(type -p DAMASK_spectral || true 2>/dev/null)
+[ "x$SOLVER" == "x" ] && SOLVER='Not found!'
+
+PROCESSING=$(type -p postResults || true 2>/dev/null)
+[ "x$PROCESSING" == "x" ] && PROCESSING='Not found!'
+
+[ "x$DAMASK_NUM_THREADS" == "x" ] && DAMASK_NUM_THREADS=1
+
+# currently, there is no information that unlimited causes problems
+# still, http://software.intel.com/en-us/forums/topic/501500 suggest to fix it
+# http://superuser.com/questions/220059/what-parameters-has-ulimit
+ulimit -d unlimited 2>/dev/null  # maximum heap size (kB)
+ulimit -s unlimited 2>/dev/null  # maximum stack size (kB)
+ulimit -v unlimited 2>/dev/null  # maximum virtual memory size
+ulimit -m unlimited 2>/dev/null  # maximum physical memory size
# disable output in case of scp
if [ ! -z "$PS1" ]; then
@@ -51,8 +55,8 @@ if [ ! -z "$PS1" ]; then
  echo "Multithreading DAMASK_NUM_THREADS=$DAMASK_NUM_THREADS"
  if [ "x$PETSC_DIR" != "x" ]; then
    echo "PETSc location $PETSC_DIR"
-   [[ $(python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "$PETSC_DIR") == $PETSC_DIR ]] \
-     || echo " ~~> "$(python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" "$PETSC_DIR")
+   [[ $(canonicalPath "$PETSC_DIR") == $PETSC_DIR ]] \
+     || echo " ~~> "$(canonicalPath "$PETSC_DIR")
  fi
  [[ "x$PETSC_ARCH" == "x" ]] \
    || echo "PETSc architecture $PETSC_ARCH"
@@ -79,7 +83,7 @@ fi
export DAMASK_NUM_THREADS
export PYTHONPATH=$DAMASK_ROOT/lib:$PYTHONPATH
-for var in BASE STAT SOLVER PROCESSING FREE DAMASK_BIN MATCH; do
+for var in BASE STAT SOLVER PROCESSING FREE DAMASK_BIN; do
  unset "${var}"
done
for var in DAMASK MSC; do
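All three environment files above now simply request unlimited heap, stack, virtual, and resident memory instead of deriving limits from the output of free. To confirm what a sourced environment actually granted, the standard-library sketch below (not part of this commit) queries the corresponding POSIX limits from Python; the mapping of ulimit -d/-s/-v to RLIMIT_DATA/RLIMIT_STACK/RLIMIT_AS is the usual one.

import resource

# report soft/hard limits for the resources the DAMASK env scripts touch
for name, rlimit in [('data (ulimit -d)',  resource.RLIMIT_DATA),
                     ('stack (ulimit -s)', resource.RLIMIT_STACK),
                     ('vmem (ulimit -v)',  resource.RLIMIT_AS)]:
    soft, hard = resource.getrlimit(rlimit)
    fmt = lambda v: 'unlimited' if v == resource.RLIM_INFINITY else v
    print('{:18} soft: {:>12}  hard: {:>12}'.format(name, fmt(soft), fmt(hard)))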

(deleted file, 177 lines)

@@ -1,177 +0,0 @@
#
# System-Wide Abaqus Environment File
# -------------------------------------
standard_parallel = ALL
mp_mode = MPI
mp_file_system = (DETECT,DETECT)
mp_num_parallel_ftps = (4, 4)
mp_environment_export = ('MPI_PROPAGATE_TSTP',
'ABA_CM_BUFFERING',
'ABA_CM_BUFFERING_LIMIT',
'ABA_ITERATIVE_SOLVER_VERBOSE',
'ABA_DMPSOLVER_BWDPARALLELOFF',
'ABA_ELP_SURFACE_SPLIT',
'ABA_ELP_SUSPEND',
'ABA_HOME',
'ABA_MEMORY_MODE',
'ABA_MPI_MESSAGE_TRACKING',
'ABA_MPI_VERBOSE_LEVEL',
'ABA_PATH',
'ABAQUS_CSE_RELTIMETOLERANCE',
'ABA_RESOURCE_MONITOR',
'ABA_RESOURCE_USEMALLINFO',
'ABAQUS_LANG',
'ABAQUS_CSE_CURRCONFIGMAPPING',
'ABAQUS_MPF_DIAGNOSTIC_LEVEL',
'ABAQUSLM_LICENSE_FILE',
'ABQ_CRTMALLOC',
'ABQ_DATACHECK',
'ABQ_RECOVER',
'ABQ_RESTART',
'ABQ_SPLITFILE',
'ABQ_XPL_WINDOWDUMP',
'ABQ_XPL_PARTITIONSIZE',
'ABQLMHANGLIMIT',
'ABQLMQUEUE',
'ABQLMUSER',
'CCI_RENDEZVOUS',
'DOMAIN',
'DOMAIN_CPUS',
'DOUBLE_PRECISION',
'FLEXLM_DIAGNOSTICS',
'FOR0006',
'FOR0064',
'FOR_IGNORE_EXCEPTIONS',
'FOR_DISABLE_DIAGNOSTIC_DISPLAY',
'LD_PRELOAD',
'MP_NUMBER_OF_THREADS',
'MPC_GANG',
'MPI_FLAGS',
'MPI_FLUSH_FCACHE',
'MPI_RDMA_NENVELOPE',
'MPI_SOCKBUFSIZE',
'MPI_USE_MALLOPT_MMAP_MAX',
'MPI_USE_MALLOPT_MMAP_THRESHOLD',
'MPI_USE_MALLOPT_SBRK_PROTECTION',
'MPI_WORKDIR',
'MPCCI_DEBUG',
'MPCCI_CODEID',
'MPCCI_JOBID',
'MPCCI_NETDEVICE',
'MPCCI_TINFO',
'MPCCI_SERVER',
'ABAQUS_CCI_DEBUG',
'NCPUS',
'OMP_DYNAMIC',
'OMP_NUM_THREADS',
'OUTDIR',
'PAIDUP',
'PARALLEL_METHOD',
'RAIDEV_NDREG_LAZYMEM',
'ABA_SYMBOLIC_GENERALCOLLAPSE',
'ABA_SYMBOLIC_GENERAL_MAXCLIQUERANK',
'ABA_ADM_MINIMUMINCREASE',
'ABA_ADM_MINIMUMDECREASE',
'IPATH_NO_CPUAFFINITY',
'MALLOC_MMAP_THRESHOLD_',
'ABA_EXT_SIMOUTPUT',
'SMA_WS',
'SMA_PARENT',
'SMA_PLATFORM',
'ABA_PRE_DECOMPOSITION',
'ACML_FAST_MALLOC',
'ACML_FAST_MALLOC_CHUNK_SIZE',
'ACML_FAST_MALLOC_MAX_CHUNKS',
'ACML_FAST_MALLOC_DEBUG')
import driverUtils, os
#-*- mode: python -*-
# #
# Compile and Link command settings for the Windows 64 Platform #
# ( AMD Opteron / Intel EM64T ) #
# #
compile_fortran=['ifort',
'/c','/DABQ_WIN86_64', '/u',
'/iface:cref', '/recursive', '/Qauto-scalar',
'/QxSSE3', '/QaxAVX',
'/heap-arrays:1',
# '/Od', '/Ob0' # <-- Optimization
# '/Zi', # <-- Debugging
'/include:%I', '/free', '/O1', '/fpp', '/openmp', '/Qmkl']
link_sl=['LINK',
'/nologo', '/NOENTRY', '/INCREMENTAL:NO', '/subsystem:console', '/machine:AMD64',
'/NODEFAULTLIB:LIBC.LIB', '/NODEFAULTLIB:LIBCMT.LIB',
'/DEFAULTLIB:OLDNAMES.LIB', '/DEFAULTLIB:LIBIFCOREMD.LIB', '/DEFAULTLIB:LIBIFPORTMD', '/DEFAULTLIB:LIBMMD.LIB',
'/DEFAULTLIB:kernel32.lib', '/DEFAULTLIB:user32.lib', '/DEFAULTLIB:advapi32.lib',
'/FIXED:NO', '/dll',
'/def:%E', '/out:%U', '%F', '%A', '%L', '%B',
'oldnames.lib', 'user32.lib', 'ws2_32.lib', 'netapi32.lib', 'advapi32.lib']
link_exe=['LINK',
'/nologo', '/INCREMENTAL:NO', '/subsystem:console', '/machine:AMD64', '/STACK:20000000',
'/NODEFAULTLIB:LIBC.LIB', '/NODEFAULTLIB:LIBCMT.LIB', '/DEFAULTLIB:OLDNAMES.LIB', '/DEFAULTLIB:LIBIFCOREMD.LIB',
'/DEFAULTLIB:LIBIFPORTMD', '/DEFAULTLIB:LIBMMD.LIB', '/DEFAULTLIB:kernel32.lib',
'/DEFAULTLIB:user32.lib', '/DEFAULTLIB:advapi32.lib',
'/FIXED:NO', '/LARGEADDRESSAWARE',
'/out:%J', '%F', '%M', '%L', '%B', '%O',
'oldnames.lib', 'user32.lib', 'ws2_32.lib', 'netapi32.lib', 'advapi32.lib']
# Link command to be used for MAKE w/o fortran compiler.
# remove the pound signs in order to remove the comments and have the file take effect.
#
#link_exe=['LINK', '/nologo', 'INCREMENTAL:NO', '/subsystem:console', '/machine:AMD64', '/NODEFAULTLIB:LIBC.LIB', '/NODEFAULTLIB:LIBCMT.LIB',
# '/DEFAULTLIB:OLDNAMES.LIB', '/DEFAULTLIB:MSVCRT.LIB', '/DEFAULTLIB:kernel32.lib', 'DEFAULTLIB:user32.lib', '/DEFAULTLIB:advapi32.lib',
# '/FIXED:NO', '/LARGEADDRESSAWARE', '/DEBUG', '/out:%J', '%F', '%M', '%L', '%B', '%O', 'oldnames.lib', 'user32.lib', 'ws2_32.lib',
# 'netapi32.lib', 'advapi32.lib]
# MPI Configuration
mp_mode = THREADS
mp_mpi_implementation = NATIVE
mp_rsh_command = 'dummy %H -l %U -n %C'
mp_mpirun_path = {}
mpirun = ''
progDir = os.environ.get('ProgramFiles','C:\\Program Files')
for mpiDir in ('Microsoft HPC Pack', 'Microsoft HPC Pack 2008 R2', 'Microsoft HPC Pack 2008', 'Microsoft HPC Pack 2008 SDK'):
mpirun = progDir + os.sep + mpiDir + os.sep + 'bin' + os.sep + 'mpiexec.exe'
if os.path.exists(mpirun):
mp_mpirun_path[NATIVE] = mpirun
mp_mpirun_path[MSSDK] = os.path.join(progDir, mpiDir)
break
if os.environ.has_key('CCP_HOME'):
from queueCCS import QueueCCS
queues['default'] = QueueCCS(queueName='share')
queues['share'] = QueueCCS(queueName='share')
queues['local'] = QueueCCS(queueName='local')
queues['genxmlshare'] = QueueCCS(queueName='genxmlshare')
queues['genxmllocal'] = QueueCCS(queueName='genxmllocal')
del QueueCCS
mpirun = os.path.join(os.environ['CCP_HOME'], 'bin', 'mpiexec.exe')
if os.path.exists(mpirun):
mp_mpirun_path[NATIVE] = mpirun
run_mode=BATCH
if mp_mpirun_path:
mp_mode=MPI
del progDir, mpiDir, mpirun
graphicsEnv = driverUtils.locateFile(os.environ['ABA_PATH'],'site','graphicsConfig','env')
if graphicsEnv:
execfile(graphicsEnv)
else:
raise 'Cannot find the graphics configuration environment file (graphicsConfig.env)'
del driverUtils, os, graphicsEnv
license_server_type=FLEXNET
abaquslm_license_file=""
doc_root="
doc_root_type="html"
academic=RESEARCH


@@ -14,12 +14,6 @@ except(NameError):
class ASCIItable():
  """Read and write to ASCII tables"""

-  __slots__ = ['__IO__',
-               'info',
-               'labeled',
-               'data',
-              ]

  tmpext = '_tmp'          # filename extension for in-place access

# ------------------------------------------------------------------
@@ -370,48 +364,29 @@
    """
    from collections import Iterable

-   if isinstance(labels, Iterable) and not isinstance(labels, str):    # check whether list of labels is requested
-     dim = []
-     for label in labels:
-       if label is not None:
-         myDim = -1
-         try:                                                          # column given as number?
-           idx = int(label)-1
-           myDim = 1                                                   # if found has at least dimension 1
-           if self.tags[idx].startswith('1_'):                         # column has multidim indicator?
-             while idx+myDim < len(self.tags) and self.tags[idx+myDim].startswith("%i_"%(myDim+1)):
-               myDim += 1                                              # add while found
-         except ValueError:                                            # column has string label
-           label = label[1:-1] if label[0] == label[-1] and label[0] in ('"',"'") else label  # remove outermost quotations
-           if label in self.tags:                                      # can be directly found?
-             myDim = 1                                                 # scalar by definition
-           elif '1_'+label in self.tags:                               # look for first entry of possible multidim object
-             idx = self.tags.index('1_'+label)                         # get starting column
-             myDim = 1                                                 # (at least) one-dimensional
-             while idx+myDim < len(self.tags) and self.tags[idx+myDim].startswith("%i_"%(myDim+1)):
-               myDim += 1                                              # keep adding while going through object
-         dim.append(myDim)
-   else:
-     dim = -1                                                          # assume invalid label
-     idx = -1
-     try:                                                              # column given as number?
-       idx = int(labels)-1
-       dim = 1                                                         # if found has at least dimension 1
-       if self.tags[idx].startswith('1_'):                             # column has multidim indicator?
-         while idx+dim < len(self.tags) and self.tags[idx+dim].startswith("%i_"%(dim+1)):
-           dim += 1                                                    # add as long as found
-     except ValueError:                                                # column has string label
-       labels = labels[1:-1] if labels[0] == labels[-1] and labels[0] in ('"',"'") else labels  # remove outermost quotations
-       if labels in self.tags:                                         # can be directly found?
-         dim = 1                                                       # scalar by definition
-       elif '1_'+labels in self.tags:                                  # look for first entry of possible multidim object
-         idx = self.tags.index('1_'+labels)                            # get starting column
-         dim = 1                                                       # is (at least) one-dimensional
-         while idx+dim < len(self.tags) and self.tags[idx+dim].startswith("%i_"%(dim+1)):
-           dim += 1                                                    # keep adding while going through object
-
-   return np.array(dim) if isinstance(dim,Iterable) else dim
+   listOfLabels = isinstance(labels, Iterable) and not isinstance(labels, str)  # check whether list of labels is requested
+   if not listOfLabels: labels = [labels]
+
+   dim = []
+   for label in labels:
+     if label is not None:
+       myDim = -1
+       try:                                                            # column given as number?
+         idx   = int(label)-1
+         myDim = 1                                                     # if found treat as single column of dimension 1
+       except ValueError:                                              # column has string label
+         label = label[1:-1] if label[0] == label[-1] and label[0] in ('"',"'") else label  # remove outermost quotations
+         if label in self.tags:                                        # can be directly found?
+           myDim = 1                                                   # scalar by definition
+         elif '1_'+label in self.tags:                                 # look for first entry of possible multidim object
+           idx = self.tags.index('1_'+label)                           # get starting column
+           myDim = 1                                                   # (at least) one-dimensional
+           while idx+myDim < len(self.tags) and self.tags[idx+myDim].startswith("%i_"%(myDim+1)):
+             myDim += 1                                                # keep adding while going through object
+
+       dim.append(myDim)
+
+   return np.array(dim) if listOfLabels else dim[0]

# ------------------------------------------------------------------
  def label_indexrange(self,
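The refactored label_dimension above normalizes a single label to a one-element list and applies the same lookup to every entry, relying on the ASCIItable column convention that an n-dimensional quantity is stored under tags 1_label ... n_label while a scalar keeps its plain tag. A standalone sketch of that convention (not from the repository; the tag names are made up for illustration):

tags = ['1_pos', '2_pos', '3_pos', 'texture']

def dimension(label, tags):
    if label in tags:                       # plain tag --> scalar
        return 1
    if '1_' + label in tags:                # first entry of a multi-dimensional object
        idx, dim = tags.index('1_' + label), 1
        while idx + dim < len(tags) and tags[idx + dim].startswith('%i_' % (dim + 1)):
            dim += 1                        # keep counting consecutive n_label columns
        return dim
    return -1                               # unknown label

print([dimension(l, tags) for l in ['pos', 'texture', 'missing']])   # [3, 1, -1]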
@@ -517,7 +492,7 @@
                         (d if str(c) != str(labels[present[i]]) else
                          1)))
    use = np.array(columns) if len(columns) > 0 else None

    self.tags = list(np.array(self.tags)[use])                         # update labels with valid subset
    self.data = np.loadtxt(self.__IO__['in'],usecols=use,ndmin=2)


@@ -9,28 +9,29 @@ class Color():
  Conversion of colors between different color-spaces.
  Colors should be given in the form Color('model',[vector]).
  To convert or copy color from one space to other, use the methods
  convertTo('model') or expressAs('model'), respectively.
  """

  __slots__ = [
               'model',
               'color',
               '__dict__',
              ]

# ------------------------------------------------------------------
  def __init__(self,
               model = 'RGB',
               color = np.zeros(3,'d')):

    self.__transforms__ = \
-                  {'HSL':    {'index': 0, 'next': self._HSL2RGB},
-                   'RGB':    {'index': 1, 'next': self._RGB2XYZ,    'prev': self._RGB2HSL},
-                   'XYZ':    {'index': 2, 'next': self._XYZ2CIELAB, 'prev': self._XYZ2RGB},
-                   'CIELAB': {'index': 3, 'next': self._CIELAB2MSH, 'prev': self._CIELAB2XYZ},
-                   'MSH':    {'index': 4, 'prev': self._MSH2CIELAB},
+                  {'HSV':    {'index': 0, 'next': self._HSV2HSL},
+                   'HSL':    {'index': 1, 'next': self._HSL2RGB,    'prev': self._HSL2HSV},
+                   'RGB':    {'index': 2, 'next': self._RGB2XYZ,    'prev': self._RGB2HSL},
+                   'XYZ':    {'index': 3, 'next': self._XYZ2CIELAB, 'prev': self._XYZ2RGB},
+                   'CIELAB': {'index': 4, 'next': self._CIELAB2MSH, 'prev': self._CIELAB2XYZ},
+                   'MSH':    {'index': 5, 'prev': self._MSH2CIELAB},
                  }
    model = model.upper()
@@ -46,24 +47,24 @@ class Color():
    self.model = model
    self.color = np.array(color,'d')

# ------------------------------------------------------------------
  def __repr__(self):
    """Color model and values"""
    return 'Model: %s Color: %s'%(self.model,str(self.color))

# ------------------------------------------------------------------
  def __str__(self):
    """Color model and values"""
    return self.__repr__()

# ------------------------------------------------------------------
  def convertTo(self,toModel = 'RGB'):
    toModel = toModel.upper()
    if toModel not in list(self.__transforms__.keys()): return

    sourcePos = self.__transforms__[self.model]['index']
    targetPos = self.__transforms__[toModel]['index']
@@ -76,23 +77,62 @@ class Color():
      self.__transforms__[self.model]['prev']()
      sourcePos -= 1
    return self

# ------------------------------------------------------------------
  def expressAs(self,asModel = 'RGB'):
    return self.__class__(self.model,self.color).convertTo(asModel)

+  def _HSV2HSL(self):
+    """
+    Convert H(ue) S(aturation) V(alue or brightness) to H(ue) S(aturation) L(uminance)
+    with all values in the range of 0 to 1
+    http://codeitdown.com/hsl-hsb-hsv-color/
+    """
+    if self.model != 'HSV': return
+
+    converted = Color('HSL',np.array([
+                                      self.color[0],
+                                      1. if self.color[2] == 0.0 or (self.color[1] == 0.0 and self.color[2] == 1.0) \
+                                         else self.color[1]*self.color[2]/(1.-abs(self.color[2]*(2.-self.color[1])-1.)),
+                                      0.5*self.color[2]*(2.-self.color[1]),
+                                     ]))
+
+    self.model = converted.model
+    self.color = converted.color
+
+  def _HSL2HSV(self):
+    """
+    Convert H(ue) S(aturation) L(uminance) to H(ue) S(aturation) V(alue or brightness)
+    with all values in the range of 0 to 1
+    http://codeitdown.com/hsl-hsb-hsv-color/
+    """
+    if self.model != 'HSL': return
+
+    h = self.color[0]
+    b = self.color[2]+0.5*(self.color[1]*(1.-abs(2*self.color[2]-1)))
+    s = 1.0 if b == 0.0 else 2.*(b-self.color[2])/b
+
+    converted = Color('HSV',np.array([h,s,b]))
+
+    self.model = converted.model
+    self.color = converted.color
+
  def _HSL2RGB(self):
    """
    Convert H(ue) S(aturation) L(uminance) to R(red) G(reen) B(lue)
    with all values in the range of 0 to 1
    from http://en.wikipedia.org/wiki/HSL_and_HSV
    """
    if self.model != 'HSL': return

    sextant = self.color[0]*6.0
    c = (1.0 - abs(2.0 * self.color[2] - 1.0))*self.color[1]
    x = c*(1.0 - abs(sextant%2 - 1.0))
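The two methods added above extend the conversion chain by one hop, so HSV colors can reach RGB, XYZ, CIELAB, and Msh through HSL. The standalone sketch below (not from the repository) restates the HSV-to-HSL and HSL-to-HSV formulas the methods implement, including the guards for the degenerate black/white cases, and round-trips a made-up color:

def hsv2hsl(h, s, v):
    l   = 0.5*v*(2. - s)                                      # luminance from value/saturation
    s_l = 1. if v == 0. or (s == 0. and v == 1.) \
             else s*v/(1. - abs(v*(2. - s) - 1.))
    return (h, s_l, l)

def hsl2hsv(h, s, l):
    v   = l + 0.5*s*(1. - abs(2.*l - 1.))                     # value/brightness from luminance
    s_v = 1. if v == 0. else 2.*(v - l)/v
    return (h, s_v, v)

print(hsv2hsl(0.5, 0.5, 0.8))             # (0.5, 0.5, 0.6)
print(hsl2hsv(*hsv2hsl(0.5, 0.5, 0.8)))   # round-trips back to (0.5, 0.5, 0.8)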
@@ -108,15 +148,15 @@ class Color():
                              ][int(sextant)],'d'))
    self.model = converted.model
    self.color = converted.color

  def _RGB2HSL(self):
    """
    Convert R(ed) G(reen) B(lue) to H(ue) S(aturation) L(uminance)
    with all values in the range of 0 to 1
    from http://130.113.54.154/~monger/hsl-rgb.html
    """
    if self.model != 'RGB': return

    HSL = np.zeros(3,'d')
@@ -141,43 +181,43 @@ class Color():
      if (HSL[0] < 0.0):
        HSL[0] = HSL[0] + 360.0
      for i in range(2):
        HSL[i+1] = min(HSL[i+1],1.0)
        HSL[i+1] = max(HSL[i+1],0.0)

    converted = Color('HSL', HSL)

    self.model = converted.model
    self.color = converted.color

  def _RGB2XYZ(self):
    """
    Convert R(ed) G(reen) B(lue) to CIE XYZ
    with all values in the range of 0 to 1
    from http://www.cs.rit.edu/~ncs/color/t_convert.html
    """
    if self.model != 'RGB': return

    XYZ     = np.zeros(3,'d')
    RGB_lin = np.zeros(3,'d')
    convert = np.array([[0.412453,0.357580,0.180423],
                        [0.212671,0.715160,0.072169],
                        [0.019334,0.119193,0.950227]])

    for i in range(3):
      if (self.color[i] > 0.04045): RGB_lin[i] = ((self.color[i]+0.0555)/1.0555)**2.4
      else:                         RGB_lin[i] =   self.color[i]        /12.92
    XYZ = np.dot(convert,RGB_lin)
    for i in range(3):
      XYZ[i] = max(XYZ[i],0.0)

    converted = Color('XYZ', XYZ)

    self.model = converted.model
    self.color = converted.color

  def _XYZ2RGB(self):
    """
@@ -199,17 +239,17 @@ class Color():
      if (RGB_lin[i] > 0.0031308): RGB[i] = ((RGB_lin[i])**(1.0/2.4))*1.0555-0.0555
      else:                        RGB[i] =   RGB_lin[i]             *12.92
    for i in range(3):
      RGB[i] = min(RGB[i],1.0)
      RGB[i] = max(RGB[i],0.0)

    maxVal = max(RGB)                                  # clipping colors according to the display gamut
    if (maxVal > 1.0): RGB /= maxVal

    converted = Color('RGB', RGB)

    self.model = converted.model
    self.color = converted.color

  def _CIELAB2XYZ(self):
    """
@@ -219,19 +259,19 @@ class Color():
    from http://www.easyrgb.com/index.php?X=MATH&H=07#text7
    """
    if self.model != 'CIELAB': return

    ref_white = np.array([.95047, 1.00000, 1.08883],'d')   # Observer = 2, Illuminant = D65
    XYZ       = np.zeros(3,'d')

    XYZ[1] = (self.color[0] + 16.0 ) / 116.0
    XYZ[0] = XYZ[1]   + self.color[1]/ 500.0
    XYZ[2] = XYZ[1]   - self.color[2]/ 200.0

    for i in range(len(XYZ)):
      if (XYZ[i] > 6./29. ): XYZ[i] = XYZ[i]**3.
      else:                  XYZ[i] = 108./841. * (XYZ[i] - 4./29.)

    converted = Color('XYZ', XYZ*ref_white)

    self.model = converted.model
    self.color = converted.color
@@ -244,30 +284,30 @@ class Color():
    http://www.cs.rit.edu/~ncs/color/t_convert.html
    """
    if self.model != 'XYZ': return

    ref_white = np.array([.95047, 1.00000, 1.08883],'d')   # Observer = 2, Illuminant = D65
    XYZ = self.color/ref_white

    for i in range(len(XYZ)):
      if (XYZ[i] > 216./24389 ): XYZ[i] = XYZ[i]**(1.0/3.0)
      else:                      XYZ[i] = (841./108. * XYZ[i]) + 16.0/116.0

    converted = Color('CIELAB', np.array([ 116.0 * XYZ[1] - 16.0,
                                           500.0 * (XYZ[0] - XYZ[1]),
                                           200.0 * (XYZ[1] - XYZ[2]) ]))

    self.model = converted.model
    self.color = converted.color

  def _CIELAB2MSH(self):
    """
    Convert CIE Lab to Msh colorspace
    from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
    """
    if self.model != 'CIELAB': return

    Msh = np.zeros(3,'d')
    Msh[0] = math.sqrt(np.dot(self.color,self.color))
    if (Msh[0] > 0.001):
      Msh[1] = math.acos(self.color[0]/Msh[0])
@@ -287,8 +327,8 @@ class Color():
    from http://www.cs.unm.edu/~kmorel/documents/ColorMaps/DivergingColorMapWorkshop.xls
    """
    if self.model != 'MSH': return

    Lab = np.zeros(3,'d')
    Lab[0] = self.color[0] * math.cos(self.color[1])
    Lab[1] = self.color[0] * math.sin(self.color[1]) * math.cos(self.color[2])
    Lab[2] = self.color[0] * math.sin(self.color[1]) * math.sin(self.color[2])
@@ -305,7 +345,7 @@ class Colormap():
               'left',
               'right',
               'interpolate',
              ]
  __predefined__ = {
                    'gray':          {'left':  Color('HSL',[0,1,1]),
                                      'right': Color('HSL',[0,0,0.15]),
@@ -329,7 +369,7 @@ class Colormap():
                                      'right': Color('HSL',[0.11,0.75,0.38]),
                                      'interpolate': 'perceptualuniform'},
                    'redgreen':      {'left':  Color('HSL',[0.97,0.96,0.36]),
                                      'right': Color('HSL',[0.33333,1.0,0.14]),
                                      'interpolate': 'perceptualuniform'},
                    'bluered':       {'left':  Color('HSL',[0.65,0.53,0.49]),
                                      'right': Color('HSL',[0.97,0.96,0.36]),
@@ -347,8 +387,8 @@ class Colormap():
                                      'right': Color('RGB',[0.000002,0.000000,0.286275]),
                                      'interpolate': 'perceptualuniform'},
                   }

# ------------------------------------------------------------------
  def __init__(self,
               left  = Color('RGB',[1,1,1]),
@@ -366,32 +406,32 @@ class Colormap():
      left = Color()
    if right.__class__.__name__ != 'Color':
      right = Color()

    self.left        = left
    self.right       = right
    self.interpolate = interpolate

# ------------------------------------------------------------------
  def __repr__(self):
    """Left and right value of colormap"""
    return 'Left: %s Right: %s'%(self.left,self.right)

# ------------------------------------------------------------------
  def invert(self):
    (self.left, self.right) = (self.right, self.left)
    return self

# ------------------------------------------------------------------
  def color(self,fraction = 0.5):

    def interpolate_Msh(lo, hi, frac):

      def rad_diff(a,b):
        return abs(a[2]-b[2])

      # if saturation of one of the two colors is too less than the other, hue of the less
      def adjust_hue(Msh_sat, Msh_unsat):
        if Msh_sat[0] >= Msh_unsat[0]:
          return Msh_sat[2]
@@ -399,11 +439,11 @@ class Colormap():
          hSpin = Msh_sat[1]/math.sin(Msh_sat[1])*math.sqrt(Msh_unsat[0]**2.0-Msh_sat[0]**2)/Msh_sat[0]
          if Msh_sat[2] < - math.pi/3.0: hSpin *= -1.0
          return Msh_sat[2] + hSpin

      Msh1 = np.array(lo[:])
      Msh2 = np.array(hi[:])

      if (Msh1[1] > 0.05 and Msh2[1] > 0.05 and rad_diff(Msh1,Msh2) > math.pi/3.0):
        M_mid = max(Msh1[0],Msh2[0],88.0)
        if frac < 0.5:
          Msh2 = np.array([M_mid,0.0,0.0],'d')
@@ -414,16 +454,16 @@ class Colormap():
      if   Msh1[1] < 0.05 and Msh2[1] > 0.05: Msh1[2] = adjust_hue(Msh2,Msh1)
      elif Msh1[1] > 0.05 and Msh2[1] < 0.05: Msh2[2] = adjust_hue(Msh1,Msh2)
      Msh = (1.0 - frac) * Msh1 + frac * Msh2

      return Color('MSH',Msh)

    def interpolate_linear(lo, hi, frac):
      """Linear interpolation between lo and hi color at given fraction; output in model of lo color."""
      interpolation = (1.0 - frac) * np.array(lo.color[:]) \
                    +        frac  * np.array(hi.expressAs(lo.model).color[:])

      return Color(lo.model,interpolation)

    if self.interpolate == 'perceptualuniform':
      return interpolate_Msh(self.left.expressAs('MSH').color,
                             self.right.expressAs('MSH').color,fraction)
@@ -432,8 +472,8 @@ class Colormap():
                                self.right,fraction)
    else:
      raise NameError('unknown color interpolation method')

# ------------------------------------------------------------------
  def export(self,name = 'uniformPerceptualColorMap',\
                  format = 'paraview',\
                  steps = 2,\
@@ -461,21 +501,21 @@ class Colormap():
      colormap = ['View.ColorTable = {'] \
               + [',\n'.join(['{%s}'%(','.join([str(x*255.0) for x in color])) for color in colors])] \
               + ['}']
    elif format == 'gom':
      colormap = ['1 1 ' + str(name) \
                + ' 9 ' + str(name) \
                + ' 0 1 0 3 0 0 -1 9 \ 0 0 0 255 255 255 0 0 255 ' \
                + '30 NO_UNIT 1 1 64 64 64 255 1 0 0 0 0 0 0 3 0 ' + str(len(colors)) \
                + ' '.join([' 0 %s 255 1'%(' '.join([str(int(x*255.0)) for x in color])) for color in reversed(colors)])]
    elif format == 'raw':
      colormap = ['\t'.join(map(str,color)) for color in colors]
    elif format == 'list':
      colormap = colors
    else:
      raise NameError('unknown color export format')

    return '\n'.join(colormap) + '\n' if type(colormap[0]) is str else colormap

@@ -101,8 +101,6 @@ class Texture(Section):
class Material():
  """Reads, manipulates and writes material.config files"""

-  __slots__ = ['data']
-
  def __init__(self,verbose=True):
    """Generates ordered list of parts"""
    self.parts = [

@@ -41,7 +41,7 @@ parser.add_option('-f','--formula',
parser.add_option('-c','--condition',
                  dest = 'condition', metavar='string',
-                 help = 'condition to filter rows')
+                 help = 'condition to alter existing column data')

parser.set_defaults(condition = None,
                   )
@@ -77,28 +77,27 @@ for name in filenames:
# --------------------------------------- evaluate condition ---------------------------------------
  if options.condition is not None:
-   interpolator = []
    condition = options.condition              # copy per file, since might be altered inline
    breaker = False

-   for position,operand in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))):   # find three groups
-     condition = condition.replace('#'+operand[0]+'#',
-                                   {  '': '{%i}'%position,
-                                    's#':'"{%i}"'%position}[operand[1]])
-     if operand[2] in specials:               # special label
-       interpolator += ['specials["%s"]'%operand[2]]
-     else:
-       try:
-         interpolator += ['%s(table.data[%i])'%({  '':'float',
-                                                 's#':'str'}[operand[1]],
-                                                table.label_index(operand[2]))]   # could be generalized to indexrange as array lookup
-       except:
-         damask.util.croak('column "{}" not found.'.format(operand[2]))
-         breaker = True
-
-   if breaker: continue                       # found mistake in condition evaluation --> next file
-
-   evaluator_condition = "'" + condition + "'.format(" + ','.join(interpolator) + ")"
+   for position,(all,marker,column) in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))):   # find three groups
+     idx = table.label_index(column)
+     dim = table.label_dimension(column)
+     if idx < 0 and column not in specials:
+       damask.util.croak('column "{}" not found.'.format(column))
+       breaker = True
+     else:
+       if column in specials:
+         replacement = 'specials["{}"]'.format(column)
+       elif dim == 1:                         # scalar input
+         replacement = '{}(table.data[{}])'.format({  '':'float',
+                                                    's#':'str'}[marker],idx)   # take float or string value of data column
+       elif dim > 1:                          # multidimensional input (vector, tensor, etc.)
+         replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim)   # use (flat) array representation
+       condition = condition.replace('#'+all+'#',replacement)
+
+   if breaker: continue                       # found mistake in condition evaluation --> next file

# ------------------------------------------ build formulas ----------------------------------------
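The rewritten condition handling substitutes each '#...#' placeholder directly into the condition string before it is eval'd, instead of building a separate format-string interpolator: scalar columns become float()/str() lookups, multi-dimensional columns become flat numpy arrays. The standalone sketch below (not part of the commit) replays that substitution on a made-up one-row table; label_index/label_dimension stand in for the ASCIItable methods of the same name, and the variable names follow the hunk above.

import re
import numpy as np

data = ['0.1', '0.2', '0.3', '2']              # one row: 1_pos, 2_pos, 3_pos, texture (hypothetical)

def label_index(c):     return 0 if c == 'pos' else (3 if c == 'texture' else -1)
def label_dimension(c): return 3 if c == 'pos' else (1 if c == 'texture' else -1)

condition = '#texture# == 2 and np.linalg.norm(#pos#) < 1.0'
for all, marker, column in set(re.findall(r'#(([s]#)?(.+?))#', condition)):
    idx, dim = label_index(column), label_dimension(column)
    if dim == 1:                               # scalar --> float or str of that cell
        replacement = '{}(data[{}])'.format({'': 'float', 's#': 'str'}[marker], idx)
    else:                                      # vector/tensor --> flat numpy array
        replacement = 'np.array(data[{}:{}],dtype=float)'.format(idx, idx+dim)
    condition = condition.replace('#'+all+'#', replacement)

print(condition)        # float(data[3]) == 2 and np.linalg.norm(np.array(data[0:3],dtype=float)) < 1.0
print(eval(condition))  # True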
@@ -162,7 +161,7 @@ for name in filenames:
# -------------------------------------- evaluate formulas -----------------------------------------
-   if options.condition is None or eval(eval(evaluator_condition)):   # condition for veteran replacement fulfilled
+   if options.condition is None or eval(condition):                   # condition for veteran replacement fulfilled
      for veteran in veterans:                                         # evaluate formulas that overwrite
        table.data[table.label_index(veteran):
                   table.label_index(veteran)+table.label_dimension(veteran)] = \

@@ -14,7 +14,7 @@ scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [file[s]]', description = """
-Add data of selected column(s) from (first) row of second ASCIItable that shares the linking column value.
+Add data of selected column(s) from (first) row of linked ASCIItable that shares the linking column value.

""", version = scriptID)
@@ -25,7 +25,7 @@ parser.add_option('--link',
parser.add_option('-l','--label',
                  dest = 'label',
                  action = 'extend', metavar = '<string LIST>',
-                 help = 'column label(s) to be appended')
+                 help = 'column label(s) to add from linked ASCIItable')
parser.add_option('-a','--asciitable',
                  dest = 'asciitable',
                  type = 'string', metavar = 'string',
@@ -55,6 +55,9 @@ if options.asciitable is not None and os.path.isfile(options.asciitable):
  if len(missing_labels) > 0:
    damask.util.croak('column{} {} not found...'.format('s' if len(missing_labels) > 1 else '',', '.join(missing_labels)))
+  if len(missing_labels) >= len(options.label):
+    damask.util.croak('aborting...')
+    sys.exit()

  index = linkedTable.data[:,:linkDim]
  data  = linkedTable.data[:,linkDim:]
@@ -69,7 +72,8 @@ for name in filenames:
  try:    table = damask.ASCIItable(name = name,
                                    buffered = False)
  except: continue
- damask.util.report(scriptName,name)
+ damask.util.report(scriptName,"{} {} <== {} {}".format(name,damask.util.deemph('@ '+options.link[0]),
+                                                        options.asciitable,damask.util.deemph('@ '+options.link[1])))

# ------------------------------------------ read header ------------------------------------------

@@ -42,14 +42,22 @@ parser.add_option('-r', '--crystalrotation',
                  dest='crystalrotation',
                  type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
                  help = 'angle and axis of additional crystal frame rotation')
-parser.add_option('-e', '--eulers',
+parser.add_option(      '--eulers',
                  dest = 'eulers',
                  type = 'string', metavar = 'string',
                  help = 'Euler angles label')
-parser.add_option('-m', '--matrix',
+parser.add_option(      '--rodrigues',
+                  dest = 'rodrigues',
+                  type = 'string', metavar = 'string',
+                  help = 'Rodrigues vector label')
+parser.add_option(      '--matrix',
                  dest = 'matrix',
                  type = 'string', metavar = 'string',
                  help = 'orientation matrix label')
+parser.add_option(      '--quaternion',
+                  dest = 'quaternion',
+                  type = 'string', metavar = 'string',
+                  help = 'quaternion label')
parser.add_option('-a',
                  dest = 'a',
                  type = 'string', metavar = 'string',
@@ -62,10 +70,6 @@ parser.add_option('-c',
                  dest = 'c',
                  type = 'string', metavar = 'string',
                  help = 'crystal frame c vector label')
-parser.add_option('-q', '--quaternion',
-                  dest = 'quaternion',
-                  type = 'string', metavar = 'string',
-                  help = 'quaternion label')

parser.set_defaults(output = [],
                    symmetry = damask.Symmetry.lattices[-1],
@@ -81,6 +85,7 @@ if options.output == [] or (not set(options.output).issubset(set(outputChoices))
  parser.error('output must be chosen from {}.'.format(', '.join(outputChoices)))

input = [options.eulers    is not None,
+        options.rodrigues is not None,
         options.a is not None and \
         options.b is not None and \
         options.c is not None,
@@ -91,6 +96,7 @@ input = [options.eulers is not None,
if np.sum(input) != 1: parser.error('needs exactly one input format.')

(label,dim,inputtype) = [(options.eulers,3,'eulers'),
+                        (options.rodrigues,3,'rodrigues'),
                         ([options.a,options.b,options.c],[3,3,3],'frame'),
                         (options.matrix,9,'matrix'),
                         (options.quaternion,4,'quaternion'),
@@ -143,6 +149,9 @@ for name in filenames:
    if inputtype == 'eulers':
      o = damask.Orientation(Eulers    = np.array(map(float,table.data[column:column+3]))*toRadians,
                             symmetry  = options.symmetry).reduced()
+   elif inputtype == 'rodrigues':
+     o = damask.Orientation(Rodrigues = np.array(map(float,table.data[column:column+3])),
+                            symmetry  = options.symmetry).reduced()
    elif inputtype == 'matrix':
      o = damask.Orientation(matrix    = np.array(map(float,table.data[column:column+9])).reshape(3,3).transpose(),
                             symmetry  = options.symmetry).reduced()
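For reference, the Rodrigues vector the new --rodrigues option reads is the usual axis-angle encoding r = axis * tan(angle/2); any further convention is up to damask.Orientation, which is not shown in this diff. A standalone sketch (not from the repository, made-up numbers) that turns such a vector into the equivalent unit quaternion and axis/angle pair:

import numpy as np

r = np.array([0.1, 0.2, 0.3])                 # hypothetical Rodrigues vector, r = axis * tan(angle/2)
w = 1.0/np.sqrt(1.0 + np.dot(r, r))           # cos(angle/2)
q = np.hstack([w, w*r])                       # unit quaternion (cos(angle/2), sin(angle/2)*axis)

angle = 2.0*np.arccos(w)
axis  = r/np.linalg.norm(r)
print(q, np.degrees(angle), axis)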

@@ -51,7 +51,7 @@ parser.add_option('-c','--condition',
                  dest = 'condition', metavar='string',
                  help = 'condition to filter rows')

-parser.set_defaults(condition = '',
+parser.set_defaults(condition = None,
                   )

(options,filenames) = parser.parse_args()
@@ -93,45 +93,61 @@ for name in filenames:
                 or fnmatch.fnmatch(label,needle) for needle in options.whitelist]   # which whitelist items do match it
      whitelistitem[i] = match.index(True) if np.sum(match) == 1 else -1             # unique match to a whitelist item --> store which

-   sorted = np.lexsort(sortingList(labels,whitelistitem))
-   order = range(len(labels)) if sorted[0] < 0 else sorted          # skip reordering if non-unique, i.e. first sorted is "-1"
+   order = range(len(labels)) if np.any(whitelistitem < 0) \
+      else np.lexsort(sortingList(labels,whitelistitem))            # reorder if unique, i.e. no "-1" in whitelistitem
  else:
    order = range(len(labels))                                       # maintain original order of labels

-  interpolator = []
-  condition = options.condition                                     # copy per file, might be altered
-  for position,operand in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))):   # find three groups
-    condition = condition.replace('#'+operand[0]+'#',
-                                  {  '': '{{{}}}' .format(position),
-                                   's#':'"{{{}}}"'.format(position)}[operand[1]])
-    if operand[2] in specials:                                       # special label ?
-      interpolator += ['specials["{}"]'.format(operand[2])]
-    else:
-      try:
-        interpolator += ['{}(table.data[{}])'.format({  '':'float',
-                                                       's#':'str'}[operand[1]],
-                                                      table.label_index(operand[2]))]
-      except:
-        parser.error('column "{}" not found...\n'.format(operand[2]))
-
-  evaluator = "'" + condition + "'.format(" + ','.join(interpolator) + ")"
+# --------------------------------------- evaluate condition ---------------------------------------
+  if options.condition is not None:
+    condition = options.condition                                    # copy per file, since might be altered inline
+    breaker = False
+
+    for position,(all,marker,column) in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))):   # find three groups
+      idx = table.label_index(column)
+      dim = table.label_dimension(column)
+      if idx < 0 and column not in specials:
+        damask.util.croak('column "{}" not found.'.format(column))
+        breaker = True
+      else:
+        if column in specials:
+          replacement = 'specials["{}"]'.format(column)
+        elif dim == 1:                                               # scalar input
+          replacement = '{}(table.data[{}])'.format({  '':'float',
+                                                      's#':'str'}[marker],idx)   # take float or string value of data column
+        elif dim > 1:                                                # multidimensional input (vector, tensor, etc.)
+          replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim)   # use (flat) array representation
+        condition = condition.replace('#'+all+'#',replacement)
+
+    if breaker: continue                                             # found mistake in condition evaluation --> next file

# ------------------------------------------ assemble header ---------------------------------------

  table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
  table.labels_clear()
  table.labels_append(np.array(labels)[order])                       # update with new label set
  table.head_write()

# ------------------------------------------ process and output data ------------------------------------------

  positions = np.array(positions)[order]

- outputAlive = True
- while outputAlive and table.data_read():                           # read next data line of ASCII table
-   specials['_row_'] += 1                                           # count row
-   if condition == '' or eval(eval(evaluator)):                     # valid row ?
-     table.data = [table.data[position] for position in positions]  # retain filtered columns
-     outputAlive = table.data_write()                               # output processed line
+ atOnce = options.condition is None
+ if atOnce:                                                          # read full array and filter columns
+   try:
+     table.data_readArray(positions+1)                               # read desired columns (indexed 1,...)
+     table.data_writeArray()                                         # directly write out
+   except:
+     atOnce = False                                                  # data contains items that prevent array chunking
+
+ if not atOnce:                                                      # read data line by line
+   outputAlive = True
+   while outputAlive and table.data_read():                          # read next data line of ASCII table
+     specials['_row_'] += 1                                          # count row
+     if options.condition is None or eval(condition):                # valid row ?
+       table.data = [table.data[position] for position in positions] # retain filtered columns
+       outputAlive = table.data_write()                              # output processed line

# ------------------------------------------ finalize output -----------------------------------------

View File

@@ -431,7 +431,7 @@ def mapIncremental(label, mapping, N, base, new):
            'avgabs': lambda n,b,a: (n*b+abs(a))/(n+1),
            'sum':    lambda n,b,a: a if n==0 else b+a,
            'sumabs': lambda n,b,a: abs(a) if n==0 else b+abs(a),
-           'unique': lambda n,b,a: a if n==0 or b==a else 'n/a'
+           'unique': lambda n,b,a: a if n==0 or b==a else 'nan'
          }
  if mapping in theMap:
    mapped = map(theMap[mapping],[N]*len(base),base,new)                        # map one of the standard functions to data
@@ -442,7 +442,7 @@ def mapIncremental(label, mapping, N, base, new):
    try:
      mapped = eval('map(%s,[N]*len(base),base,new)'%mapping)                   # map user defined function to colums in chunks
    except:
-     mapped = ['n/a']*len(base)
+     mapped = ['nan']*len(base)
  return mapped
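The lambdas above update a statistic one increment at a time; the running average of absolute values, for instance, uses avg_new = (n*avg_old + |a|)/(n+1). A quick standalone check (not DAMASK code) that this update reproduces the plain mean of the absolute values:

values = [2.0, -4.0, 6.0, -8.0]
avg = 0.0
for n, a in enumerate(values):
    avg = (n*avg + abs(a))/(n+1)          # same update rule as the 'avgabs' lambda
print(avg)                                # 5.0 == mean of [2, 4, 6, 8]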

View File

@@ -52,7 +52,7 @@ parser.set_defaults(data = [],
(options, filenames) = parser.parse_args()

-if not options.vtk:                 parser.error('No VTK file specified.')
+if not options.vtk:                 parser.error('no VTK file specified.')
 if not os.path.exists(options.vtk): parser.error('VTK file does not exist.')

 if os.path.splitext(options.vtk)[1] == '.vtp':
@@ -66,16 +66,16 @@ elif os.path.splitext(options.vtk)[1] == '.vtk':
   reader.Update()
   Polydata = reader.GetPolyDataOutput()
 else:
-  parser.error('Unsupported VTK file type extension.')
+  parser.error('unsupported VTK file type extension.')

 Npoints   = Polydata.GetNumberOfPoints()
 Ncells    = Polydata.GetNumberOfCells()
 Nvertices = Polydata.GetNumberOfVerts()

 if Npoints != Ncells or Npoints != Nvertices:
-  parser.error('Number of points, cells, and vertices in VTK differ from each other.')
+  parser.error('number of points, cells, and vertices in VTK differ from each other.')

-damask.util.croak('{}: {} points, {} vertices, and {} cells...'.format(options.vtk,Npoints,Nvertices,Ncells))
+damask.util.croak('{}: {} points/vertices/cells...'.format(options.vtk,Npoints))

 # --- loop over input files -------------------------------------------------------------------------
@@ -97,16 +97,19 @@ for name in filenames:
   VTKarray = {}
   active = defaultdict(list)

-  for datatype,dimension,label in [['data',99,options.data],
+  for datatype,dimension,label in [['data',0,options.data],
                                    ['tensor',9,options.tensor],
                                    ['color' ,3,options.color],
                                    ]:

     for i,dim in enumerate(table.label_dimension(label)):
       me = label[i]
       if dim == -1:          remarks.append('{} "{}" not found...'.format(datatype,me))
-      elif dim > dimension:  remarks.append('"{}" not of dimension {}...'.format(me,dimension))
+      elif dimension > 0 \
+       and dim != dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
       else:
-        remarks.append('adding {} "{}"...'.format(datatype,me))
+        remarks.append('adding {}{} "{}"...'.format(datatype if dim > 1 else 'scalar',
+                                                    '' if dimension > 0 or dim == 1 else '[{}]'.format(dim),
+                                                    me))
         active[datatype].append(me)

   if remarks != []: damask.util.croak(remarks)

View File

@@ -55,7 +55,7 @@ parser.set_defaults(data = [],
(options, filenames) = parser.parse_args()

-if not options.vtk:                 parser.error('No VTK file specified.')
+if not options.vtk:                 parser.error('no VTK file specified.')
 if not os.path.exists(options.vtk): parser.error('VTK file does not exist.')

 if os.path.splitext(options.vtk)[1] == '.vtr':
@@ -69,7 +69,7 @@ elif os.path.splitext(options.vtk)[1] == '.vtk':
   reader.Update()
   rGrid = reader.GetRectilinearGridOutput()
 else:
-  parser.error('Unsupported VTK file type extension.')
+  parser.error('unsupported VTK file type extension.')

 Npoints = rGrid.GetNumberOfPoints()
 Ncells  = rGrid.GetNumberOfCells()
@@ -96,16 +96,19 @@ for name in filenames:
   VTKarray = {}
   active = defaultdict(list)

-  for datatype,dimension,label in [['data',99,options.data],
+  for datatype,dimension,label in [['data',0,options.data],
                                    ['tensor',9,options.tensor],
                                    ['color' ,3,options.color],
                                    ]:

     for i,dim in enumerate(table.label_dimension(label)):
       me = label[i]
       if dim == -1:          remarks.append('{} "{}" not found...'.format(datatype,me))
-      elif dim > dimension:  remarks.append('"{}" not of dimension {}...'.format(me,dimension))
+      elif dimension > 0 \
+       and dim != dimension: remarks.append('"{}" not of dimension {}...'.format(me,dimension))
       else:
-        remarks.append('adding {} "{}"...'.format(datatype,me))
+        remarks.append('adding {}{} "{}"...'.format(datatype if dim > 1 else 'scalar',
+                                                    '' if dimension > 0 or dim == 1 else '[{}]'.format(dim),
+                                                    me))
         active[datatype].append(me)

   if remarks != []: damask.util.croak(remarks)
@@ -141,7 +144,7 @@ for name in filenames:
   if len(table.data) == Npoints:  mode = 'point'
   elif len(table.data) == Ncells: mode = 'cell'
   else:
-    damask.util.croak('Data count is incompatible with grid...')
+    damask.util.croak('data count is incompatible with grid...')
     continue

   damask.util.croak('{} mode...'.format(mode))

View File

@@ -61,7 +61,6 @@ for name in filenames:
# ------------------------------------------ process data ---------------------------------------

  table.data_readArray(options.pos)
-  if len(table.data.shape) < 2: table.data.shape += (1,)                       # expand to 2D shape
  if table.data.shape[1] < 3:
    table.data = np.hstack((table.data,
                            np.zeros((table.data.shape[0],

View File

@@ -70,7 +70,6 @@ for name in filenames:
# --------------- figure out size and grid ---------------------------------------------------------

  table.data_readArray(options.pos)
-  if len(table.data.shape) < 2: table.data.shape += (1,)                       # expand to 2D shape
  if table.data.shape[1] < 3:
    table.data = np.hstack((table.data,
                            np.zeros((table.data.shape[0],

View File

@@ -36,8 +36,9 @@ parser.add_option('-c', '--center', dest='center', type='float', nargs = 3,
 parser.add_option('-d', '--dimension', dest='dimension', type='float', nargs = 3, metavar=' '.join(['float']*3),
                   help='a,b,c extension of hexahedral box; negative values are diameters')
 parser.add_option('-e', '--exponent', dest='exponent', type='float', nargs = 3, metavar=' '.join(['float']*3),
-                  help='i,j,k exponents for axes - 2 gives a sphere (x^2 + y^2 + z^2 < 1), 1 makes \
-octahedron (|x| + |y| + |z| < 1). Large values produce boxes, 0 - 1 is concave. ')
+                  help='i,j,k exponents for axes - 0 gives octahedron (|x|^(2^0) + |y|^(2^0) + |z|^(2^0) < 1), \
+1 gives a sphere (|x|^(2^1) + |y|^(2^1) + |z|^(2^1) < 1), \
+large values produce boxes, negative turns concave.')
 parser.add_option('-f', '--fill', dest='fill', type='int', metavar = 'int',
                   help='grain index to fill primitive. "0" selects maximum microstructure index + 1 [%default]')
 parser.add_option('-q', '--quaternion', dest='quaternion', type='float', nargs = 4, metavar=' '.join(['float']*4),
@@ -48,15 +49,14 @@ parser.add_option( '--degrees', dest='degrees', action='store_true',
                   help = 'angle is given in degrees [%default]')
 parser.add_option( '--nonperiodic', dest='periodic', action='store_false',
                   help = 'wrap around edges [%default]')
-parser.add_option( '--voxelspace', dest='voxelspace', action='store_true',
-                  help = '-c and -d are given in (0 to grid) coordinates instead of (origin to origin+size) \
-coordinates [%default]')
+parser.add_option( '--realspace', dest='realspace', action='store_true',
+                  help = '-c and -d span [origin,origin+size] instead of [0,grid] coordinates')
 parser.set_defaults(center = (.0,.0,.0),
                     fill = 0,
                     degrees = False,
-                    exponent = (1e10,1e10,1e10), # box shape by default
+                    exponent = (20,20,20), # box shape by default
                     periodic = True,
-                    voxelspace = False
+                    realspace = False,
                    )

(options, filenames) = parser.parse_args()
@@ -74,14 +74,16 @@ else:
options.center    = np.array(options.center)
options.dimension = np.array(options.dimension)
+# undo logarithmic sense of exponent and generate ellipsoids for negative dimensions (backward compatibility)
+options.exponent = np.where(np.array(options.dimension) > 0, np.power(2,options.exponent), 2)
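With the remapped exponent, an input e selects the superellipsoid |x/a|^(2^e) + |y/b|^(2^e) + |z/c|^(2^e) <= 1, so e=0 gives an octahedron, e=1 a sphere/ellipsoid, and large e approaches a box. A standalone sketch of that inside test, not the script's own code (center and dimension are treated as semi-axes here purely for illustration):

import numpy as np

def inside(points, center, dimension, e):
    p = 2.0**np.asarray(e, dtype=float)            # undo the logarithmic sense of the exponent
    r = np.abs((points - center)/dimension)        # normalized distance per axis
    return np.sum(r**p, axis=-1) <= 1.0

pts = np.array([[0.9, 0.0, 0.0], [0.8, 0.8, 0.0]])
print(inside(pts, np.zeros(3), np.ones(3), [1, 1, 1]))   # sphere:   [ True False]
print(inside(pts, np.zeros(3), np.ones(3), [5, 5, 5]))   # near-box: [ True  True]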
# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
-  try:
-    table = damask.ASCIItable(name = name,
-                              buffered = False, labeled = False)
+  try: table = damask.ASCIItable(name = name,
+                                 buffered = False,
+                                 labeled = False)
  except: continue
  damask.util.report(scriptName,name)
@@ -115,45 +117,42 @@ for name in filenames:
             'microstructures': 0,
            }

-  if options.fill == 0:
-    options.fill = microstructure.max()+1
-
-  # If we have a negative dimension, make it an ellipsoid for backwards compatibility
-  options.exponent = np.where(np.array(options.dimension) > 0, options.exponent, 2)
+  options.fill = microstructure.max()+1 if options.fill == 0 else options.fill

  microstructure = microstructure.reshape(info['grid'],order='F')

  # coordinates given in real space (default) vs voxel space
-  if not options.voxelspace:
-    options.center    += info['origin']
+  if options.realspace:
+    options.center    -= info['origin']
    options.center    *= np.array(info['grid']) / np.array(info['size'])
    options.dimension *= np.array(info['grid']) / np.array(info['size'])
-  size = microstructure.shape
+  grid = microstructure.shape

# change to coordinate space where the primitive is the unit sphere/cube/etc
  if options.periodic:                                                          # use padding to achieve periodicity
-    (X, Y, Z) = np.meshgrid(np.arange(-size[0]/2, (3*size[0])/2, dtype=np.float32),  # 50% padding on each side
-                            np.arange(-size[1]/2, (3*size[1])/2, dtype=np.float32),
-                            np.arange(-size[2]/2, (3*size[2])/2, dtype=np.float32),
+    (X, Y, Z) = np.meshgrid(np.arange(-grid[0]/2, (3*grid[0])/2, dtype=np.float32),  # 50% padding on each side
+                            np.arange(-grid[1]/2, (3*grid[1])/2, dtype=np.float32),
+                            np.arange(-grid[2]/2, (3*grid[2])/2, dtype=np.float32),
                            indexing='ij')
    # Padding handling
    X = np.roll(np.roll(np.roll(X,
-               -size[0]/2, axis=0),
-               -size[1]/2, axis=1),
-               -size[2]/2, axis=2)
+               -grid[0]/2, axis=0),
+               -grid[1]/2, axis=1),
+               -grid[2]/2, axis=2)
    Y = np.roll(np.roll(np.roll(Y,
-               -size[0]/2, axis=0),
-               -size[1]/2, axis=1),
-               -size[2]/2, axis=2)
+               -grid[0]/2, axis=0),
+               -grid[1]/2, axis=1),
+               -grid[2]/2, axis=2)
    Z = np.roll(np.roll(np.roll(Z,
-               -size[0]/2, axis=0),
-               -size[1]/2, axis=1),
-               -size[2]/2, axis=2)
+               -grid[0]/2, axis=0),
+               -grid[1]/2, axis=1),
+               -grid[2]/2, axis=2)
  else:                                                                         # nonperiodic, much lighter on resources
    # change to coordinate space where the primitive is the unit sphere/cube/etc
-    (X, Y, Z) = np.meshgrid(np.arange(0, size[0], dtype=np.float32),
-                            np.arange(0, size[1], dtype=np.float32),
-                            np.arange(0, size[2], dtype=np.float32),
+    (X, Y, Z) = np.meshgrid(np.arange(0, grid[0], dtype=np.float32),
+                            np.arange(0, grid[1], dtype=np.float32),
+                            np.arange(0, grid[2], dtype=np.float32),
                            indexing='ij')
# first by translating the center onto 0, 0.5 shifts the voxel origin onto the center of the voxel
@@ -174,27 +173,27 @@ for name in filenames:
  np.seterr(over='ignore', under='ignore')
  if options.periodic:                                                          # use padding to achieve periodicity
-    inside = np.zeros(size, dtype=bool)
+    inside = np.zeros(grid, dtype=bool)
    for i in range(2):
      for j in range(2):
        for k in range(2):
          inside = inside | (                                                   # Most of this is handling the padding
-                    np.abs(X[size[0] * i : size[0] * (i+1),
-                             size[1] * j : size[1] * (j+1),
-                             size[2] * k : size[2] * (k+1)])**options.exponent[0] +
-                    np.abs(Y[size[0] * i : size[0] * (i+1),
-                             size[1] * j : size[1] * (j+1),
-                             size[2] * k : size[2] * (k+1)])**options.exponent[1] +
-                    np.abs(Z[size[0] * i : size[0] * (i+1),
-                             size[1] * j : size[1] * (j+1),
-                             size[2] * k : size[2] * (k+1)])**options.exponent[2] < 1)
+                    np.abs(X[grid[0] * i : grid[0] * (i+1),
+                             grid[1] * j : grid[1] * (j+1),
+                             grid[2] * k : grid[2] * (k+1)])**options.exponent[0] +
+                    np.abs(Y[grid[0] * i : grid[0] * (i+1),
+                             grid[1] * j : grid[1] * (j+1),
+                             grid[2] * k : grid[2] * (k+1)])**options.exponent[1] +
+                    np.abs(Z[grid[0] * i : grid[0] * (i+1),
+                             grid[1] * j : grid[1] * (j+1),
+                             grid[2] * k : grid[2] * (k+1)])**options.exponent[2] <= 1.0)

    microstructure = np.where(inside, options.fill, microstructure)

  else:                                                                         # nonperiodic, much lighter on resources
    microstructure = np.where(np.abs(X)**options.exponent[0] +
                              np.abs(Y)**options.exponent[1] +
-                              np.abs(Z)**options.exponent[2] < 1, options.fill, microstructure)
+                              np.abs(Z)**options.exponent[2] <= 1.0, options.fill, microstructure)

  np.seterr(**old_settings)                                                     # Reset warnings to old state
  newInfo['microstructures'] = microstructure.max()
@@ -207,14 +206,13 @@ for name in filenames:
#--- write header ---------------------------------------------------------------------------------
  table.info_clear()
-  table.info_append([
+  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
-    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
-    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
-    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
-    "homogenization\t{homog}".format(homog=info['homogenization']),
-    "microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
-    extra_header
+    "grid\ta {}\tb {}\tc {}".format(*info['grid']),
+    "size\tx {}\ty {}\tz {}".format(*info['size']),
+    "origin\tx {}\ty {}\tz {}".format(*info['origin']),
+    "homogenization\t{}".format(info['homogenization']),
+    "microstructures\t{}".format(newInfo['microstructures']),
    ])
  table.labels_clear()
  table.head_write()

View File

@@ -143,14 +143,13 @@ for name in filenames:
# --- write header ---------------------------------------------------------------------------------
  table.info_clear()
-  table.info_append([
+  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {}\tb {}\tc {}".format(*newInfo['grid']),
    "size\tx {}\ty {}\tz {}".format(*newInfo['size']),
    "origin\tx {}\ty {}\tz {}".format(*newInfo['origin']),
    "homogenization\t{}".format(info['homogenization']),
    "microstructures\t{}".format(newInfo['microstructures']),
-    extra_header
    ])
  table.labels_clear()
  table.head_write()

View File

@@ -37,12 +37,9 @@ def findClosestSeed(fargs):
  return np.argmin(dist)                                                        # seed point closest to point

-def laguerreTessellation(undeformed, coords, weights, grains, nonperiodic = False, cpus = 2):
+def laguerreTessellation(undeformed, coords, weights, grains, periodic = True, cpus = 2):

  copies = \
-    np.array([
-      [ 0, 0, 0 ],
-    ]).astype(float) if nonperiodic else \
    np.array([
      [ -1,-1,-1 ],
      [  0,-1,-1 ],
@@ -71,7 +68,10 @@ def laguerreTessellation(undeformed, coords, weights, grains, nonperiodic = Fals
      [ -1, 1, 1 ],
      [  0, 1, 1 ],
      [  1, 1, 1 ],
-    ]).astype(float)*info['size']
+    ]).astype(float)*info['size'] if periodic else \
+    np.array([
+      [ 0, 0, 0 ],
+    ]).astype(float)

  repeatweights = np.tile(weights,len(copies)).flatten(order='F')               # Laguerre weights (1,2,3,1,2,3,...,1,2,3)
  for i,vec in enumerate(copies):                                               # periodic copies of seed points ...
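For the periodic case every seed is replicated by the 27 offset vectors listed above (all combinations of -1/0/+1 per axis, scaled by the box size). The same table can be generated programmatically; a standalone sketch with a made-up box size, equivalent up to ordering:

import itertools
import numpy as np

size = np.array([1.0, 1.0, 1.0])                                   # example box size
copies = np.array(list(itertools.product([-1, 0, 1], repeat=3)), dtype=float) * size
print(len(copies))                                                 # 27 offsets, including [0, 0, 0]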
@@ -121,8 +121,8 @@ group.add_option('--cpus',
                 type = 'int', metavar = 'int',
                 help = 'number of parallel processes to use for Laguerre tessellation [%default]')
group.add_option('--nonperiodic',
-                 dest = 'nonperiodic',
-                 action = 'store_true',
+                 dest = 'periodic',
+                 action = 'store_false',
                 help = 'nonperiodic tessellation')

parser.add_option_group(group)
@@ -144,6 +144,10 @@ group.add_option('-o',
                 dest = 'origin',
                 type = 'float', nargs = 3, metavar=' '.join(['float']*3),
                 help = 'origin of grid')
+group.add_option('--nonnormalized',
+                 dest = 'normalized',
+                 action = 'store_false',
+                 help = 'seed coordinates are not normalized to a unit cube')

parser.add_option_group(group)
@@ -206,7 +210,8 @@ parser.set_defaults(pos = 'pos',
                    phase = 1,
                    cpus = 2,
                    laguerre = False,
-                    nonperiodic = False,
+                    periodic = True,
+                    normalized = True,
                    config = True,
                   )

(options,filenames) = parser.parse_args()
@@ -248,7 +253,7 @@ for name in filenames:
  for i in range(3):
    if info['size'][i] <= 0.0:                                                  # any invalid size?
      info['size'][i] = float(info['grid'][i])/max(info['grid'])                # normalize to grid
-      remarks.append('rescaling size {} to {}...'.format({0:'x',1:'y',2:'z'}[i],info['size'][i]))
+      remarks.append('rescaling size {} to {}...'.format(['x','y','z'][i],info['size'][i]))

  if table.label_dimension(options.pos) != 3:
    errors.append('seed positions "{}" have dimension {}.'.format(options.pos,
@@ -256,6 +261,7 @@ for name in filenames:
  else:
    labels += [options.pos]

+  if not options.normalized: remarks.append('using real-space seed coordinates...')
  if not hasEulers: remarks.append('missing seed orientations...')
  else:             labels += [options.eulers]
  if not hasGrains: remarks.append('missing seed microstructure indices...')
@@ -272,7 +278,8 @@ for name in filenames:
# ------------------------------------------ read seeds ---------------------------------------

  table.data_readArray(labels)
-  coords = table.data[:,table.label_indexrange(options.pos)] * info['size']
+  coords = table.data[:,table.label_indexrange(options.pos)] * info['size'] if options.normalized \
+      else table.data[:,table.label_indexrange(options.pos)] - info['origin']
  eulers = table.data[:,table.label_indexrange(options.eulers)] if hasEulers \
      else np.zeros(3*len(coords))
  grains = table.data[:,table.label_indexrange(options.microstructure)].astype('i') if hasGrains \
@@ -291,7 +298,7 @@ for name in filenames:
  damask.util.croak('tessellating...')

  grid = np.vstack(meshgrid2(x, y, z)).reshape(3,-1).T
-  indices = laguerreTessellation(grid, coords, weights, grains, options.nonperiodic, options.cpus)
+  indices = laguerreTessellation(grid, coords, weights, grains, options.periodic, options.cpus)

# --- write header ------------------------------------------------------------------------

View File

@@ -95,14 +95,13 @@ for name in filenames:
  table.labels_clear()
  table.info_clear()
-  table.info_append([
+  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=info['microstructures']),
-    extra_header
    ])
  table.head_write()

102
processing/pre/geom_renumber.py Executable file
View File

@@ -0,0 +1,102 @@
#!/usr/bin/env python2.7
# -*- coding: UTF-8 no BOM -*-
import os,sys,math
import numpy as np
import damask
from optparse import OptionParser
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [file[s]]', description = """
renumber sorted microstructure indices to 1,...,N.
""", version=scriptID)
(options, filenames) = parser.parse_args()
# --- loop over input files ----------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try: table = damask.ASCIItable(name = name,
buffered = False,
labeled = False)
except: continue
damask.util.report(scriptName,name)
# --- interpret header ---------------------------------------------------------------------------
table.head_read()
info,extra_header = table.head_getGeom()
damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
'size x y z: %s'%(' x '.join(map(str,info['size']))),
'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
'homogenization: %i'%info['homogenization'],
'microstructures: %i'%info['microstructures'],
])
errors = []
if np.any(info['grid'] < 1): errors.append('invalid grid a b c.')
if np.any(info['size'] <= 0.0): errors.append('invalid size x y z.')
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# --- read data ----------------------------------------------------------------------------------
microstructure = table.microstructure_read(info['grid']) # read microstructure
# --- do work ------------------------------------------------------------------------------------
newInfo = {
'origin': np.zeros(3,'d'),
'microstructures': 0,
}
grainIDs = np.unique(microstructure)
renumbered = np.copy(microstructure)
for i, oldID in enumerate(grainIDs):
renumbered = np.where(microstructure == oldID, i+1, renumbered)
newInfo['microstructures'] = len(grainIDs)
# --- report -------------------------------------------------------------------------------------
remarks = []
if ( newInfo['microstructures'] != info['microstructures']):
remarks.append('--> microstructures: %i'%newInfo['microstructures'])
if remarks != []: damask.util.croak(remarks)
# --- write header -------------------------------------------------------------------------------
table.labels_clear()
table.info_clear()
table.info_append(extra_header+[
scriptID + ' ' + ' '.join(sys.argv[1:]),
"grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
"size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
"origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
"homogenization\t{homog}".format(homog=info['homogenization']),
"microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
])
table.head_write()
# --- write microstructure information -----------------------------------------------------------
format = '%{}i'.format(int(math.floor(math.log10(newInfo['microstructures'])+1)))
table.data = renumbered.reshape((info['grid'][0],info['grid'][1]*info['grid'][2]),order='F').transpose()
table.data_writeArray(format,delimiter = ' ')
# --- output finalization ------------------------------------------------------------------------
table.close() # close ASCII table
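The renumbering loop above maps the sorted set of existing grain IDs onto 1,...,N one ID at a time. An equivalent vectorized formulation (a standalone sketch, not part of the script) uses the inverse index that np.unique already provides:

import numpy as np

microstructure = np.array([7, 3, 3, 42, 7])
_, inverse = np.unique(microstructure, return_inverse=True)
renumbered = inverse + 1                       # sorted unique IDs mapped to 1,...,N
print(renumbered)                              # [2 1 1 3 2]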

View File

@@ -139,14 +139,13 @@ for name in filenames:
# --- write header ---------------------------------------------------------------------------------
  table.info_clear()
-  table.info_append([
+  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
-    extra_header
    ])
  table.labels_clear()
  table.head_write()

View File

@@ -49,16 +49,18 @@ parser.set_defaults(degrees = False,
(options, filenames) = parser.parse_args()

-if sum(x is not None for x in [options.rotation,options.eulers,options.matrix,options.quaternion]) !=1:
+if sum(x is not None for x in [options.rotation,options.eulers,options.matrix,options.quaternion]) != 1:
  parser.error('not exactly one rotation specified...')

-toRadian = math.pi/180. if options.degrees else 1.0
eulers = np.array(damask.orientation.Orientation(
-                  quaternion=np.array(options.quaternion) if options.quaternion else None,
-                  angleAxis =np.array(options.rotation)   if options.rotation   else None,
-                  matrix    =np.array(options.matrix)     if options.matrix     else None,
-                  Eulers    =np.array(options.eulers)*toRadian if options.eulers else None
-                  ).asEulers()) *180./math.pi
+                  quaternion = np.array(options.quaternion) if options.quaternion else None,
+                  angleAxis  = np.array(options.rotation)   if options.rotation   else None,
+                  matrix     = np.array(options.matrix)     if options.matrix     else None,
+                  Eulers     = np.array(options.eulers)     if options.eulers     else None,
+                  degrees    = options.degrees,
+                  ).asEulers(degrees=True))
+damask.util.croak('{} {} {}'.format(*eulers))
# --- loop over input files ------------------------------------------------------------------------- # --- loop over input files -------------------------------------------------------------------------
@@ -67,7 +69,8 @@ if filenames == []: filenames = [None]
for name in filenames:
  try:
    table = damask.ASCIItable(name = name,
-                              buffered = False, labeled = False)
+                              buffered = False,
+                              labeled = False)
  except: continue
  damask.util.report(scriptName,name)
@@ -76,11 +79,11 @@ for name in filenames:
  table.head_read()
  info,extra_header = table.head_getGeom()

-  damask.util.croak(['grid a b c: %s'%(' x '.join(map(str,info['grid']))),
-                     'size x y z: %s'%(' x '.join(map(str,info['size']))),
-                     'origin x y z: %s'%(' : '.join(map(str,info['origin']))),
-                     'homogenization: %i'%info['homogenization'],
-                     'microstructures: %i'%info['microstructures'],
+  damask.util.croak(['grid a b c: {}'.format(' x '.join(map(str,info['grid']))),
+                     'size x y z: {}'.format(' x '.join(map(str,info['size']))),
+                     'origin x y z: {}'.format(' : '.join(map(str,info['origin']))),
+                     'homogenization: {}'.format(info['homogenization']),
+                     'microstructures: {}'.format(info['microstructures']),
                    ])

  errors = []
@@ -95,10 +98,10 @@ for name in filenames:
  microstructure = table.microstructure_read(info['grid']).reshape(info['grid'],order='F')           # read microstructure

-  newGrainID = options.fill if options.fill > 0 else microstructure.max()+1
-  microstructure = ndimage.rotate(microstructure,eulers[2],(0,1),order=0,output=int,cval=newGrainID)                 # rotation around Z
-  microstructure = ndimage.rotate(microstructure,eulers[1],(1,2),order=0,output=int,cval=newGrainID)                 # rotation around X
-  microstructure = ndimage.rotate(microstructure,eulers[0],(0,1),order=0,output=int,cval=newGrainID)                 # rotation around Z
+  newGrainID = options.fill if options.fill != 0 else microstructure.max()+1
+  microstructure = ndimage.rotate(microstructure,eulers[2],(0,1),order=0,prefilter=False,output=int,cval=newGrainID) # rotation around Z
+  microstructure = ndimage.rotate(microstructure,eulers[1],(1,2),order=0,prefilter=False,output=int,cval=newGrainID) # rotation around X
+  microstructure = ndimage.rotate(microstructure,eulers[0],(0,1),order=0,prefilter=False,output=int,cval=newGrainID) # rotation around Z

# --- do work ------------------------------------------------------------------------------------
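The rotations above use order=0 (nearest neighbour) and prefilter=False so that only existing microstructure indices, plus the fill value, can appear in the rotated grid; spline interpolation would blend integer labels into meaningless intermediate IDs. A standalone illustration (made-up 2D label array, not the script's data):

import numpy as np
from scipy import ndimage

labels = np.zeros((4, 4), dtype=int)
labels[:, 2:] = 5                                                  # two 'grains': 0 and 5
rotated = ndimage.rotate(labels, 30, order=0, prefilter=False, output=int, cval=9)
print(np.unique(rotated))                                          # only 0, 5 and the fill value 9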
@@ -124,14 +127,13 @@ for name in filenames:
  table.labels_clear()
  table.info_clear()
-  table.info_append([
+  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=newInfo['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=newInfo['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
-    extra_header
    ])
  table.head_write()

View File

@@ -112,14 +112,13 @@ for name in filenames:
  table.labels_clear()
  table.info_clear()
-  table.info_append([
+  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=newInfo['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
-    extra_header
    ])
  table.head_write()

View File

@@ -94,14 +94,13 @@ for name in filenames:
  table.labels_clear()
  table.info_clear()
-  table.info_append([
+  table.info_append(extra_header+[
    scriptID + ' ' + ' '.join(sys.argv[1:]),
    "grid\ta {grid[0]}\tb {grid[1]}\tc {grid[2]}".format(grid=info['grid']),
    "size\tx {size[0]}\ty {size[1]}\tz {size[2]}".format(size=info['size']),
    "origin\tx {origin[0]}\ty {origin[1]}\tz {origin[2]}".format(origin=info['origin']),
    "homogenization\t{homog}".format(homog=info['homogenization']),
    "microstructures\t{microstructures}".format(microstructures=newInfo['microstructures']),
-    extra_header
    ])
  table.head_write()

View File

@@ -31,7 +31,7 @@ parser.add_option('-b',
                  help = 'blacklist of grain IDs')
parser.add_option('-p',
                  '--pos', '--seedposition',
-                  dest = 'position',
+                  dest = 'pos',
                  type = 'string', metavar = 'string',
                  help = 'label of coordinates [%default]')

0
src/DAMASK_spectral.f90 Executable file → Normal file
View File

View File

@@ -6,7 +6,6 @@
!> @brief Mathematical library, including random number generation and tensor represenations
!--------------------------------------------------------------------------------------------------
module math
- use, intrinsic :: iso_c_binding
 use prec, only: &
   pReal, &
   pInt
@@ -161,13 +160,10 @@ module math
   math_rotate_forward3333, &
   math_limit
 private :: &
-   math_partition, &
   halton, &
   halton_memory, &
   halton_ndim_set, &
-   halton_seed_set, &
-   i_to_halton, &
-   prime
+   halton_seed_set

contains
@@ -289,60 +285,55 @@ recursive subroutine math_qsort(a, istart, iend)
 integer(pInt) :: ipivot

 if (istart < iend) then
-   ipivot = math_partition(a,istart, iend)
+   ipivot = qsort_partition(a,istart, iend)
   call math_qsort(a, istart, ipivot-1_pInt)
   call math_qsort(a, ipivot+1_pInt, iend)
 endif
!--------------------------------------------------------------------------------------------------
contains
!-------------------------------------------------------------------------------------------------
!> @brief Partitioning required for quicksort
!-------------------------------------------------------------------------------------------------
integer(pInt) function qsort_partition(a, istart, iend)
implicit none
integer(pInt), dimension(:,:), intent(inout) :: a
integer(pInt), intent(in) :: istart,iend
integer(pInt) :: i,j,k,tmp
do
! find the first element on the right side less than or equal to the pivot point
do j = iend, istart, -1_pInt
if (a(1,j) <= a(1,istart)) exit
enddo
! find the first element on the left side greater than the pivot point
do i = istart, iend
if (a(1,i) > a(1,istart)) exit
enddo
if (i < j) then ! if the indexes do not cross, exchange values
do k = 1_pInt, int(size(a,1_pInt), pInt)
tmp = a(k,i)
a(k,i) = a(k,j)
a(k,j) = tmp
enddo
else ! if they do cross, exchange left value with pivot and return with the partition index
do k = 1_pInt, int(size(a,1_pInt), pInt)
tmp = a(k,istart)
a(k,istart) = a(k,j)
a(k,j) = tmp
enddo
qsort_partition = j
return
endif
enddo
end function qsort_partition
end subroutine math_qsort
!--------------------------------------------------------------------------------------------------
!> @brief Partitioning required for quicksort
!--------------------------------------------------------------------------------------------------
integer(pInt) function math_partition(a, istart, iend)
implicit none
integer(pInt), dimension(:,:), intent(inout) :: a
integer(pInt), intent(in) :: istart,iend
integer(pInt) :: d,i,j,k,x,tmp
d = int(size(a,1_pInt), pInt) ! number of linked data
! set the starting and ending points, and the pivot point
i = istart
j = iend
x = a(1,istart)
do
! find the first element on the right side less than or equal to the pivot point
do j = j, istart, -1_pInt
if (a(1,j) <= x) exit
enddo
! find the first element on the left side greater than the pivot point
do i = i, iend
if (a(1,i) > x) exit
enddo
if (i < j) then ! if the indexes do not cross, exchange values
do k = 1_pInt,d
tmp = a(k,i)
a(k,i) = a(k,j)
a(k,j) = tmp
enddo
else ! if they do cross, exchange left value with pivot and return with the partition index
do k = 1_pInt,d
tmp = a(k,istart)
a(k,istart) = a(k,j)
a(k,j) = tmp
enddo
math_partition = j
return
endif
enddo
end function math_partition
!--------------------------------------------------------------------------------------------------
!> @brief range of integers starting at one
!--------------------------------------------------------------------------------------------------
@@ -2189,6 +2180,52 @@ subroutine halton(ndim, r)
 value_halton(1) = 1_pInt
 call halton_memory ('INC', 'SEED', 1_pInt, value_halton)
!--------------------------------------------------------------------------------------------------
contains
!-------------------------------------------------------------------------------------------------
!> @brief computes an element of a Halton sequence.
!> @details Only the absolute value of SEED is considered. SEED = 0 is allowed, and returns R = 0.
!> @details Halton Bases should be distinct prime numbers. This routine only checks that each base
!> @details is greater than 1.
!> @details Reference:
!> @details J.H. Halton: On the efficiency of certain quasi-random sequences of points in evaluating
!> @details multi-dimensional integrals, Numerische Mathematik, Volume 2, pages 84-90, 1960.
!> @author John Burkardt
!-------------------------------------------------------------------------------------------------
subroutine i_to_halton (seed, base, ndim, r)
use IO, only: &
IO_error
implicit none
integer(pInt), intent(in) :: &
ndim, & !< dimension of the sequence
seed !< index of the desired element
integer(pInt), intent(in), dimension(ndim) :: base !< Halton bases
real(pReal), intent(out), dimension(ndim) :: r !< the SEED-th element of the Halton sequence for the given bases
real(pReal), dimension(ndim) :: base_inv
integer(pInt), dimension(ndim) :: &
digit, &
seed2
seed2 = abs(seed)
r = 0.0_pReal
if (any (base(1:ndim) <= 1_pInt)) call IO_error(error_ID=405_pInt)
base_inv(1:ndim) = 1.0_pReal / real (base(1:ndim), pReal)
do while ( any ( seed2(1:ndim) /= 0_pInt) )
digit(1:ndim) = mod ( seed2(1:ndim), base(1:ndim))
r(1:ndim) = r(1:ndim) + real ( digit(1:ndim), pReal) * base_inv(1:ndim)
base_inv(1:ndim) = base_inv(1:ndim) / real ( base(1:ndim), pReal)
seed2(1:ndim) = seed2(1:ndim) / base(1:ndim)
enddo
end subroutine i_to_halton
end subroutine halton
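i_to_halton builds the SEED-th element of the sequence by writing the index in each base and mirroring its digits across the radix point (the radical inverse). A standalone Python illustration of the same computation for a single base (not DAMASK code):

def radical_inverse(index, base):
    r, inv = 0.0, 1.0/base
    while index > 0:
        index, digit = divmod(index, base)
        r += digit*inv
        inv /= base
    return r

# the 5th element of a 2D Halton sequence with bases 2 and 3
print([radical_inverse(5, b) for b in (2, 3)])     # [0.625, 0.777...]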
@@ -2205,6 +2242,8 @@ end subroutine halton
!> @author John Burkardt
!--------------------------------------------------------------------------------------------------
subroutine halton_memory (action_halton, name_halton, ndim, value_halton)
+ use IO, only: &
+   IO_lc

 implicit none
 character(len = *), intent(in) :: &
@@ -2214,8 +2253,8 @@ subroutine halton_memory (action_halton, name_halton, ndim, value_halton)
 integer(pInt), allocatable, save, dimension(:) :: base
 logical, save :: first_call = .true.
 integer(pInt), intent(in) :: ndim                                                                   !< dimension of the quantity
- integer(pInt):: i
 integer(pInt), save :: ndim_save = 0_pInt, seed = 1_pInt
+ integer(pInt) :: i

 if (first_call) then
   ndim_save = 1_pInt
@@ -2226,60 +2265,237 @@ subroutine halton_memory (action_halton, name_halton, ndim, value_halton)
!--------------------------------------------------------------------------------------------------
! Set
- if(action_halton(1:1) == 'S' .or. action_halton(1:1) == 's') then
-   if(name_halton(1:1) == 'B' .or. name_halton(1:1) == 'b') then
-     if(ndim_save /= ndim) then
-       deallocate(base)
-       ndim_save = ndim
-       allocate(base(ndim_save))
-     endif
-     base(1:ndim) = value_halton(1:ndim)
-   elseif(name_halton(1:1) == 'N' .or. name_halton(1:1) == 'n') then
+ actionHalton: if(IO_lc(action_halton(1:1)) == 's') then
+   nameSet: if(IO_lc(name_halton(1:1)) == 'b') then
+     if(ndim_save /= ndim) ndim_save = ndim
+     base = value_halton(1:ndim)
+   elseif(IO_lc(name_halton(1:1)) == 'n') then nameSet
     if(ndim_save /= value_halton(1)) then
-       deallocate(base)
       ndim_save = value_halton(1)
-       allocate(base(ndim_save))
-       do i = 1_pInt, ndim_save
-         base(i) = prime (i)
-       enddo
+       base = [(prime(i),i=1_pInt,ndim_save)]
     else
       ndim_save = value_halton(1)
     endif
-   elseif(name_halton(1:1) == 'S' .or. name_halton(1:1) == 's') then
+   elseif(IO_lc(name_halton(1:1)) == 's') then nameSet
     seed = value_halton(1)
-   endif
+   endif nameSet

!--------------------------------------------------------------------------------------------------
! Get
- elseif(action_halton(1:1) == 'G' .or. action_halton(1:1) == 'g') then
-   if(name_halton(1:1) == 'B' .or. name_halton(1:1) == 'b') then
+ elseif(IO_lc(action_halton(1:1)) == 'g') then actionHalton
+   nameGet: if(IO_lc(name_halton(1:1)) == 'b') then
     if(ndim /= ndim_save) then
-       deallocate(base)
       ndim_save = ndim
-       allocate(base(ndim_save))
-       do i = 1_pInt, ndim_save
-         base(i) = prime(i)
-       enddo
+       base = [(prime(i),i=1_pInt,ndim_save)]
     endif
     value_halton(1:ndim_save) = base(1:ndim_save)
-   elseif(name_halton(1:1) == 'N' .or. name_halton(1:1) == 'n') then
+   elseif(IO_lc(name_halton(1:1)) == 'n') then nameGet
     value_halton(1) = ndim_save
-   elseif(name_halton(1:1) == 'S' .or. name_halton(1:1) == 's') then
+   elseif(IO_lc(name_halton(1:1)) == 's') then nameGet
     value_halton(1) = seed
-   endif
+   endif nameGet

!--------------------------------------------------------------------------------------------------
! Increment
- elseif(action_halton(1:1) == 'I' .or. action_halton(1:1) == 'i') then
-   if(name_halton(1:1) == 'S' .or. name_halton(1:1) == 's') then
-     seed = seed + value_halton(1)
+ elseif(IO_lc(action_halton(1:1)) == 'i') then actionHalton
+   if(IO_lc(name_halton(1:1)) == 's') seed = seed + value_halton(1)
+ endif actionHalton
!--------------------------------------------------------------------------------------------------
contains
!--------------------------------------------------------------------------------------------------
!> @brief returns any of the first 1500 prime numbers.
!> @details n = 0 is legal, returning PRIME = 1.
!> @details Reference:
!> @details Milton Abramowitz and Irene Stegun: Handbook of Mathematical Functions,
!> @details US Department of Commerce, 1964, pages 870-873.
!> @details Daniel Zwillinger: CRC Standard Mathematical Tables and Formulae,
!> @details 30th Edition, CRC Press, 1996, pages 95-98.
!> @author John Burkardt
!--------------------------------------------------------------------------------------------------
integer(pInt) function prime(n)
use IO, only: &
IO_error
implicit none
integer(pInt), intent(in) :: n !< index of the desired prime number
integer(pInt), dimension(0:1500), parameter :: &
npvec = int([&
1, &
2, 3, 5, 7, 11, 13, 17, 19, 23, 29, &
31, 37, 41, 43, 47, 53, 59, 61, 67, 71, &
73, 79, 83, 89, 97, 101, 103, 107, 109, 113, &
127, 131, 137, 139, 149, 151, 157, 163, 167, 173, &
179, 181, 191, 193, 197, 199, 211, 223, 227, 229, &
233, 239, 241, 251, 257, 263, 269, 271, 277, 281, &
283, 293, 307, 311, 313, 317, 331, 337, 347, 349, &
353, 359, 367, 373, 379, 383, 389, 397, 401, 409, &
419, 421, 431, 433, 439, 443, 449, 457, 461, 463, &
467, 479, 487, 491, 499, 503, 509, 521, 523, 541, &
! 101:200
547, 557, 563, 569, 571, 577, 587, 593, 599, 601, &
607, 613, 617, 619, 631, 641, 643, 647, 653, 659, &
661, 673, 677, 683, 691, 701, 709, 719, 727, 733, &
739, 743, 751, 757, 761, 769, 773, 787, 797, 809, &
811, 821, 823, 827, 829, 839, 853, 857, 859, 863, &
877, 881, 883, 887, 907, 911, 919, 929, 937, 941, &
947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, &
1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, &
1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, &
1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, &
! 201:300
1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291, &
1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, &
1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451, &
1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, &
1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, &
1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, &
1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, &
1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, &
1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, &
1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987, &
! 301:400
1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, &
2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129, &
2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, &
2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287, &
2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, &
2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, &
2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, &
2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, &
2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687, &
2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, &
! 401:500
2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819, &
2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903, &
2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999, &
3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079, &
3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181, &
3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257, &
3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, &
3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413, &
3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, &
3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, &
! 501:600
3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643, &
3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, &
3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821, &
3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, &
3911, 3917, 3919, 3923, 3929, 3931, 3943, 3947, 3967, 3989, &
4001, 4003, 4007, 4013, 4019, 4021, 4027, 4049, 4051, 4057, &
4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139, &
4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 4229, 4231, &
4241, 4243, 4253, 4259, 4261, 4271, 4273, 4283, 4289, 4297, &
4327, 4337, 4339, 4349, 4357, 4363, 4373, 4391, 4397, 4409, &
! 601:700
4421, 4423, 4441, 4447, 4451, 4457, 4463, 4481, 4483, 4493, &
4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583, &
4591, 4597, 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657, &
4663, 4673, 4679, 4691, 4703, 4721, 4723, 4729, 4733, 4751, &
4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831, &
4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937, &
4943, 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003, &
5009, 5011, 5021, 5023, 5039, 5051, 5059, 5077, 5081, 5087, &
5099, 5101, 5107, 5113, 5119, 5147, 5153, 5167, 5171, 5179, &
5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279, &
! 701:800
5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387, &
5393, 5399, 5407, 5413, 5417, 5419, 5431, 5437, 5441, 5443, &
5449, 5471, 5477, 5479, 5483, 5501, 5503, 5507, 5519, 5521, &
5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, 5639, &
5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693, &
5701, 5711, 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791, &
5801, 5807, 5813, 5821, 5827, 5839, 5843, 5849, 5851, 5857, &
5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, 5927, 5939, &
5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053, &
6067, 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133, &
! 801:900
6143, 6151, 6163, 6173, 6197, 6199, 6203, 6211, 6217, 6221, &
6229, 6247, 6257, 6263, 6269, 6271, 6277, 6287, 6299, 6301, &
6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367, &
6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473, &
6481, 6491, 6521, 6529, 6547, 6551, 6553, 6563, 6569, 6571, &
6577, 6581, 6599, 6607, 6619, 6637, 6653, 6659, 6661, 6673, &
6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, 6761, &
6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833, &
6841, 6857, 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917, &
6947, 6949, 6959, 6961, 6967, 6971, 6977, 6983, 6991, 6997, &
! 901:1000
7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, 7079, 7103, &
7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207, &
7211, 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297, &
7307, 7309, 7321, 7331, 7333, 7349, 7351, 7369, 7393, 7411, &
7417, 7433, 7451, 7457, 7459, 7477, 7481, 7487, 7489, 7499, &
7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561, &
7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643, &
7649, 7669, 7673, 7681, 7687, 7691, 7699, 7703, 7717, 7723, &
7727, 7741, 7753, 7757, 7759, 7789, 7793, 7817, 7823, 7829, &
7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, 7919, &
! 1001:1100
7927, 7933, 7937, 7949, 7951, 7963, 7993, 8009, 8011, 8017, &
8039, 8053, 8059, 8069, 8081, 8087, 8089, 8093, 8101, 8111, &
8117, 8123, 8147, 8161, 8167, 8171, 8179, 8191, 8209, 8219, &
8221, 8231, 8233, 8237, 8243, 8263, 8269, 8273, 8287, 8291, &
8293, 8297, 8311, 8317, 8329, 8353, 8363, 8369, 8377, 8387, &
8389, 8419, 8423, 8429, 8431, 8443, 8447, 8461, 8467, 8501, &
8513, 8521, 8527, 8537, 8539, 8543, 8563, 8573, 8581, 8597, &
8599, 8609, 8623, 8627, 8629, 8641, 8647, 8663, 8669, 8677, &
8681, 8689, 8693, 8699, 8707, 8713, 8719, 8731, 8737, 8741, &
8747, 8753, 8761, 8779, 8783, 8803, 8807, 8819, 8821, 8831, &
! 1101:1200
8837, 8839, 8849, 8861, 8863, 8867, 8887, 8893, 8923, 8929, &
8933, 8941, 8951, 8963, 8969, 8971, 8999, 9001, 9007, 9011, &
9013, 9029, 9041, 9043, 9049, 9059, 9067, 9091, 9103, 9109, &
9127, 9133, 9137, 9151, 9157, 9161, 9173, 9181, 9187, 9199, &
9203, 9209, 9221, 9227, 9239, 9241, 9257, 9277, 9281, 9283, &
9293, 9311, 9319, 9323, 9337, 9341, 9343, 9349, 9371, 9377, &
9391, 9397, 9403, 9413, 9419, 9421, 9431, 9433, 9437, 9439, &
9461, 9463, 9467, 9473, 9479, 9491, 9497, 9511, 9521, 9533, &
9539, 9547, 9551, 9587, 9601, 9613, 9619, 9623, 9629, 9631, &
9643, 9649, 9661, 9677, 9679, 9689, 9697, 9719, 9721, 9733, &
! 1201:1300
9739, 9743, 9749, 9767, 9769, 9781, 9787, 9791, 9803, 9811, &
9817, 9829, 9833, 9839, 9851, 9857, 9859, 9871, 9883, 9887, &
9901, 9907, 9923, 9929, 9931, 9941, 9949, 9967, 9973, 10007, &
10009, 10037, 10039, 10061, 10067, 10069, 10079, 10091, 10093, 10099, &
10103, 10111, 10133, 10139, 10141, 10151, 10159, 10163, 10169, 10177, &
10181, 10193, 10211, 10223, 10243, 10247, 10253, 10259, 10267, 10271, &
10273, 10289, 10301, 10303, 10313, 10321, 10331, 10333, 10337, 10343, &
10357, 10369, 10391, 10399, 10427, 10429, 10433, 10453, 10457, 10459, &
10463, 10477, 10487, 10499, 10501, 10513, 10529, 10531, 10559, 10567, &
10589, 10597, 10601, 10607, 10613, 10627, 10631, 10639, 10651, 10657, &
! 1301:1400
10663, 10667, 10687, 10691, 10709, 10711, 10723, 10729, 10733, 10739, &
10753, 10771, 10781, 10789, 10799, 10831, 10837, 10847, 10853, 10859, &
10861, 10867, 10883, 10889, 10891, 10903, 10909, 10937, 10939, 10949, &
10957, 10973, 10979, 10987, 10993, 11003, 11027, 11047, 11057, 11059, &
11069, 11071, 11083, 11087, 11093, 11113, 11117, 11119, 11131, 11149, &
11159, 11161, 11171, 11173, 11177, 11197, 11213, 11239, 11243, 11251, &
11257, 11261, 11273, 11279, 11287, 11299, 11311, 11317, 11321, 11329, &
11351, 11353, 11369, 11383, 11393, 11399, 11411, 11423, 11437, 11443, &
11447, 11467, 11471, 11483, 11489, 11491, 11497, 11503, 11519, 11527, &
11549, 11551, 11579, 11587, 11593, 11597, 11617, 11621, 11633, 11657, &
! 1401:1500
11677, 11681, 11689, 11699, 11701, 11717, 11719, 11731, 11743, 11777, &
11779, 11783, 11789, 11801, 11807, 11813, 11821, 11827, 11831, 11833, &
11839, 11863, 11867, 11887, 11897, 11903, 11909, 11923, 11927, 11933, &
11939, 11941, 11953, 11959, 11969, 11971, 11981, 11987, 12007, 12011, &
12037, 12041, 12043, 12049, 12071, 12073, 12097, 12101, 12107, 12109, &
12113, 12119, 12143, 12149, 12157, 12161, 12163, 12197, 12203, 12211, &
12227, 12239, 12241, 12251, 12253, 12263, 12269, 12277, 12281, 12289, &
12301, 12323, 12329, 12343, 12347, 12373, 12377, 12379, 12391, 12401, &
12409, 12413, 12421, 12433, 12437, 12451, 12457, 12473, 12479, 12487, &
12491, 12497, 12503, 12511, 12517, 12527, 12539, 12541, 12547, 12553],pInt)
if (n < size(npvec)) then
prime = npvec(n)
else
call IO_error(error_ID=406_pInt)
 end if
end function prime
end subroutine halton_memory
@@ -2288,7 +2504,7 @@ end subroutine halton_memory
!> @brief sets the dimension for a Halton sequence
!> @author John Burkardt
!--------------------------------------------------------------------------------------------------
subroutine halton_ndim_set(ndim)
 implicit none
 integer(pInt), intent(in) :: ndim !< dimension of the Halton vectors
@@ -2325,252 +2541,6 @@ subroutine halton_seed_set(seed)
end subroutine halton_seed_set
!--------------------------------------------------------------------------------------------------
!> @brief computes an element of a Halton sequence.
!> @details Only the absolute value of SEED is considered. SEED = 0 is allowed, and returns R = 0.
!> @details Halton Bases should be distinct prime numbers. This routine only checks that each base
!> @details is greater than 1.
!> @details Reference:
!> @details J.H. Halton: On the efficiency of certain quasi-random sequences of points in evaluating
!> @details multi-dimensional integrals, Numerische Mathematik, Volume 2, pages 84-90, 1960.
!> @author John Burkardt
!--------------------------------------------------------------------------------------------------
subroutine i_to_halton (seed, base, ndim, r)
use IO, only: &
IO_error
implicit none
integer(pInt), intent(in) :: ndim !< dimension of the sequence
integer(pInt), intent(in), dimension(ndim) :: base !< Halton bases
real(pReal), dimension(ndim) :: base_inv
integer(pInt), dimension(ndim) :: digit
real(pReal), dimension(ndim), intent(out) :: r !< the SEED-th element of the Halton sequence for the given bases
integer(pInt), intent(in) :: seed !< index of the desired element
integer(pInt), dimension(ndim) :: seed2
seed2(1:ndim) = abs(seed)
r(1:ndim) = 0.0_pReal
if (any (base(1:ndim) <= 1_pInt)) call IO_error(error_ID=405_pInt)
base_inv(1:ndim) = 1.0_pReal / real (base(1:ndim), pReal)
do while ( any ( seed2(1:ndim) /= 0_pInt) )
digit(1:ndim) = mod ( seed2(1:ndim), base(1:ndim))
r(1:ndim) = r(1:ndim) + real ( digit(1:ndim), pReal) * base_inv(1:ndim)
base_inv(1:ndim) = base_inv(1:ndim) / real ( base(1:ndim), pReal)
seed2(1:ndim) = seed2(1:ndim) / base(1:ndim)
enddo
end subroutine i_to_halton
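For orientation, the loop above is the classic radical-inverse construction: the base-b digits of SEED are mirrored across the radix point, one dimension at a time. A minimal standalone sketch of that idea follows (not DAMASK code; default Fortran kinds instead of pInt/pReal, illustrative names only):

! Standalone sketch (not DAMASK code): base-2 radical inverse of seed = 5.
! 5 = 101 in binary; mirroring the digits across the point gives 0.101b = 0.625.
program radical_inverse_demo
  implicit none
  integer :: seed, base, digit
  real    :: r, base_inv

  seed = 5
  base = 2
  r        = 0.0
  base_inv = 1.0/real(base)
  do while (seed /= 0)
    digit    = mod(seed, base)           ! peel off the lowest base-b digit
    r        = r + real(digit)*base_inv  ! place it behind the radix point
    base_inv = base_inv/real(base)
    seed     = seed/base
  end do
  write(*,*) 'radical inverse of 5 in base 2:', r   ! prints 0.625
end program radical_inverse_demo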
!--------------------------------------------------------------------------------------------------
!> @brief returns any of the first 1500 prime numbers.
!> @details n < 0 returns 1500, the index of the largest prime (12553) available.
!> @details n = 0 is legal, returning PRIME = 1.
!> @details Reference:
!> @details Milton Abramowitz and Irene Stegun: Handbook of Mathematical Functions,
!> @details US Department of Commerce, 1964, pages 870-873.
!> @details Daniel Zwillinger: CRC Standard Mathematical Tables and Formulae,
!> @details 30th Edition, CRC Press, 1996, pages 95-98.
!> @author John Burkardt
!--------------------------------------------------------------------------------------------------
integer(pInt) function prime(n)
use IO, only: &
IO_error
implicit none
integer(pInt), intent(in) :: n !< index of the desired prime number
integer(pInt), parameter :: PRIME_MAX = 1500_pInt
integer(pInt), save :: icall = 0_pInt
integer(pInt), save, dimension(PRIME_MAX) :: npvec
if (icall == 0_pInt) then
icall = 1_pInt
npvec = [&
2_pInt, 3_pInt, 5_pInt, 7_pInt, 11_pInt, 13_pInt, 17_pInt, 19_pInt, 23_pInt, 29_pInt, &
31_pInt, 37_pInt, 41_pInt, 43_pInt, 47_pInt, 53_pInt, 59_pInt, 61_pInt, 67_pInt, 71_pInt, &
73_pInt, 79_pInt, 83_pInt, 89_pInt, 97_pInt, 101_pInt, 103_pInt, 107_pInt, 109_pInt, 113_pInt, &
127_pInt, 131_pInt, 137_pInt, 139_pInt, 149_pInt, 151_pInt, 157_pInt, 163_pInt, 167_pInt, 173_pInt, &
179_pInt, 181_pInt, 191_pInt, 193_pInt, 197_pInt, 199_pInt, 211_pInt, 223_pInt, 227_pInt, 229_pInt, &
233_pInt, 239_pInt, 241_pInt, 251_pInt, 257_pInt, 263_pInt, 269_pInt, 271_pInt, 277_pInt, 281_pInt, &
283_pInt, 293_pInt, 307_pInt, 311_pInt, 313_pInt, 317_pInt, 331_pInt, 337_pInt, 347_pInt, 349_pInt, &
353_pInt, 359_pInt, 367_pInt, 373_pInt, 379_pInt, 383_pInt, 389_pInt, 397_pInt, 401_pInt, 409_pInt, &
419_pInt, 421_pInt, 431_pInt, 433_pInt, 439_pInt, 443_pInt, 449_pInt, 457_pInt, 461_pInt, 463_pInt, &
467_pInt, 479_pInt, 487_pInt, 491_pInt, 499_pInt, 503_pInt, 509_pInt, 521_pInt, 523_pInt, 541_pInt, &
! 101:200
547_pInt, 557_pInt, 563_pInt, 569_pInt, 571_pInt, 577_pInt, 587_pInt, 593_pInt, 599_pInt, 601_pInt, &
607_pInt, 613_pInt, 617_pInt, 619_pInt, 631_pInt, 641_pInt, 643_pInt, 647_pInt, 653_pInt, 659_pInt, &
661_pInt, 673_pInt, 677_pInt, 683_pInt, 691_pInt, 701_pInt, 709_pInt, 719_pInt, 727_pInt, 733_pInt, &
739_pInt, 743_pInt, 751_pInt, 757_pInt, 761_pInt, 769_pInt, 773_pInt, 787_pInt, 797_pInt, 809_pInt, &
811_pInt, 821_pInt, 823_pInt, 827_pInt, 829_pInt, 839_pInt, 853_pInt, 857_pInt, 859_pInt, 863_pInt, &
877_pInt, 881_pInt, 883_pInt, 887_pInt, 907_pInt, 911_pInt, 919_pInt, 929_pInt, 937_pInt, 941_pInt, &
947_pInt, 953_pInt, 967_pInt, 971_pInt, 977_pInt, 983_pInt, 991_pInt, 997_pInt, 1009_pInt, 1013_pInt, &
1019_pInt, 1021_pInt, 1031_pInt, 1033_pInt, 1039_pInt, 1049_pInt, 1051_pInt, 1061_pInt, 1063_pInt, 1069_pInt, &
1087_pInt, 1091_pInt, 1093_pInt, 1097_pInt, 1103_pInt, 1109_pInt, 1117_pInt, 1123_pInt, 1129_pInt, 1151_pInt, &
1153_pInt, 1163_pInt, 1171_pInt, 1181_pInt, 1187_pInt, 1193_pInt, 1201_pInt, 1213_pInt, 1217_pInt, 1223_pInt, &
! 201:300
1229_pInt, 1231_pInt, 1237_pInt, 1249_pInt, 1259_pInt, 1277_pInt, 1279_pInt, 1283_pInt, 1289_pInt, 1291_pInt, &
1297_pInt, 1301_pInt, 1303_pInt, 1307_pInt, 1319_pInt, 1321_pInt, 1327_pInt, 1361_pInt, 1367_pInt, 1373_pInt, &
1381_pInt, 1399_pInt, 1409_pInt, 1423_pInt, 1427_pInt, 1429_pInt, 1433_pInt, 1439_pInt, 1447_pInt, 1451_pInt, &
1453_pInt, 1459_pInt, 1471_pInt, 1481_pInt, 1483_pInt, 1487_pInt, 1489_pInt, 1493_pInt, 1499_pInt, 1511_pInt, &
1523_pInt, 1531_pInt, 1543_pInt, 1549_pInt, 1553_pInt, 1559_pInt, 1567_pInt, 1571_pInt, 1579_pInt, 1583_pInt, &
1597_pInt, 1601_pInt, 1607_pInt, 1609_pInt, 1613_pInt, 1619_pInt, 1621_pInt, 1627_pInt, 1637_pInt, 1657_pInt, &
1663_pInt, 1667_pInt, 1669_pInt, 1693_pInt, 1697_pInt, 1699_pInt, 1709_pInt, 1721_pInt, 1723_pInt, 1733_pInt, &
1741_pInt, 1747_pInt, 1753_pInt, 1759_pInt, 1777_pInt, 1783_pInt, 1787_pInt, 1789_pInt, 1801_pInt, 1811_pInt, &
1823_pInt, 1831_pInt, 1847_pInt, 1861_pInt, 1867_pInt, 1871_pInt, 1873_pInt, 1877_pInt, 1879_pInt, 1889_pInt, &
1901_pInt, 1907_pInt, 1913_pInt, 1931_pInt, 1933_pInt, 1949_pInt, 1951_pInt, 1973_pInt, 1979_pInt, 1987_pInt, &
! 301:400
1993_pInt, 1997_pInt, 1999_pInt, 2003_pInt, 2011_pInt, 2017_pInt, 2027_pInt, 2029_pInt, 2039_pInt, 2053_pInt, &
2063_pInt, 2069_pInt, 2081_pInt, 2083_pInt, 2087_pInt, 2089_pInt, 2099_pInt, 2111_pInt, 2113_pInt, 2129_pInt, &
2131_pInt, 2137_pInt, 2141_pInt, 2143_pInt, 2153_pInt, 2161_pInt, 2179_pInt, 2203_pInt, 2207_pInt, 2213_pInt, &
2221_pInt, 2237_pInt, 2239_pInt, 2243_pInt, 2251_pInt, 2267_pInt, 2269_pInt, 2273_pInt, 2281_pInt, 2287_pInt, &
2293_pInt, 2297_pInt, 2309_pInt, 2311_pInt, 2333_pInt, 2339_pInt, 2341_pInt, 2347_pInt, 2351_pInt, 2357_pInt, &
2371_pInt, 2377_pInt, 2381_pInt, 2383_pInt, 2389_pInt, 2393_pInt, 2399_pInt, 2411_pInt, 2417_pInt, 2423_pInt, &
2437_pInt, 2441_pInt, 2447_pInt, 2459_pInt, 2467_pInt, 2473_pInt, 2477_pInt, 2503_pInt, 2521_pInt, 2531_pInt, &
2539_pInt, 2543_pInt, 2549_pInt, 2551_pInt, 2557_pInt, 2579_pInt, 2591_pInt, 2593_pInt, 2609_pInt, 2617_pInt, &
2621_pInt, 2633_pInt, 2647_pInt, 2657_pInt, 2659_pInt, 2663_pInt, 2671_pInt, 2677_pInt, 2683_pInt, 2687_pInt, &
2689_pInt, 2693_pInt, 2699_pInt, 2707_pInt, 2711_pInt, 2713_pInt, 2719_pInt, 2729_pInt, 2731_pInt, 2741_pInt, &
! 401:500
2749_pInt, 2753_pInt, 2767_pInt, 2777_pInt, 2789_pInt, 2791_pInt, 2797_pInt, 2801_pInt, 2803_pInt, 2819_pInt, &
2833_pInt, 2837_pInt, 2843_pInt, 2851_pInt, 2857_pInt, 2861_pInt, 2879_pInt, 2887_pInt, 2897_pInt, 2903_pInt, &
2909_pInt, 2917_pInt, 2927_pInt, 2939_pInt, 2953_pInt, 2957_pInt, 2963_pInt, 2969_pInt, 2971_pInt, 2999_pInt, &
3001_pInt, 3011_pInt, 3019_pInt, 3023_pInt, 3037_pInt, 3041_pInt, 3049_pInt, 3061_pInt, 3067_pInt, 3079_pInt, &
3083_pInt, 3089_pInt, 3109_pInt, 3119_pInt, 3121_pInt, 3137_pInt, 3163_pInt, 3167_pInt, 3169_pInt, 3181_pInt, &
3187_pInt, 3191_pInt, 3203_pInt, 3209_pInt, 3217_pInt, 3221_pInt, 3229_pInt, 3251_pInt, 3253_pInt, 3257_pInt, &
3259_pInt, 3271_pInt, 3299_pInt, 3301_pInt, 3307_pInt, 3313_pInt, 3319_pInt, 3323_pInt, 3329_pInt, 3331_pInt, &
3343_pInt, 3347_pInt, 3359_pInt, 3361_pInt, 3371_pInt, 3373_pInt, 3389_pInt, 3391_pInt, 3407_pInt, 3413_pInt, &
3433_pInt, 3449_pInt, 3457_pInt, 3461_pInt, 3463_pInt, 3467_pInt, 3469_pInt, 3491_pInt, 3499_pInt, 3511_pInt, &
3517_pInt, 3527_pInt, 3529_pInt, 3533_pInt, 3539_pInt, 3541_pInt, 3547_pInt, 3557_pInt, 3559_pInt, 3571_pInt, &
! 501:600
3581_pInt, 3583_pInt, 3593_pInt, 3607_pInt, 3613_pInt, 3617_pInt, 3623_pInt, 3631_pInt, 3637_pInt, 3643_pInt, &
3659_pInt, 3671_pInt, 3673_pInt, 3677_pInt, 3691_pInt, 3697_pInt, 3701_pInt, 3709_pInt, 3719_pInt, 3727_pInt, &
3733_pInt, 3739_pInt, 3761_pInt, 3767_pInt, 3769_pInt, 3779_pInt, 3793_pInt, 3797_pInt, 3803_pInt, 3821_pInt, &
3823_pInt, 3833_pInt, 3847_pInt, 3851_pInt, 3853_pInt, 3863_pInt, 3877_pInt, 3881_pInt, 3889_pInt, 3907_pInt, &
3911_pInt, 3917_pInt, 3919_pInt, 3923_pInt, 3929_pInt, 3931_pInt, 3943_pInt, 3947_pInt, 3967_pInt, 3989_pInt, &
4001_pInt, 4003_pInt, 4007_pInt, 4013_pInt, 4019_pInt, 4021_pInt, 4027_pInt, 4049_pInt, 4051_pInt, 4057_pInt, &
4073_pInt, 4079_pInt, 4091_pInt, 4093_pInt, 4099_pInt, 4111_pInt, 4127_pInt, 4129_pInt, 4133_pInt, 4139_pInt, &
4153_pInt, 4157_pInt, 4159_pInt, 4177_pInt, 4201_pInt, 4211_pInt, 4217_pInt, 4219_pInt, 4229_pInt, 4231_pInt, &
4241_pInt, 4243_pInt, 4253_pInt, 4259_pInt, 4261_pInt, 4271_pInt, 4273_pInt, 4283_pInt, 4289_pInt, 4297_pInt, &
4327_pInt, 4337_pInt, 4339_pInt, 4349_pInt, 4357_pInt, 4363_pInt, 4373_pInt, 4391_pInt, 4397_pInt, 4409_pInt, &
! 601:700
4421_pInt, 4423_pInt, 4441_pInt, 4447_pInt, 4451_pInt, 4457_pInt, 4463_pInt, 4481_pInt, 4483_pInt, 4493_pInt, &
4507_pInt, 4513_pInt, 4517_pInt, 4519_pInt, 4523_pInt, 4547_pInt, 4549_pInt, 4561_pInt, 4567_pInt, 4583_pInt, &
4591_pInt, 4597_pInt, 4603_pInt, 4621_pInt, 4637_pInt, 4639_pInt, 4643_pInt, 4649_pInt, 4651_pInt, 4657_pInt, &
4663_pInt, 4673_pInt, 4679_pInt, 4691_pInt, 4703_pInt, 4721_pInt, 4723_pInt, 4729_pInt, 4733_pInt, 4751_pInt, &
4759_pInt, 4783_pInt, 4787_pInt, 4789_pInt, 4793_pInt, 4799_pInt, 4801_pInt, 4813_pInt, 4817_pInt, 4831_pInt, &
4861_pInt, 4871_pInt, 4877_pInt, 4889_pInt, 4903_pInt, 4909_pInt, 4919_pInt, 4931_pInt, 4933_pInt, 4937_pInt, &
4943_pInt, 4951_pInt, 4957_pInt, 4967_pInt, 4969_pInt, 4973_pInt, 4987_pInt, 4993_pInt, 4999_pInt, 5003_pInt, &
5009_pInt, 5011_pInt, 5021_pInt, 5023_pInt, 5039_pInt, 5051_pInt, 5059_pInt, 5077_pInt, 5081_pInt, 5087_pInt, &
5099_pInt, 5101_pInt, 5107_pInt, 5113_pInt, 5119_pInt, 5147_pInt, 5153_pInt, 5167_pInt, 5171_pInt, 5179_pInt, &
5189_pInt, 5197_pInt, 5209_pInt, 5227_pInt, 5231_pInt, 5233_pInt, 5237_pInt, 5261_pInt, 5273_pInt, 5279_pInt, &
! 701:800
5281_pInt, 5297_pInt, 5303_pInt, 5309_pInt, 5323_pInt, 5333_pInt, 5347_pInt, 5351_pInt, 5381_pInt, 5387_pInt, &
5393_pInt, 5399_pInt, 5407_pInt, 5413_pInt, 5417_pInt, 5419_pInt, 5431_pInt, 5437_pInt, 5441_pInt, 5443_pInt, &
5449_pInt, 5471_pInt, 5477_pInt, 5479_pInt, 5483_pInt, 5501_pInt, 5503_pInt, 5507_pInt, 5519_pInt, 5521_pInt, &
5527_pInt, 5531_pInt, 5557_pInt, 5563_pInt, 5569_pInt, 5573_pInt, 5581_pInt, 5591_pInt, 5623_pInt, 5639_pInt, &
5641_pInt, 5647_pInt, 5651_pInt, 5653_pInt, 5657_pInt, 5659_pInt, 5669_pInt, 5683_pInt, 5689_pInt, 5693_pInt, &
5701_pInt, 5711_pInt, 5717_pInt, 5737_pInt, 5741_pInt, 5743_pInt, 5749_pInt, 5779_pInt, 5783_pInt, 5791_pInt, &
5801_pInt, 5807_pInt, 5813_pInt, 5821_pInt, 5827_pInt, 5839_pInt, 5843_pInt, 5849_pInt, 5851_pInt, 5857_pInt, &
5861_pInt, 5867_pInt, 5869_pInt, 5879_pInt, 5881_pInt, 5897_pInt, 5903_pInt, 5923_pInt, 5927_pInt, 5939_pInt, &
5953_pInt, 5981_pInt, 5987_pInt, 6007_pInt, 6011_pInt, 6029_pInt, 6037_pInt, 6043_pInt, 6047_pInt, 6053_pInt, &
6067_pInt, 6073_pInt, 6079_pInt, 6089_pInt, 6091_pInt, 6101_pInt, 6113_pInt, 6121_pInt, 6131_pInt, 6133_pInt, &
! 801:900
6143_pInt, 6151_pInt, 6163_pInt, 6173_pInt, 6197_pInt, 6199_pInt, 6203_pInt, 6211_pInt, 6217_pInt, 6221_pInt, &
6229_pInt, 6247_pInt, 6257_pInt, 6263_pInt, 6269_pInt, 6271_pInt, 6277_pInt, 6287_pInt, 6299_pInt, 6301_pInt, &
6311_pInt, 6317_pInt, 6323_pInt, 6329_pInt, 6337_pInt, 6343_pInt, 6353_pInt, 6359_pInt, 6361_pInt, 6367_pInt, &
6373_pInt, 6379_pInt, 6389_pInt, 6397_pInt, 6421_pInt, 6427_pInt, 6449_pInt, 6451_pInt, 6469_pInt, 6473_pInt, &
6481_pInt, 6491_pInt, 6521_pInt, 6529_pInt, 6547_pInt, 6551_pInt, 6553_pInt, 6563_pInt, 6569_pInt, 6571_pInt, &
6577_pInt, 6581_pInt, 6599_pInt, 6607_pInt, 6619_pInt, 6637_pInt, 6653_pInt, 6659_pInt, 6661_pInt, 6673_pInt, &
6679_pInt, 6689_pInt, 6691_pInt, 6701_pInt, 6703_pInt, 6709_pInt, 6719_pInt, 6733_pInt, 6737_pInt, 6761_pInt, &
6763_pInt, 6779_pInt, 6781_pInt, 6791_pInt, 6793_pInt, 6803_pInt, 6823_pInt, 6827_pInt, 6829_pInt, 6833_pInt, &
6841_pInt, 6857_pInt, 6863_pInt, 6869_pInt, 6871_pInt, 6883_pInt, 6899_pInt, 6907_pInt, 6911_pInt, 6917_pInt, &
6947_pInt, 6949_pInt, 6959_pInt, 6961_pInt, 6967_pInt, 6971_pInt, 6977_pInt, 6983_pInt, 6991_pInt, 6997_pInt, &
! 901:1000
7001_pInt, 7013_pInt, 7019_pInt, 7027_pInt, 7039_pInt, 7043_pInt, 7057_pInt, 7069_pInt, 7079_pInt, 7103_pInt, &
7109_pInt, 7121_pInt, 7127_pInt, 7129_pInt, 7151_pInt, 7159_pInt, 7177_pInt, 7187_pInt, 7193_pInt, 7207_pInt, &
7211_pInt, 7213_pInt, 7219_pInt, 7229_pInt, 7237_pInt, 7243_pInt, 7247_pInt, 7253_pInt, 7283_pInt, 7297_pInt, &
7307_pInt, 7309_pInt, 7321_pInt, 7331_pInt, 7333_pInt, 7349_pInt, 7351_pInt, 7369_pInt, 7393_pInt, 7411_pInt, &
7417_pInt, 7433_pInt, 7451_pInt, 7457_pInt, 7459_pInt, 7477_pInt, 7481_pInt, 7487_pInt, 7489_pInt, 7499_pInt, &
7507_pInt, 7517_pInt, 7523_pInt, 7529_pInt, 7537_pInt, 7541_pInt, 7547_pInt, 7549_pInt, 7559_pInt, 7561_pInt, &
7573_pInt, 7577_pInt, 7583_pInt, 7589_pInt, 7591_pInt, 7603_pInt, 7607_pInt, 7621_pInt, 7639_pInt, 7643_pInt, &
7649_pInt, 7669_pInt, 7673_pInt, 7681_pInt, 7687_pInt, 7691_pInt, 7699_pInt, 7703_pInt, 7717_pInt, 7723_pInt, &
7727_pInt, 7741_pInt, 7753_pInt, 7757_pInt, 7759_pInt, 7789_pInt, 7793_pInt, 7817_pInt, 7823_pInt, 7829_pInt, &
7841_pInt, 7853_pInt, 7867_pInt, 7873_pInt, 7877_pInt, 7879_pInt, 7883_pInt, 7901_pInt, 7907_pInt, 7919_pInt, &
! 1001:1100
7927_pInt, 7933_pInt, 7937_pInt, 7949_pInt, 7951_pInt, 7963_pInt, 7993_pInt, 8009_pInt, 8011_pInt, 8017_pInt, &
8039_pInt, 8053_pInt, 8059_pInt, 8069_pInt, 8081_pInt, 8087_pInt, 8089_pInt, 8093_pInt, 8101_pInt, 8111_pInt, &
8117_pInt, 8123_pInt, 8147_pInt, 8161_pInt, 8167_pInt, 8171_pInt, 8179_pInt, 8191_pInt, 8209_pInt, 8219_pInt, &
8221_pInt, 8231_pInt, 8233_pInt, 8237_pInt, 8243_pInt, 8263_pInt, 8269_pInt, 8273_pInt, 8287_pInt, 8291_pInt, &
8293_pInt, 8297_pInt, 8311_pInt, 8317_pInt, 8329_pInt, 8353_pInt, 8363_pInt, 8369_pInt, 8377_pInt, 8387_pInt, &
8389_pInt, 8419_pInt, 8423_pInt, 8429_pInt, 8431_pInt, 8443_pInt, 8447_pInt, 8461_pInt, 8467_pInt, 8501_pInt, &
8513_pInt, 8521_pInt, 8527_pInt, 8537_pInt, 8539_pInt, 8543_pInt, 8563_pInt, 8573_pInt, 8581_pInt, 8597_pInt, &
8599_pInt, 8609_pInt, 8623_pInt, 8627_pInt, 8629_pInt, 8641_pInt, 8647_pInt, 8663_pInt, 8669_pInt, 8677_pInt, &
8681_pInt, 8689_pInt, 8693_pInt, 8699_pInt, 8707_pInt, 8713_pInt, 8719_pInt, 8731_pInt, 8737_pInt, 8741_pInt, &
8747_pInt, 8753_pInt, 8761_pInt, 8779_pInt, 8783_pInt, 8803_pInt, 8807_pInt, 8819_pInt, 8821_pInt, 8831_pInt, &
! 1101:1200
8837_pInt, 8839_pInt, 8849_pInt, 8861_pInt, 8863_pInt, 8867_pInt, 8887_pInt, 8893_pInt, 8923_pInt, 8929_pInt, &
8933_pInt, 8941_pInt, 8951_pInt, 8963_pInt, 8969_pInt, 8971_pInt, 8999_pInt, 9001_pInt, 9007_pInt, 9011_pInt, &
9013_pInt, 9029_pInt, 9041_pInt, 9043_pInt, 9049_pInt, 9059_pInt, 9067_pInt, 9091_pInt, 9103_pInt, 9109_pInt, &
9127_pInt, 9133_pInt, 9137_pInt, 9151_pInt, 9157_pInt, 9161_pInt, 9173_pInt, 9181_pInt, 9187_pInt, 9199_pInt, &
9203_pInt, 9209_pInt, 9221_pInt, 9227_pInt, 9239_pInt, 9241_pInt, 9257_pInt, 9277_pInt, 9281_pInt, 9283_pInt, &
9293_pInt, 9311_pInt, 9319_pInt, 9323_pInt, 9337_pInt, 9341_pInt, 9343_pInt, 9349_pInt, 9371_pInt, 9377_pInt, &
9391_pInt, 9397_pInt, 9403_pInt, 9413_pInt, 9419_pInt, 9421_pInt, 9431_pInt, 9433_pInt, 9437_pInt, 9439_pInt, &
9461_pInt, 9463_pInt, 9467_pInt, 9473_pInt, 9479_pInt, 9491_pInt, 9497_pInt, 9511_pInt, 9521_pInt, 9533_pInt, &
9539_pInt, 9547_pInt, 9551_pInt, 9587_pInt, 9601_pInt, 9613_pInt, 9619_pInt, 9623_pInt, 9629_pInt, 9631_pInt, &
9643_pInt, 9649_pInt, 9661_pInt, 9677_pInt, 9679_pInt, 9689_pInt, 9697_pInt, 9719_pInt, 9721_pInt, 9733_pInt, &
! 1201:1300
9739_pInt, 9743_pInt, 9749_pInt, 9767_pInt, 9769_pInt, 9781_pInt, 9787_pInt, 9791_pInt, 9803_pInt, 9811_pInt, &
9817_pInt, 9829_pInt, 9833_pInt, 9839_pInt, 9851_pInt, 9857_pInt, 9859_pInt, 9871_pInt, 9883_pInt, 9887_pInt, &
9901_pInt, 9907_pInt, 9923_pInt, 9929_pInt, 9931_pInt, 9941_pInt, 9949_pInt, 9967_pInt, 9973_pInt,10007_pInt, &
10009_pInt,10037_pInt,10039_pInt,10061_pInt,10067_pInt,10069_pInt,10079_pInt,10091_pInt,10093_pInt,10099_pInt, &
10103_pInt,10111_pInt,10133_pInt,10139_pInt,10141_pInt,10151_pInt,10159_pInt,10163_pInt,10169_pInt,10177_pInt, &
10181_pInt,10193_pInt,10211_pInt,10223_pInt,10243_pInt,10247_pInt,10253_pInt,10259_pInt,10267_pInt,10271_pInt, &
10273_pInt,10289_pInt,10301_pInt,10303_pInt,10313_pInt,10321_pInt,10331_pInt,10333_pInt,10337_pInt,10343_pInt, &
10357_pInt,10369_pInt,10391_pInt,10399_pInt,10427_pInt,10429_pInt,10433_pInt,10453_pInt,10457_pInt,10459_pInt, &
10463_pInt,10477_pInt,10487_pInt,10499_pInt,10501_pInt,10513_pInt,10529_pInt,10531_pInt,10559_pInt,10567_pInt, &
10589_pInt,10597_pInt,10601_pInt,10607_pInt,10613_pInt,10627_pInt,10631_pInt,10639_pInt,10651_pInt,10657_pInt, &
! 1301:1400
10663_pInt,10667_pInt,10687_pInt,10691_pInt,10709_pInt,10711_pInt,10723_pInt,10729_pInt,10733_pInt,10739_pInt, &
10753_pInt,10771_pInt,10781_pInt,10789_pInt,10799_pInt,10831_pInt,10837_pInt,10847_pInt,10853_pInt,10859_pInt, &
10861_pInt,10867_pInt,10883_pInt,10889_pInt,10891_pInt,10903_pInt,10909_pInt,10937_pInt,10939_pInt,10949_pInt, &
10957_pInt,10973_pInt,10979_pInt,10987_pInt,10993_pInt,11003_pInt,11027_pInt,11047_pInt,11057_pInt,11059_pInt, &
11069_pInt,11071_pInt,11083_pInt,11087_pInt,11093_pInt,11113_pInt,11117_pInt,11119_pInt,11131_pInt,11149_pInt, &
11159_pInt,11161_pInt,11171_pInt,11173_pInt,11177_pInt,11197_pInt,11213_pInt,11239_pInt,11243_pInt,11251_pInt, &
11257_pInt,11261_pInt,11273_pInt,11279_pInt,11287_pInt,11299_pInt,11311_pInt,11317_pInt,11321_pInt,11329_pInt, &
11351_pInt,11353_pInt,11369_pInt,11383_pInt,11393_pInt,11399_pInt,11411_pInt,11423_pInt,11437_pInt,11443_pInt, &
11447_pInt,11467_pInt,11471_pInt,11483_pInt,11489_pInt,11491_pInt,11497_pInt,11503_pInt,11519_pInt,11527_pInt, &
11549_pInt,11551_pInt,11579_pInt,11587_pInt,11593_pInt,11597_pInt,11617_pInt,11621_pInt,11633_pInt,11657_pInt, &
! 1401:1500
11677_pInt,11681_pInt,11689_pInt,11699_pInt,11701_pInt,11717_pInt,11719_pInt,11731_pInt,11743_pInt,11777_pInt, &
11779_pInt,11783_pInt,11789_pInt,11801_pInt,11807_pInt,11813_pInt,11821_pInt,11827_pInt,11831_pInt,11833_pInt, &
11839_pInt,11863_pInt,11867_pInt,11887_pInt,11897_pInt,11903_pInt,11909_pInt,11923_pInt,11927_pInt,11933_pInt, &
11939_pInt,11941_pInt,11953_pInt,11959_pInt,11969_pInt,11971_pInt,11981_pInt,11987_pInt,12007_pInt,12011_pInt, &
12037_pInt,12041_pInt,12043_pInt,12049_pInt,12071_pInt,12073_pInt,12097_pInt,12101_pInt,12107_pInt,12109_pInt, &
12113_pInt,12119_pInt,12143_pInt,12149_pInt,12157_pInt,12161_pInt,12163_pInt,12197_pInt,12203_pInt,12211_pInt, &
12227_pInt,12239_pInt,12241_pInt,12251_pInt,12253_pInt,12263_pInt,12269_pInt,12277_pInt,12281_pInt,12289_pInt, &
12301_pInt,12323_pInt,12329_pInt,12343_pInt,12347_pInt,12373_pInt,12377_pInt,12379_pInt,12391_pInt,12401_pInt, &
12409_pInt,12413_pInt,12421_pInt,12433_pInt,12437_pInt,12451_pInt,12457_pInt,12473_pInt,12479_pInt,12487_pInt, &
12491_pInt,12497_pInt,12503_pInt,12511_pInt,12517_pInt,12527_pInt,12539_pInt,12541_pInt,12547_pInt,12553_pInt]
endif
if(n < 0_pInt) then
prime = PRIME_MAX
else if (n == 0_pInt) then
prime = 1_pInt
else if (n <= PRIME_MAX) then
prime = npvec(n)
else
prime = -1_pInt
call IO_error(error_ID=406_pInt)
end if
end function prime
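As a usage note, the branches above mean prime(-1) yields 1500 (PRIME_MAX), prime(0) yields 1, prime(k) yields the k-th prime for 1 <= k <= 1500, and anything larger triggers IO_error 406. A minimal standalone sketch of the same lookup pattern, using a hypothetical 10-entry table and a plain -1 return in place of npvec and IO_error:

! Standalone sketch (not DAMASK code) of the table-lookup semantics of prime(n).
program prime_lookup_demo
  implicit none
  integer, parameter :: PRIME_MAX = 10
  integer, parameter, dimension(PRIME_MAX) :: npvec = &
    [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]

  print *, lookup(-1)   ! n < 0  -> PRIME_MAX, the index of the largest prime held
  print *, lookup(0)    ! n == 0 -> 1 by convention
  print *, lookup(4)    ! 4th prime -> 7

contains

  integer function lookup(n)
    integer, intent(in) :: n
    if (n < 0) then
      lookup = PRIME_MAX
    else if (n == 0) then
      lookup = 1
    else if (n <= PRIME_MAX) then
      lookup = npvec(n)
    else
      lookup = -1          ! DAMASK raises IO_error(error_ID=406) at this point
    end if
  end function lookup

end program prime_lookup_demo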
!--------------------------------------------------------------------------------------------------
!> @brief factorial
!--------------------------------------------------------------------------------------------------