Merge branch 'development' into FEM-PETSC_3.11+
commit 44fc65b812
@@ -108,9 +108,9 @@ if (DAMASK_SOLVER STREQUAL "grid")
   project (damask-grid Fortran C)
   add_definitions (-DGrid)
   message ("Building Grid Solver\n")
-elseif (DAMASK_SOLVER STREQUAL "fem" OR DAMASK_SOLVER STREQUAL "mesh")
+elseif (DAMASK_SOLVER STREQUAL "mesh")
   project (damask-mesh Fortran C)
-  add_definitions (-DFEM)
+  add_definitions (-DMesh)
   message ("Building Mesh Solver\n")
 else ()
   message (FATAL_ERROR "Build target (DAMASK_SOLVER) is not defined")
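(The build target is selected at configure time through the DAMASK_SOLVER cache variable; after this hunk only "mesh" selects the FEM build, e.g. a configure call along the lines of `cmake -DDAMASK_SOLVER=mesh ..` — the exact invocation is an assumed example based on the variable name above, not shown in this commit.)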
@@ -110,7 +110,7 @@ for executable in icc icpc ifort ;do
 done
 
 firstLevel "MPI Wrappers"
-for executable in mpicc mpiCC mpiicc mpic++ mpicpc mpicxx mpifort mpif90 mpif77; do
+for executable in mpicc mpiCC mpiicc mpic++ mpiicpc mpicxx mpifort mpiifort mpif90 mpif77; do
   getDetails $executable '-show'
 done
Makefile (6 changed lines)

@@ -2,19 +2,23 @@ SHELL = /bin/sh
 ########################################################################################
 # Makefile for the installation of DAMASK
 ########################################################################################
-DAMASK_ROOT = $(shell python -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser('$(pwd)'))))")
+DAMASK_ROOT = $(shell python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser('$(pwd)'))))")
 .PHONY: all
 all: grid mesh processing
 
 .PHONY: grid
 grid: build/grid
 	@(cd build/grid;make -j${DAMASK_NUM_THREADS} all install;)
+	@rm -f ${DAMASK_ROOT}/bin/DAMASK_spectral > /dev/null || true
+	@ln -s ${DAMASK_ROOT}/bin/DAMASK_grid ${DAMASK_ROOT}/bin/DAMASK_spectral || true
 .PHONY: spectral
 spectral: grid
 
 .PHONY: mesh
 mesh: build/mesh
 	@(cd build/mesh; make -j${DAMASK_NUM_THREADS} all install;)
+	@rm -f ${DAMASK_ROOT}/bin/DAMASK_FEM > /dev/null || true
+	@ln -s ${DAMASK_ROOT}/bin/DAMASK_mesh ${DAMASK_ROOT}/bin/DAMASK_FEM || true
 .PHONY: FEM
 FEM: mesh
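(With these additions, `make grid` and `make mesh` install the renamed binaries DAMASK_grid and DAMASK_mesh and leave DAMASK_spectral and DAMASK_FEM behind as compatibility symlinks, while the phony `spectral` and `FEM` targets remain as aliases for the new names.)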
PRIVATE (2 changed lines)

@@ -1 +1 @@
-Subproject commit c595994cd8880acadf50b5dedb79156d04d35b91
+Subproject commit a6109acd264c683fd335b1d1f69934fc4a4078e3
@@ -1,6 +1,9 @@
 ###################################################################################################
 # GNU Compiler
 ###################################################################################################
+if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 8.0)
+  message (FATAL_ERROR "GCC Compiler version: ${CMAKE_Fortran_COMPILER_VERSION} not supported")
+endif ()
 
 if (OPENMP)
   set (OPENMP_FLAGS "-fopenmp")
@@ -14,7 +17,7 @@ elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
   set (OPTIMIZATION_FLAGS "-O3 -ffast-math -funroll-loops -ftree-vectorize")
 endif ()
 
-set (STANDARD_CHECK "-std=f2008ts -pedantic-errors" )
+set (STANDARD_CHECK "-std=f2018 -pedantic-errors" )
 set (LINKER_FLAGS "${LINKER_FLAGS} -Wl")
 # options parsed directly to the linker
 set (LINKER_FLAGS "${LINKER_FLAGS},-undefined,dynamic_lookup" )
@@ -25,6 +28,9 @@ set (LINKER_FLAGS "${LINKER_FLAGS},-undefined,dynamic_lookup" )
 set (COMPILE_FLAGS "${COMPILE_FLAGS} -xf95-cpp-input")
 # preprocessor
 
+set (COMPILE_FLAGS "${COMPILE_FLAGS} -fPIC -fPIE")
+# position independent code
+
 set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffree-line-length-132")
 # restrict line length to the standard 132 characters (lattice.f90 requires more characters)
@@ -1,6 +1,10 @@
 ###################################################################################################
 # Intel Compiler
 ###################################################################################################
+if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 18.0)
+  message (FATAL_ERROR "Intel Compiler version: ${CMAKE_Fortran_COMPILER_VERSION} not supported")
+endif ()
+
 if (OPENMP)
   set (OPENMP_FLAGS "-qopenmp -parallel")
 endif ()
@@ -3,7 +3,7 @@
 
 set CALLED=($_)
 set ENV_ROOT=`dirname $CALLED[2]`
-set DAMASK_ROOT=`python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" $ENV_ROOT"/../"`
+set DAMASK_ROOT=`python3 -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" $ENV_ROOT"/../"`
 
 source $ENV_ROOT/CONFIG
@@ -2,7 +2,7 @@
 # usage: source DAMASK.sh
 
 function canonicalPath {
-  python -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
+  python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
 }
 
 function blink {
@@ -2,7 +2,7 @@
 # usage: source DAMASK.zsh
 
 function canonicalPath {
-  python -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
+  python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
 }
 
 function blink {
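The canonicalPath helper in both shell files delegates to the same Python one-liner (now invoked as python3); as a standalone sketch of what that one-liner computes:

    import os, sys

    # expand '~', resolve symlinks, and normalize separators,
    # yielding a canonical absolute path for the argument given on the CLI
    print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))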
@@ -1,3 +0,0 @@
-hydrogenflux        cahnhilliard
-initialHydrogenConc 0.0
-(output) hydrogenconc
@@ -37,10 +37,7 @@ if options.depth is None:
     parser.error('depth not specified')
 
-theta=-0.75*np.pi
-RotMat2TSL=np.array([[1., 0., 0.],
-                     [0., np.cos(theta), np.sin(theta)],   # Orientation to account for -135 deg
-                     [0., -np.sin(theta), np.cos(theta)]]) # rotation for TSL convention
+rot_to_TSL = damask.Rotation.from_axis_angle([-1,0,0,.75*np.pi])
 
 for name in filenames:
     damask.util.report(scriptName,name)
@@ -50,8 +47,6 @@
     coord = - table.get(options.frame)
     coord[:,2] += table.get(options.depth)[:,0]
 
-    table.add('coord',
-              np.einsum('ijk,ik->ij',np.broadcast_to(RotMat2TSL,(coord.shape[0],3,3)),coord),
-              scriptID+' '+' '.join(sys.argv[1:]))
+    table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))
 
     table.to_ASCII(sys.stdout if name is None else name)
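The new form replaces the hand-rolled rotation matrix with a damask.Rotation applied to a whole point set via broadcasting and the @ operator; a minimal sketch using only the calls visible in this diff (array shapes assumed):

    import numpy as np
    import damask

    coord = np.zeros((5,3)); coord[:,2] = 1.                   # five points on the z-axis
    rot = damask.Rotation.from_axis_angle([-1,0,0,.75*np.pi])  # -135 deg about x (TSL convention)
    rotated = rot.broadcast_to(coord.shape[0]) @ coord         # one rotation applied to each row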
@@ -1,183 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from optparse import OptionParser
-import re
-from collections.abc import Iterable
-import math          # noqa
-
-import scipy         # noqa
-import scipy.linalg  # noqa
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-def listify(x):
-    return x if isinstance(x, Iterable) else [x]
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add or alter column(s) with derived values according to user-defined arithmetic operation between column(s).
-Column labels are tagged by '#label#' in formulas. Use ';' for ',' in functions.
-Numpy is available as 'np'.
-
-Special variables: #_row_# -- row index
-
-Examples:
-(1) magnitude of vector -- "np.linalg.norm(#vec#)"
-(2) rounded root of row number -- "round(math.sqrt(#_row_#);3)"
-
-""", version = scriptID)
-
-parser.add_option('-l','--label',
-                  dest = 'labels',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = '(list of) new column labels')
-parser.add_option('-f','--formula',
-                  dest = 'formulas',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = '(list of) formulas corresponding to labels')
-
-parser.add_option('-c','--condition',
-                  dest = 'condition', metavar='string',
-                  help = 'condition to alter existing column data (optional)')
-
-(options,filenames) = parser.parse_args()
-
-if options.labels is None or options.formulas is None:
-    parser.error('no formulas and/or labels specified.')
-if len(options.labels) != len(options.formulas):
-    parser.error('number of labels ({}) and formulas ({}) do not match.'.format(len(options.labels),len(options.formulas)))
-
-for i in range(len(options.formulas)):
-    options.formulas[i] = options.formulas[i].replace(';',',')
-
-# ------------------------------------- loop over input files --------------------------------------
-
-if filenames == []: filenames = [None]
-
-for name in filenames:
-    try:
-        table = damask.ASCIItable(name = name)
-    except IOError:
-        continue
-    damask.util.report(scriptName,name)
-
-# ------------------------------------------ read header -------------------------------------------
-
-    table.head_read()
-
-# --------------------------------------------------------------------------------------------------
-    specials = { \
-                '_row_': 0,
-               }
-
-# --------------------------------------- evaluate condition ---------------------------------------
-    if options.condition is not None:
-        condition = options.condition                # copy per file, since might be altered inline
-        breaker = False
-
-        for position,(all,marker,column) in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))):  # find three groups
-            idx = table.label_index(column)
-            dim = table.label_dimension(column)
-            if idx < 0 and column not in specials:
-                damask.util.croak('column "{}" not found.'.format(column))
-                breaker = True
-            else:
-                if column in specials:
-                    replacement = 'specials["{}"]'.format(column)
-                elif dim == 1:                       # scalar input
-                    replacement = '{}(table.data[{}])'.format({  '':'float',
-                                                               's#':'str'}[marker],idx)  # take float or string value of data column
-                elif dim > 1:                        # multidimensional input (vector, tensor, etc.)
-                    replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim)  # use (flat) array representation
-
-                condition = condition.replace('#'+all+'#',replacement)
-
-        if breaker: continue                         # found mistake in condition evaluation --> next file
-
-# ------------------------------------------ build formulas ----------------------------------------
-
-    evaluator = {}
-
-    for label,formula in zip(options.labels,options.formulas):
-        for column in re.findall(r'#(.+?)#',formula):  # loop over column labels in formula
-            idx = table.label_index(column)
-            dim = table.label_dimension(column)
-            if column in specials:
-                replacement = 'specials["{}"]'.format(column)
-            elif dim == 1:                           # scalar input
-                replacement = 'float(table.data[{}])'.format(idx)  # take float value of data column
-            elif dim > 1:                            # multidimensional input (vector, tensor, etc.)
-                replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim)  # use (flat) array representation
-            else:
-                damask.util.croak('column {} not found, skipping {}...'.format(column,label))
-                options.labels.remove(label)
-                break
-
-            formula = formula.replace('#'+column+'#',replacement)
-
-        evaluator[label] = formula
-
-# ---------------------------- separate requested labels into old and new --------------------------
-
-    veterans = list(set(options.labels)&set(table.labels(raw=False)+table.labels(raw=True)) )  # intersection of requested and existing
-    newbies  = list(set(options.labels)-set(table.labels(raw=False)+table.labels(raw=True)) )  # requested but not existing
-
-# ------------------------------------------ process data ------------------------------------------
-
-    firstLine   = True
-    outputAlive = True
-
-    while outputAlive and table.data_read():         # read next data line of ASCII table
-        specials['_row_'] += 1                       # count row
-
-        if firstLine:
-            firstLine = False
-
-# ---------------------------- line 1: determine dimension of formulas -----------------------------
-
-            resultDim = {}
-            for label in list(options.labels):       # iterate over stable copy
-                resultDim[label] = np.size(eval(evaluator[label]))  # get dimension of formula[label]
-                if resultDim[label] == 0: options.labels.remove(label)  # remove label if invalid result
-
-            for veteran in list(veterans):
-                if resultDim[veteran] != table.label_dimension(veteran):
-                    damask.util.croak('skipping {} due to inconsistent dimension...'.format(veteran))
-                    veterans.remove(veteran)         # discard culprit
-
-# ----------------------------------- line 1: assemble header --------------------------------------
-
-            for newby in newbies:
-                table.labels_append(['{}_{}'.format(i+1,newby) for i in range(resultDim[newby])]
-                                    if resultDim[newby] > 1 else newby)
-
-            table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-            table.head_write()
-
-# -------------------------------------- evaluate formulas -----------------------------------------
-
-        if options.condition is None or eval(condition):  # condition for veteran replacement fulfilled
-            for veteran in veterans:                 # evaluate formulas that overwrite
-                table.data[table.label_index(veteran):
-                           table.label_index(veteran)+table.label_dimension(veteran)] = \
-                    listify(eval(evaluator[veteran]))
-
-            for newby in newbies:                    # evaluate formulas that append
-                table.data_append(listify(eval(evaluator[newby])))
-
-        outputAlive = table.data_write()             # output processed line
-
-# ------------------------------------- output finalization ----------------------------------------
-
-    table.close()                                    # close ASCII table
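(For reference, the removed script's mini-language tagged table columns as '#label#' inside formulas, so a typical invocation would look like `-l mag -f "np.linalg.norm(#vec#)"` to append the magnitude of a 'vec' column; the column name here is a hypothetical example, the syntax comes from the help text above.)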
@@ -1,68 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add RGB color value corresponding to TSL-OIM scheme for inverse pole figures.
-
-""", version = scriptID)
-
-parser.add_option('-p',
-                  '--pole',
-                  dest = 'pole',
-                  type = 'float', nargs = 3, metavar = 'float float float',
-                  help = 'lab frame direction for inverse pole figure [%default]')
-parser.add_option('-s',
-                  '--symmetry',
-                  dest = 'symmetry',
-                  type = 'choice', choices = damask.Symmetry.lattices[1:], metavar='string',
-                  help = 'crystal symmetry [%default] {{{}}} '.format(', '.join(damask.Symmetry.lattices[1:])))
-parser.add_option('-o',
-                  '--orientation',
-                  dest = 'quaternion',
-                  metavar = 'string',
-                  help = 'label of crystal orientation given as unit quaternion [%default]')
-
-parser.set_defaults(pole = (0.0,0.0,1.0),
-                    quaternion = 'orientation',
-                    symmetry = damask.Symmetry.lattices[-1],
-                   )
-
-(options, filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-# damask.Orientation requires Bravais lattice, but we are only interested in symmetry
-symmetry2lattice={'cubic':'fcc','hexagonal':'hex','tetragonal':'bct'}
-lattice = symmetry2lattice[options.symmetry]
-
-pole = np.array(options.pole)
-pole /= np.linalg.norm(pole)
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    orientation = table.get(options.quaternion)
-    color = np.empty((orientation.shape[0],3))
-    for i,o in enumerate(orientation):
-        color[i] = damask.Orientation(o,lattice = lattice).IPFcolor(pole)
-
-    table.add('IPF_{:g}{:g}{:g}_{sym}'.format(*options.pole,sym = options.symmetry.lower()),
-              color,
-              scriptID+' '+' '.join(sys.argv[1:]))
-    table.to_ASCII(sys.stdout if name is None else name)
@@ -1,75 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-# definition of element-wise p-norms for matrices
-# ToDo: better use numpy.linalg.norm
-
-def norm(which,object):
-
-    if which == 'Abs':                               # p = 1
-        return sum(map(abs, object))
-    elif which == 'Frobenius':                       # p = 2
-        return np.sqrt(sum([x*x for x in object]))
-    elif which == 'Max':                             # p = inf
-        return max(map(abs, object))
-    else:
-        return -1
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add column(s) containing norm of requested column(s) being either vectors or tensors.
-
-""", version = scriptID)
-
-normChoices = ['abs','frobenius','max']
-parser.add_option('-n','--norm',
-                  dest = 'norm',
-                  type = 'choice', choices = normChoices, metavar='string',
-                  help = 'type of element-wise p-norm [frobenius] {%s}'%(','.join(map(str,normChoices))))
-parser.add_option('-l','--label',
-                  dest = 'labels',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = 'heading of column(s) to calculate norm of')
-
-parser.set_defaults(norm = 'frobenius',
-                   )
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-if options.norm.lower() not in normChoices:
-    parser.error('invalid norm ({}) specified.'.format(options.norm))
-if options.labels is None:
-    parser.error('no data column specified.')
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    for label in options.labels:
-        data = table.get(label)
-        data_norm = np.empty((data.shape[0],1))
-        for i,d in enumerate(data):
-            data_norm[i] = norm(options.norm.capitalize(),d)
-
-        table.add('norm{}({})'.format(options.norm.capitalize(),label),
-                  data_norm,
-                  scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.to_ASCII(sys.stdout if name is None else name)
@@ -2,6 +2,7 @@
 
 import os
 import sys
+from io import StringIO
 from optparse import OptionParser
 
 import numpy as np
@@ -24,14 +25,8 @@ Additional (globally fixed) rotations of the lab frame and/or crystal frame can
 
 """, version = scriptID)
 
-representations = {
-                   'quaternion': ['qu',4],
-                   'rodrigues':  ['ro',4],
-                   'Rodrigues':  ['Ro',3],
-                   'eulers':     ['eu',3],
-                   'matrix':     ['om',9],
-                   'angleaxis':  ['ax',4],
-                  }
+representations = ['quaternion', 'rodrigues', 'eulers', 'matrix', 'axisangle']
 
 parser.add_option('-o',
                   '--output',
@@ -93,8 +88,8 @@ parser.set_defaults(output = [],
                    )
 
 (options, filenames) = parser.parse_args()
+if filenames == []: filenames = [None]
 
-#options.output = list(map(lambda x: x.lower(), options.output))
 if options.output == [] or (not set(options.output).issubset(set(representations))):
     parser.error('output must be chosen from {}.'.format(', '.join(representations)))
 
@@ -109,95 +104,47 @@ input = [options.eulers is not None,
 
 if np.sum(input) != 1: parser.error('needs exactly one input format.')
 
-(label,dim,inputtype) = [(options.eulers,representations['eulers'][1],'eulers'),
-                         (options.rodrigues,representations['rodrigues'][1],'rodrigues'),
-                         ([options.x,options.y,options.z],[3,3,3],'frame'),
-                         (options.matrix,representations['matrix'][1],'matrix'),
-                         (options.quaternion,representations['quaternion'][1],'quaternion'),
-                        ][np.where(input)[0][0]]     # select input label that was requested
-
-r = damask.Rotation.fromAxisAngle(np.array(options.crystalrotation),options.degrees,normalise=True)
-R = damask.Rotation.fromAxisAngle(np.array(options.labrotation),options.degrees,normalise=True)
-
-
-# --- loop over input files ------------------------------------------------------------------------
-
-if filenames == []: filenames = [None]
-
+r = damask.Rotation.from_axis_angle(np.array(options.crystalrotation),options.degrees,normalise=True)
+R = damask.Rotation.from_axis_angle(np.array(options.labrotation),options.degrees,normalise=True)
+
 for name in filenames:
-    try:
-        table = damask.ASCIItable(name = name)
-    except IOError:
-        continue
     damask.util.report(scriptName,name)
 
-# ------------------------------------------ read header ------------------------------------------
-
-    table.head_read()
-
-# ------------------------------------------ sanity checks -----------------------------------------
-
-    errors  = []
-    remarks = []
-
-    if not np.all(table.label_dimension(label) == dim): errors.append('input {} does not have dimension {}.'.format(label,dim))
-    else:  column = table.label_index(label)
-
-    if remarks != []: damask.util.croak(remarks)
-    if errors  != []:
-        damask.util.croak(errors)
-        table.close(dismiss = True)
-        continue
-
-# ------------------------------------------ assemble header ---------------------------------------
-
-    table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-    for output in options.output:
-        if output in representations:
-            table.labels_append(['{}_{}({})'.format(i+1,representations[output][0],label) \
-                                 for i in range(representations[output][1])])
-    table.head_write()
-
-# ------------------------------------------ process data ------------------------------------------
-
-    outputAlive = True
-    while outputAlive and table.data_read():         # read next data line of ASCII table
-        if inputtype == 'eulers':
-            d = representations['eulers'][1]
-            o = damask.Rotation.fromEulers(list(map(float,table.data[column:column+d])),options.degrees)
-        elif inputtype == 'rodrigues':
-            d = representations['rodrigues'][1]
-            o = damask.Rotation.fromRodrigues(list(map(float,table.data[column:column+d])))
-        elif inputtype == 'matrix':
-            d = representations['matrix'][1]
-            o = damask.Rotation.fromMatrix(np.array(list(map(float,table.data[column:column+d]))).reshape(3,3))
-        elif inputtype == 'frame':
-            M = np.array(list(map(float,table.data[column[0]:column[0]+3] + \
-                                        table.data[column[1]:column[1]+3] + \
-                                        table.data[column[2]:column[2]+3]))).reshape(3,3).T
-            o = damask.Rotation.fromMatrix(M/np.linalg.norm(M,axis=0))
-        elif inputtype == 'quaternion':
-            d = representations['quaternion'][1]
-            o = damask.Rotation.fromQuaternion(list(map(float,table.data[column:column+d])))
-
-        o = r*o*R                                    # apply additional lab and crystal frame rotations
-
-        if options.lattice is not None:
-            o = damask.Orientation(rotation = o,lattice = options.lattice).reduced().rotation
-
-        for output in options.output:
-            if   output == 'quaternion': table.data_append(o.asQuaternion())
-            elif output == 'rodrigues':  table.data_append(o.asRodrigues())
-            elif output == 'Rodrigues':  table.data_append(o.asRodrigues(vector=True))
-            elif output == 'eulers':     table.data_append(o.asEulers(degrees=options.degrees))
-            elif output == 'matrix':     table.data_append(o.asMatrix())
-            elif output == 'angleaxis':  table.data_append(o.asAxisAngle(degrees=options.degrees))
-        outputAlive = table.data_write()             # output processed line
-
-# ------------------------------------------ output finalization -----------------------------------
-
-    table.close()                                    # close ASCII tables
+    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+
+    if options.eulers is not None:
+        label = options.eulers
+        print(np.max(table.get(options.eulers),axis=0))
+        o = damask.Rotation.from_Eulers(table.get(options.eulers), options.degrees)
+    elif options.rodrigues is not None:
+        label = options.rodrigues
+        o = damask.Rotation.from_Rodrigues(table.get(options.rodrigues))
+    elif options.matrix is not None:
+        label = options.matrix
+        o = damask.Rotation.from_matrix(table.get(options.matrix).reshape(-1,3,3))
+    elif options.x is not None:
+        label = '<{},{},{}>'.format(options.x,options.y,options.z)
+        M = np.block([table.get(options.x),table.get(options.y),table.get(options.z)]).reshape(-1,3,3)
+        o = damask.Rotation.from_matrix(M/np.linalg.norm(M,axis=0))
+    elif options.quaternion is not None:
+        label = options.quaternion
+        o = damask.Rotation.from_quaternion(table.get(options.quaternion))
+
+    o = r.broadcast_to(o.shape) @ o @ R.broadcast_to(o.shape)
+
+    #if options.lattice is not None:
+    #    o = damask.Orientation(rotation = o,lattice = options.lattice).reduced().rotation
+
+    if 'rodrigues' in options.output:
+        table.add('ro({})'.format(label),o.as_Rodrigues(),               scriptID+' '+' '.join(sys.argv[1:]))
+    if 'eulers' in options.output:
+        table.add('eu({})'.format(label),o.as_Eulers(options.degrees),   scriptID+' '+' '.join(sys.argv[1:]))
+    if 'quaternion' in options.output:
+        table.add('qu({})'.format(label),o.as_quaternion(),              scriptID+' '+' '.join(sys.argv[1:]))
+    if 'matrix' in options.output:
+        table.add('om({})'.format(label),o.as_matrix(),                  scriptID+' '+' '.join(sys.argv[1:]))
+    if 'axisangle' in options.output:
+        table.add('ax({})'.format(label),o.as_axisangle(options.degrees),scriptID+' '+' '.join(sys.argv[1:]))
+
+    table.to_ASCII(sys.stdout if name is None else name)
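The rewrite trades the per-line read/convert/write loop for whole-column conversion: one Rotation array is built from all rows at once and each output representation is a single call. A minimal sketch of that pattern, using only the constructors named above (array contents assumed for illustration):

    import numpy as np
    import damask

    qu = np.random.rand(10,4)                       # stand-in for table.get('orientation')
    qu /= np.linalg.norm(qu,axis=1,keepdims=True)   # make them unit quaternions
    o  = damask.Rotation.from_quaternion(qu)        # one Rotation array, no per-row loop
    eu = o.as_Eulers(True)                          # all ten Euler triples in one call (degrees)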
@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add column(s) containing Second Piola--Kirchhoff stress based on given column(s) of deformation
-gradient and first Piola--Kirchhoff stress.
-
-""", version = scriptID)
-
-parser.add_option('-f','--defgrad',
-                  dest = 'defgrad',
-                  type = 'string', metavar = 'string',
-                  help = 'heading of columns containing deformation gradient [%default]')
-parser.add_option('-p','--stress',
-                  dest = 'stress',
-                  type = 'string', metavar = 'string',
-                  help = 'heading of columns containing first Piola--Kirchhoff stress [%default]')
-
-parser.set_defaults(defgrad = 'f',
-                    stress = 'p',
-                   )
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-
-    table.add('S',
-              damask.mechanics.PK2(table.get(options.stress ).reshape(-1,3,3),
-                                   table.get(options.defgrad).reshape(-1,3,3)).reshape(-1,9),
-              scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.to_ASCII(sys.stdout if name is None else name)
@@ -1,65 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add coordinates of stereographic projection of given direction (pole) in crystal frame.
-
-""", version = scriptID)
-
-parser.add_option('-p',
-                  '--pole',
-                  dest = 'pole',
-                  type = 'float', nargs = 3, metavar = 'float float float',
-                  help = 'crystal frame direction for pole figure [%default]')
-parser.add_option('--polar',
-                  dest = 'polar',
-                  action = 'store_true',
-                  help = 'output polar coordinates (r,φ) instead of Cartesian coordinates (x,y)')
-parser.add_option('-o',
-                  '--orientation',
-                  dest = 'quaternion',
-                  metavar = 'string',
-                  help = 'label of crystal orientation given as unit quaternion [%default]')
-
-parser.set_defaults(pole = (1.0,0.0,0.0),
-                    quaternion = 'orientation',
-                   )
-
-(options, filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-pole = np.array(options.pole)
-pole /= np.linalg.norm(pole)
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-    orientation = table.get(options.quaternion)
-    poles = np.empty((orientation.shape[0],2))
-    for i,o in enumerate(orientation):
-        rotatedPole = damask.Rotation(o)*pole        # rotate pole according to crystal orientation
-        (x,y) = rotatedPole[0:2]/(1.+abs(pole[2]))   # stereographic projection
-        poles[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if options.polar else [x,y]  # cartesian coordinates
-
-    table.add('pole_{}{}{}'.format(*options.pole),
-              poles,
-              scriptID+' '+' '.join(sys.argv[1:]))
-    table.to_ASCII(sys.stdout if name is None else name)
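(The removed script implemented the standard stereographic projection visible in its loop: for a rotated unit pole p, the planar coordinates are (x,y) = (p1,p2)/(1+|p3|), optionally converted to polar form (r,φ) = (sqrt(x²+y²), atan2(y,x)).)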
@@ -2,6 +2,7 @@
 
 import os
 import sys
+from io import StringIO
 from optparse import OptionParser
 
 import numpy as np
@@ -15,91 +16,82 @@ scriptID = ' '.join([scriptName,damask.version])
 slipSystems = {
 'fcc':
     np.array([
-   # Slip direction   Plane normal
-    [ 0, 1,-1,   1, 1, 1, ],
-    [-1, 0, 1,   1, 1, 1, ],
-    [ 1,-1, 0,   1, 1, 1, ],
-    [ 0,-1,-1,  -1,-1, 1, ],
-    [ 1, 0, 1,  -1,-1, 1, ],
-    [-1, 1, 0,  -1,-1, 1, ],
-    [ 0,-1, 1,   1,-1,-1, ],
-    [-1, 0,-1,   1,-1,-1, ],
-    [ 1, 1, 0,   1,-1,-1, ],
-    [ 0, 1, 1,  -1, 1,-1, ],
-    [ 1, 0,-1,  -1, 1,-1, ],
-    [-1,-1, 0,  -1, 1,-1, ],
-    ],'f'),
+    [+0,+1,-1 , +1,+1,+1],
+    [-1,+0,+1 , +1,+1,+1],
+    [+1,-1,+0 , +1,+1,+1],
+    [+0,-1,-1 , -1,-1,+1],
+    [+1,+0,+1 , -1,-1,+1],
+    [-1,+1,+0 , -1,-1,+1],
+    [+0,-1,+1 , +1,-1,-1],
+    [-1,+0,-1 , +1,-1,-1],
+    [+1,+1,+0 , +1,-1,-1],
+    [+0,+1,+1 , -1,+1,-1],
+    [+1,+0,-1 , -1,+1,-1],
+    [-1,-1,+0 , -1,+1,-1],
+    ],'d'),
 'bcc':
     np.array([
-   # Slip system <111>{110}
-    [ 1,-1, 1,   0, 1, 1, ],
-    [-1,-1, 1,   0, 1, 1, ],
-    [ 1, 1, 1,   0,-1, 1, ],
-    [-1, 1, 1,   0,-1, 1, ],
-    [-1, 1, 1,   1, 0, 1, ],
-    [-1,-1, 1,   1, 0, 1, ],
-    [ 1, 1, 1,  -1, 0, 1, ],
-    [ 1,-1, 1,  -1, 0, 1, ],
-    [-1, 1, 1,   1, 1, 0, ],
-    [-1, 1,-1,   1, 1, 0, ],
-    [ 1, 1, 1,  -1, 1, 0, ],
-    [ 1, 1,-1,  -1, 1, 0, ],
-   # Slip system <111>{112}
-    [-1, 1, 1,   2, 1, 1, ],
-    [ 1, 1, 1,  -2, 1, 1, ],
-    [ 1, 1,-1,   2,-1, 1, ],
-    [ 1,-1, 1,   2, 1,-1, ],
-    [ 1,-1, 1,   1, 2, 1, ],
-    [ 1, 1,-1,  -1, 2, 1, ],
-    [ 1, 1, 1,   1,-2, 1, ],
-    [-1, 1, 1,   1, 2,-1, ],
-    [ 1, 1,-1,   1, 1, 2, ],
-    [ 1,-1, 1,  -1, 1, 2, ],
-    [-1, 1, 1,   1,-1, 2, ],
-    [ 1, 1, 1,   1, 1,-2, ],
-    ],'f'),
+    [+1,-1,+1 , +0,+1,+1],
+    [-1,-1,+1 , +0,+1,+1],
+    [+1,+1,+1 , +0,-1,+1],
+    [-1,+1,+1 , +0,-1,+1],
+    [-1,+1,+1 , +1,+0,+1],
+    [-1,-1,+1 , +1,+0,+1],
+    [+1,+1,+1 , -1,+0,+1],
+    [+1,-1,+1 , -1,+0,+1],
+    [-1,+1,+1 , +1,+1,+0],
+    [-1,+1,-1 , +1,+1,+0],
+    [+1,+1,+1 , -1,+1,+0],
+    [+1,+1,-1 , -1,+1,+0],
+    [-1,+1,+1 , +2,+1,+1],
+    [+1,+1,+1 , -2,+1,+1],
+    [+1,+1,-1 , +2,-1,+1],
+    [+1,-1,+1 , +2,+1,-1],
+    [+1,-1,+1 , +1,+2,+1],
+    [+1,+1,-1 , -1,+2,+1],
+    [+1,+1,+1 , +1,-2,+1],
+    [-1,+1,+1 , +1,+2,-1],
+    [+1,+1,-1 , +1,+1,+2],
+    [+1,-1,+1 , -1,+1,+2],
+    [-1,+1,+1 , +1,-1,+2],
+    [+1,+1,+1 , +1,+1,-2],
+    ],'d'),
 'hex':
     np.array([
-   # Basal systems <11.0>{00.1} (independent of c/a-ratio, Bravais notation (4 coordinate base))
-    [ 2,-1,-1, 0,   0, 0, 0, 1, ],
-    [-1, 2,-1, 0,   0, 0, 0, 1, ],
-    [-1,-1, 2, 0,   0, 0, 0, 1, ],
-   # 1st type prismatic systems <11.0>{10.0} (independent of c/a-ratio)
-    [ 2,-1,-1, 0,   0, 1,-1, 0, ],
-    [-1, 2,-1, 0,  -1, 0, 1, 0, ],
-    [-1,-1, 2, 0,   1,-1, 0, 0, ],
-   # 2nd type prismatic systems <10.0>{11.0} -- a slip; plane normals independent of c/a-ratio
-    [ 0, 1,-1, 0,   2,-1,-1, 0, ],
-    [-1, 0, 1, 0,  -1, 2,-1, 0, ],
-    [ 1,-1, 0, 0,  -1,-1, 2, 0, ],
-   # 1st type 1st order pyramidal systems <11.0>{-11.1} -- plane normals depend on the c/a-ratio
-    [ 2,-1,-1, 0,   0, 1,-1, 1, ],
-    [-1, 2,-1, 0,  -1, 0, 1, 1, ],
-    [-1,-1, 2, 0,   1,-1, 0, 1, ],
-    [ 1, 1,-2, 0,  -1, 1, 0, 1, ],
-    [-2, 1, 1, 0,   0,-1, 1, 1, ],
-    [ 1,-2, 1, 0,   1, 0,-1, 1, ],
-   # pyramidal system: c+a slip <11.3>{-10.1} -- plane normals depend on the c/a-ratio
-    [ 2,-1,-1, 3,  -1, 1, 0, 1, ],
-    [ 1,-2, 1, 3,  -1, 1, 0, 1, ],
-    [-1,-1, 2, 3,   1, 0,-1, 1, ],
-    [-2, 1, 1, 3,   1, 0,-1, 1, ],
-    [-1, 2,-1, 3,   0,-1, 1, 1, ],
-    [ 1, 1,-2, 3,   0,-1, 1, 1, ],
-    [-2, 1, 1, 3,   1,-1, 0, 1, ],
-    [-1, 2,-1, 3,   1,-1, 0, 1, ],
-    [ 1, 1,-2, 3,  -1, 0, 1, 1, ],
-    [ 2,-1,-1, 3,  -1, 0, 1, 1, ],
-    [ 1,-2, 1, 3,   0, 1,-1, 1, ],
-    [-1,-1, 2, 3,   0, 1,-1, 1, ],
-   # pyramidal system: c+a slip <11.3>{-1-1.2} -- as for hexagonal ice (Castelnau et al. 1996, similar to twin system found below)
-    [ 2,-1,-1, 3,  -2, 1, 1, 2, ],  # sorted according to similar twin system
-    [-1, 2,-1, 3,   1,-2, 1, 2, ],  # <11.3>{-1-1.2} shear = 2((c/a)^2-2)/(3 c/a)
-    [-1,-1, 2, 3,   1, 1,-2, 2, ],
-    [-2, 1, 1, 3,   2,-1,-1, 2, ],
-    [ 1,-2, 1, 3,  -1, 2,-1, 2, ],
-    [ 1, 1,-2, 3,  -1,-1, 2, 2, ],
-    ],'f'),
+    [+2,-1,-1,+0 , +0,+0,+0,+1],
+    [-1,+2,-1,+0 , +0,+0,+0,+1],
+    [-1,-1,+2,+0 , +0,+0,+0,+1],
+    [+2,-1,-1,+0 , +0,+1,-1,+0],
+    [-1,+2,-1,+0 , -1,+0,+1,+0],
+    [-1,-1,+2,+0 , +1,-1,+0,+0],
+    [-1,+1,+0,+0 , +1,+1,-2,+0],
+    [+0,-1,+1,+0 , -2,+1,+1,+0],
+    [+1,+0,-1,+0 , +1,-2,+1,+0],
+    [-1,+2,-1,+0 , +1,+0,-1,+1],
+    [-2,+1,+1,+0 , +0,+1,-1,+1],
+    [-1,-1,+2,+0 , -1,+1,+0,+1],
+    [+1,-2,+1,+0 , -1,+0,+1,+1],
+    [+2,-1,-1,+0 , +0,-1,+1,+1],
+    [+1,+1,-2,+0 , +1,-1,+0,+1],
+    [-2,+1,+1,+3 , +1,+0,-1,+1],
+    [-1,-1,+2,+3 , +1,+0,-1,+1],
+    [-1,-1,+2,+3 , +0,+1,-1,+1],
+    [+1,-2,+1,+3 , +0,+1,-1,+1],
+    [+1,-2,+1,+3 , -1,+1,+0,+1],
+    [+2,-1,-1,+3 , -1,+1,+0,+1],
+    [+2,-1,-1,+3 , -1,+0,+1,+1],
+    [+1,+1,-2,+3 , -1,+0,+1,+1],
+    [+1,+1,-2,+3 , +0,-1,+1,+1],
+    [-1,+2,-1,+3 , +0,-1,+1,+1],
+    [-1,+2,-1,+3 , +1,-1,+0,+1],
+    [-2,+1,+1,+3 , +1,-1,+0,+1],
+    [-1,-1,+2,+3 , +1,+1,-2,+2],
+    [+1,-2,+1,+3 , -1,+2,-1,+2],
+    [+2,-1,-1,+3 , -2,+1,+1,+2],
+    [+1,+1,-2,+3 , -1,-1,+2,+2],
+    [-1,+2,-1,+3 , +1,-2,+1,+2],
+    [-2,+1,+1,+3 , +2,-1,-1,+2],
+    ],'d'),
 }
 
 # --------------------------------------------------------------------
@@ -111,11 +103,11 @@ Add columns listing Schmid factors (and optional trace vector of selected system
 
 """, version = scriptID)
 
-latticeChoices = ('fcc','bcc','hex')
+lattice_choices = list(slipSystems.keys())
 parser.add_option('-l',
                   '--lattice',
-                  dest = 'lattice', type = 'choice', choices = latticeChoices, metavar='string',
-                  help = 'type of lattice structure [%default] {}'.format(latticeChoices))
+                  dest = 'lattice', type = 'choice', choices = lattice_choices, metavar='string',
+                  help = 'type of lattice structure [%default] {}'.format(lattice_choices))
 parser.add_option('--covera',
                   dest = 'CoverA', type = 'float', metavar = 'float',
                   help = 'C over A ratio for hexagonal systems [%default]')
@@ -138,31 +130,29 @@ parser.add_option('-o',
 parser.set_defaults(force = (0.0,0.0,1.0),
                     quaternion='orientation',
                     normal = None,
-                    lattice = latticeChoices[0],
+                    lattice = lattice_choices[0],
                     CoverA = np.sqrt(8./3.),
                    )
 
 (options, filenames) = parser.parse_args()
+if filenames == []: filenames = [None]
 
-force = np.array(options.force)
-force /= np.linalg.norm(force)
+force = np.array(options.force)/np.linalg.norm(options.force)
 
 if options.normal is not None:
-    normal = np.array(options.normal)
-    normal /= np.linalg.norm(normal)
+    normal = np.array(options.normal)/np.linalg.norm(options.normal)
     if abs(np.dot(force,normal)) > 1e-3:
         parser.error('stress plane normal not orthogonal to force direction')
 else:
     normal = force
 
-slip_direction = np.zeros((len(slipSystems[options.lattice]),3),'f')
-slip_normal    = np.zeros_like(slip_direction)
-
-if options.lattice in latticeChoices[:2]:
+if options.lattice in ['bcc','fcc']:
     slip_direction = slipSystems[options.lattice][:,:3]
     slip_normal    = slipSystems[options.lattice][:,3:]
-elif options.lattice == latticeChoices[2]:
+elif options.lattice == 'hex':
+    slip_direction = np.zeros((len(slipSystems['hex']),3),'d')
+    slip_normal    = np.zeros_like(slip_direction)
     # convert 4 Miller index notation of hex to orthogonal 3 Miller index notation
     for i in range(len(slip_direction)):
         slip_direction[i] = np.array([slipSystems['hex'][i,0]*1.5,
@@ -174,52 +164,29 @@ elif options.lattice == latticeChoices[2]:
                                       slipSystems['hex'][i,7]/options.CoverA,
                                      ])
 
-slip_direction /= np.tile(np.linalg.norm(slip_direction,axis=1),(3,1)).T
-slip_normal    /= np.tile(np.linalg.norm(slip_normal   ,axis=1),(3,1)).T
-
-# --- loop over input files ------------------------------------------------------------------------
-
-if filenames == []: filenames = [None]
+slip_direction /= np.linalg.norm(slip_direction,axis=1,keepdims=True)
+slip_normal    /= np.linalg.norm(slip_normal,   axis=1,keepdims=True)
+
+labels = ['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
+          '({normal[0]:.1g}_{normal[1]:.1g}_{normal[2]:.1g})'\
+          .format(normal = theNormal, direction = theDirection,
+                 ) for theNormal,theDirection in zip(slip_normal,slip_direction)]
 
 for name in filenames:
-    try:
-        table = damask.ASCIItable(name = name)
-    except IOError:
-        continue
     damask.util.report(scriptName,name)
 
-# ------------------------------------------ read header ------------------------------------------
-
-    table.head_read()
-
-# ------------------------------------------ sanity checks ----------------------------------------
-    if not table.label_dimension(options.quaternion) == 4:
-        damask.util.croak('input {} does not have dimension 4.'.format(options.quaternion))
-        table.close(dismiss = True)                  # close ASCIItable and remove empty file
-        continue
-
-    column = table.label_index(options.quaternion)
-
-# ------------------------------------------ assemble header ---------------------------------------
-
-    table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
-    table.labels_append(['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
-                         '({normal[0]:.1g}_{normal[1]:.1g}_{normal[2]:.1g})'\
-                         .format(normal = theNormal,
-                                 direction = theDirection,
-                                ) for theNormal,theDirection in zip(slip_normal,slip_direction)])
-    table.head_write()
-
-# ------------------------------------------ process data ------------------------------------------
-
-    outputAlive = True
-    while outputAlive and table.data_read():         # read next data line of ASCII table
-        o = damask.Rotation(np.array(list(map(float,table.data[column:column+4]))))
-
-        table.data_append( np.abs( np.sum(slip_direction * (o * force) ,axis=1) \
-                                 * np.sum(slip_normal    * (o * normal),axis=1)))
-        outputAlive = table.data_write()             # output processed line
-
-# ------------------------------------------ output finalization -----------------------------------
-
-    table.close()                                    # close ASCII tables
+    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
+    o = damask.Rotation.from_quaternion(table.get(options.quaternion))
+
+    force  = np.broadcast_to(force, o.shape+(3,))
+    normal = np.broadcast_to(normal,o.shape+(3,))
+    slip_direction = np.broadcast_to(slip_direction,o.shape+slip_direction.shape)
+    slip_normal    = np.broadcast_to(slip_normal,   o.shape+slip_normal.shape)
+    S = np.abs(np.einsum('ijk,ik->ij',slip_direction,(o@force))*
+               np.einsum('ijk,ik->ij',slip_normal,   (o@normal)))
+
+    for i,label in enumerate(labels):
+        table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:]))
+
+    table.to_ASCII(sys.stdout if name is None else name)
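The vectorized form computes every Schmid factor for every row in two einsum contractions: per orientation, S = |(d·f)(n·m)| with unit slip direction d, slip-plane normal n, loading direction f and stress-plane normal m, all expressed in the same frame. A toy check of the einsum shapes (pure NumPy, values arbitrary):

    import numpy as np

    N,nsys = 4,12
    d = np.random.rand(N,nsys,3)   # slip directions, per row and system
    n = np.random.rand(N,nsys,3)   # slip-plane normals, per row and system
    f = np.random.rand(N,3)        # loading direction, per row
    m = np.random.rand(N,3)        # stress-plane normal, per row
    # (N,nsys) array of Schmid factors, no Python loop over rows
    S = np.abs(np.einsum('ijk,ik->ij',d,f)*np.einsum('ijk,ik->ij',n,m))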
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import numpy as np
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add column(s) containing eigenvalues and eigenvectors of requested symmetric tensor column(s).
-
-""", version = scriptID)
-
-parser.add_option('-t','--tensor',
-                  dest = 'tensor',
-                  action = 'extend', metavar = '<string LIST>',
-                  help = 'heading of columns containing tensor field values')
-parser.add_option('--no-check',
-                  dest = 'rh',
-                  action = 'store_false',
-                  help = 'skip check for right-handed eigenvector basis')
-
-parser.set_defaults(rh = True,
-                   )
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-
-    for tensor in options.tensor:
-
-        t = table.get(tensor).reshape(-1,3,3)
-        (u,v) = np.linalg.eigh(damask.mechanics.symmetric(t))
-        if options.rh: v[np.linalg.det(v) < 0.0,:,2] *= -1.0
-
-        for i,o in enumerate(['Min','Mid','Max']):
-            table.add('eigval{}({})'.format(o,tensor),u[:,i],  scriptID+' '+' '.join(sys.argv[1:]))
-
-        for i,o in enumerate(['Min','Mid','Max']):
-            table.add('eigvec{}({})'.format(o,tensor),v[:,:,i],scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.to_ASCII(sys.stdout if name is None else name)
@@ -1,98 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-from io import StringIO
-from optparse import OptionParser
-
-import damask
-
-
-scriptName = os.path.splitext(os.path.basename(__file__))[0]
-scriptID   = ' '.join([scriptName,damask.version])
-
-def parameters(stretch,strain):
-    """Albrecht Bertram: Elasticity and Plasticity of Large Deformations An Introduction (3rd Edition, 2012), p. 102."""
-    return {
-            'V#ln':    ('V',0.0),
-            'U#ln':    ('U',0.0),
-            'V#Biot':  ('V',-.5),
-            'U#Biot':  ('U',+.5),
-            'V#Green': ('V',-1.),
-            'U#Green': ('U',+1.),
-           }[stretch+'#'+strain]
-
-
-# --------------------------------------------------------------------
-#                                MAIN
-# --------------------------------------------------------------------
-
-parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
-Add column(s) containing given strains based on given stretches of requested deformation gradient column(s).
-
-""", version = scriptID)
-
-parser.add_option('-u','--right',
-                  dest = 'right',
-                  action = 'store_true',
-                  help = 'material strains based on right Cauchy--Green deformation, i.e., C and U')
-parser.add_option('-v','--left',
-                  dest = 'left',
-                  action = 'store_true',
-                  help = 'spatial strains based on left Cauchy--Green deformation, i.e., B and V')
-parser.add_option('-0','--logarithmic',
-                  dest = 'logarithmic',
-                  action = 'store_true',
-                  help = 'calculate logarithmic strain tensor')
-parser.add_option('-1','--biot',
-                  dest = 'biot',
-                  action = 'store_true',
-                  help = 'calculate biot strain tensor')
-parser.add_option('-2','--green',
-                  dest = 'green',
-                  action = 'store_true',
-                  help = 'calculate green strain tensor')
-parser.add_option('-f','--defgrad',
-                  dest = 'defgrad',
-                  action = 'extend',
-                  metavar = '<string LIST>',
-                  help = 'heading(s) of columns containing deformation tensor values [%default]')
-
-parser.set_defaults(
-                    defgrad = ['f'],
-                   )
-
-(options,filenames) = parser.parse_args()
-if filenames == []: filenames = [None]
-
-if len(options.defgrad) > 1:
-    options.defgrad = options.defgrad[1:]
-
-stretches = []
-strains = []
-
-if options.right: stretches.append('U')
-if options.left: stretches.append('V')
-if options.logarithmic: strains.append('ln')
-if options.biot: strains.append('Biot')
-if options.green: strains.append('Green')
-
-if options.defgrad is None:
-    parser.error('no data column specified.')
-
-for name in filenames:
-    damask.util.report(scriptName,name)
-
-    table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
-
-    for defgrad in options.defgrad:
-        F = table.get(defgrad).reshape(-1,3,3)
-        for theStretch in stretches:
-            for theStrain in strains:
-                (t,m) = parameters(theStretch,theStrain)
-                label = '{}({}){}'.format(theStrain,theStretch,defgrad if defgrad != 'f' else '')
-                table.add(label,
-                          damask.mechanics.strain_tensor(F,t,m).reshape(-1,9),
-                          scriptID+' '+' '.join(sys.argv[1:]))
-
-    table.to_ASCII(sys.stdout if name is None else name)
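(The (t,m) pairs in parameters() are the Seth–Hill strain family cited from Bertram: E_m = (U^{2m} − I)/(2m) for m ≠ 0, with Biot strain at m = ±1/2, Green strain at m = ±1, and the logarithmic strain ln U as the m → 0 limit; t selects the left (V) or right (U) stretch tensor.)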
@@ -1,63 +0,0 @@
#!/usr/bin/env python3

import os
import sys
from io import StringIO
from optparse import OptionParser

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Rotate vector and/or tensor column data by given angle around given axis.

""", version = scriptID)

parser.add_option('-d', '--data',
                  dest = 'data',
                  action = 'extend', metavar = '<string LIST>',
                  help = 'vector/tensor value(s) label(s)')
parser.add_option('-r', '--rotation',
                  dest = 'rotation',
                  type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
                  help = 'axis and angle to rotate data [%default]')
parser.add_option('--degrees',
                  dest = 'degrees',
                  action = 'store_true',
                  help = 'angles are given in degrees')

parser.set_defaults(rotation = (1.,1.,1.,0),                 # no rotation about (1,1,1)
                    degrees = False,
                   )

(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]

if options.data is None:
  parser.error('no data column specified.')

r = damask.Rotation.fromAxisAngle(options.rotation,options.degrees,normalise=True)

for name in filenames:
  damask.util.report(scriptName,name)

  table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)

  for data in options.data:
    d = table.get(data)
    if table.shapes[data] == (9,): d = d.reshape(-1,3,3)
    for i,l in enumerate(d):
      d[i] = r*l
    if table.shapes[data] == (9,): d = d.reshape(-1,9)

    table.set(data,d,scriptID+' '+' '.join(sys.argv[1:]))

  table.to_ASCII(sys.stdout if name is None else name)
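Restated, the deleted script builds one Rotation from the axis-angle option and applies it row by row to each 3-vector or 3x3 tensor column. A sketch using the same (pre-rename) calls as above:

import numpy as np
import damask

r = damask.Rotation.fromAxisAngle([0.,0.,1.,90.],True,normalise=True)   # 90 degrees about z
v = np.array([1.,0.,0.])
T = np.outer(v,v)
print(r*v)                                                   # vector rotates to roughly [0,1,0]
print(r*T)                                                   # 3x3 data transforms as R T R^T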
@@ -1,89 +0,0 @@
#!/usr/bin/env python3

import os
from optparse import OptionParser

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

parser = OptionParser(usage='%prog options [ASCIItable(s)]', description = """
Show components of given ASCIItable(s).

""", version = scriptID)


parser.add_option('-a','--head',
                  dest = 'head',
                  action = 'store_true',
                  help = 'output complete header (info + labels)')
parser.add_option('-i','--info',
                  dest = 'info',
                  action = 'store_true',
                  help = 'output info lines')
parser.add_option('-l','--labels',
                  dest = 'labels',
                  action = 'store_true',
                  help = 'output labels')
parser.add_option('-d','--data',
                  dest = 'data',
                  action = 'store_true',
                  help = 'output data')
parser.add_option('-t','--table',
                  dest = 'table',
                  action = 'store_true',
                  help = 'output heading line for proper ASCIItable format')
parser.add_option('--nolabels',
                  dest = 'labeled',
                  action = 'store_false',
                  help = 'table has no labels')
parser.set_defaults(table = False,
                    head = False,
                    info = False,
                    labels = False,
                    data = False,
                    labeled = True,
                   )

(options,filenames) = parser.parse_args()

# --- loop over input files -------------------------------------------------------------------------

if filenames == []: filenames = [None]

for name in filenames:
  try:
    table = damask.ASCIItable(name = name, labeled = options.labeled, readonly = True)
  except IOError:
    continue
  details = ', '.join(
           (['header'] if options.table else []) +
           (['info']   if options.head or options.info else []) +
           (['labels'] if options.head or options.labels else []) +
           (['data']   if options.data else []) +
           []
           )
  damask.util.report(scriptName,(name if name is not None else '') + ('' if details == '' else ' -- '+details))

# ------------------------------------------ output head ---------------------------------------

  table.head_read()
  if not (options.head or options.info):                          table.info_clear()
  if not (options.head or (options.labels and options.labeled)):  table.labels_clear()

  table.head_write(header = options.table)

# ------------------------------------------ output data ---------------------------------------

  outputAlive = options.data
  while outputAlive and table.data_read():                   # read next data line of ASCII table
    outputAlive = table.data_write()                         # output line

  table.close()
@@ -84,9 +84,9 @@ if [options.angleaxis,options.quaternion].count(None) == 0:
   parser.error('more than one rotation specified.')

 if options.angleaxis is not None:
-  rotation = damask.Rotation.fromAxisAngle(np.array(options.angleaxis),options.degrees,normalise=True)
+  rotation = damask.Rotation.from_axis_angle(np.array(options.angleaxis),options.degrees,normalise=True)
 elif options.quaternion is not None:
-  rotation = damask.Rotation.fromQuaternion(options.quaternion)
+  rotation = damask.Rotation.from_quaternion(options.quaternion)
 else:
   rotation = damask.Rotation()
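This hunk is part of a systematic rename of the Rotation constructors from camelCase to snake_case (fromAxisAngle becomes from_axis_angle, fromQuaternion becomes from_quaternion, and so on); the argument lists are unchanged. Equivalent calls, as a sketch:

import numpy as np
import damask

rotation = damask.Rotation.from_axis_angle(np.array([0.,0.,1.,45.]),True,normalise=True)  # was fromAxisAngle
rotation = damask.Rotation.from_quaternion([1.,0.,0.,0.])                                 # was fromQuaternion; identity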
@@ -1,74 +0,0 @@
#!/usr/bin/env python3

import os
import sys
from io import StringIO
from optparse import OptionParser

import numpy as np

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


# --------------------------------------------------------------------
#                                MAIN
# --------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Increases or decreases the (three-dimensional) canvas.
Grid can be given as absolute or relative values, e.g. 16 16 16 or 2x 0.5x 32.

""", version = scriptID)

parser.add_option('-g','--grid',
                  dest = 'grid',
                  type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
                  help = 'a,b,c grid of hexahedral box')
parser.add_option('-o','--offset',
                  dest = 'offset',
                  type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
                  help = 'a,b,c offset from old to new origin of grid [%default]')
parser.add_option('-f','--fill',
                  dest = 'fill',
                  type = 'float', metavar = 'int',
                  help = 'background microstructure index, defaults to max microstructure index + 1')

parser.set_defaults(offset = (0,0,0))

(options, filenames) = parser.parse_args()


if filenames == []: filenames = [None]

for name in filenames:
  damask.util.report(scriptName,name)

  geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
  origin = geom.get_origin()
  size   = geom.get_size()
  old = new = geom.get_grid()
  offset = np.asarray(options.offset)

  if options.grid is not None:
    new = np.maximum(1,
                     np.array([int(o*float(n.lower().replace('x',''))) if n.lower().endswith('x') \
                               else int(n) for o,n in zip(old,options.grid)],dtype=int))

  canvas = np.full(new,options.fill if options.fill is not None
                   else np.nanmax(geom.microstructure)+1,geom.microstructure.dtype)

  l = np.clip( offset,    0,np.minimum(old  +offset,new))    # noqa
  r = np.clip( offset+old,0,np.minimum(old*2+offset,new))
  L = np.clip(-offset,    0,np.minimum(new  -offset,old))
  R = np.clip(-offset+new,0,np.minimum(new*2-offset,old))

  canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = geom.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]


  damask.util.croak(geom.update(canvas,origin=origin+offset*size/old,rescale=True))
  geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

  geom.to_file(sys.stdout if name is None else name,pack=False)
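The four clip expressions above compute, per axis, the destination slice l:r in the new canvas and the matching source slice L:R in the old grid, so any offset (positive or negative, cropping or padding) is handled uniformly. A one-dimensional sketch with hypothetical sizes:

import numpy as np

old,new,offset = 4,6,-1                                      # hypothetical grid sizes and shift
l = np.clip( offset,    0,min(old  +offset,new))
r = np.clip( offset+old,0,min(old*2+offset,new))
L = np.clip(-offset,    0,min(new  -offset,old))
R = np.clip(-offset+new,0,min(new*2-offset,old))
canvas = np.full(new,-1)                                     # -1 plays the role of the fill index
canvas[l:r] = np.arange(old)[L:R]
print(canvas)                                                # -> [ 1  2  3 -1 -1 -1]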
@@ -1,37 +0,0 @@
#!/usr/bin/env python3

import os
import sys
from io import StringIO
from optparse import OptionParser

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


#--------------------------------------------------------------------------------------------------
#                                MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
Writes vtk file for visualization.

""", version = scriptID)

(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]

for name in filenames:
  damask.util.report(scriptName,name)

  geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)

  damask.util.croak(geom)

  if name:
    geom.to_vtk(os.path.splitext(name)[0])
  else:
    sys.stdout.write(geom.to_vtk())
@@ -97,7 +97,7 @@ for name in filenames:
     dataset = os.path.join(group_pointwise,options.quaternion)
     try:
       quats = np.reshape(inFile[dataset][...],(np.product(grid),4))
-      rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats]
+      rot = [damask.Rotation.from_quaternion(q,True,P=+1) for q in quats]
     except KeyError:
       errors.append('Pointwise orientation (quaternion) data ({}) not readable'.format(dataset))
@@ -123,7 +123,7 @@ for name in filenames:

     dataset = os.path.join(group_average,options.quaternion)
     try:
-      rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]]        # skip first entry (unindexed)
+      rot = [damask.Rotation.from_quaternion(q,True,P=+1) for q in inFile[dataset][...][1:]]       # skip first entry (unindexed)
     except KeyError:
       errors.append('Average orientation data ({}) not readable'.format(dataset))
@@ -140,7 +140,7 @@ for name in filenames:
   config_header = ['<texture>']
   for i in range(np.nanmax(microstructure)):
     config_header += ['[{}{}]'.format(label,i+1),
-                      '(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*rot[i].asEulers(degrees = True)),
+                      '(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*rot[i].as_Eulers(degrees = True)),
                      ]
   config_header += ['<microstructure>']
   for i in range(np.nanmax(microstructure)):
@@ -89,7 +89,7 @@ for name in filenames:
   for i,data in enumerate(unique):
     ori = damask.Rotation(data[0:4])
     config_header += ['[Grain{}]'.format(i+1),
-                      '(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*ori.asEulers(degrees = True)),
+                      '(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*ori.as_Eulers(degrees = True)),
                      ]
   if options.axes is not None: config_header += ['axes\t{} {} {}'.format(*options.axes)]

@@ -1,35 +0,0 @@
#!/usr/bin/env python3

import os
import sys
from io import StringIO
from optparse import OptionParser

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


#--------------------------------------------------------------------------------------------------
#                                MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
Renumber sorted microstructure indices to 1,...,N.

""", version=scriptID)

(options, filenames) = parser.parse_args()


if filenames == []: filenames = [None]

for name in filenames:
  damask.util.report(scriptName,name)

  geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
  damask.util.croak(geom.renumber())
  geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
  geom.to_file(sys.stdout if name is None else name,pack=False)
@@ -1,98 +0,0 @@
#!/usr/bin/env python3

import os
import sys
from io import StringIO
from optparse import OptionParser

from scipy import ndimage
import numpy as np

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


#--------------------------------------------------------------------------------------------------
#                                MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Rotates original microstructure and embeds it into buffer material.

""", version=scriptID)

parser.add_option('-r', '--rotation',
                  dest='rotation',
                  type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
                  help = 'rotation given as axis and angle')
parser.add_option('-e', '--eulers',
                  dest = 'eulers',
                  type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
                  help = 'rotation given as Euler angles')
parser.add_option('-d', '--degrees',
                  dest = 'degrees',
                  action = 'store_true',
                  help = 'Euler angles/axis angle are given in degrees')
parser.add_option('-m', '--matrix',
                  dest = 'matrix',
                  type = 'float', nargs = 9, metavar = ' '.join(['float']*9),
                  help = 'rotation given as matrix')
parser.add_option('-q', '--quaternion',
                  dest = 'quaternion',
                  type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
                  help = 'rotation given as quaternion')
parser.add_option('-f', '--fill',
                  dest = 'fill',
                  type = 'float', metavar = 'int',
                  help = 'background microstructure index, defaults to max microstructure index + 1')

parser.set_defaults(degrees = False)

(options, filenames) = parser.parse_args()

if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) < 3:
  parser.error('more than one rotation specified.')
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) > 3:
  parser.error('no rotation specified.')

if options.quaternion is not None:
  rot = damask.Rotation.fromQuaternion(np.array(options.quaternion))                               # we might need P=+1 here, too...
if options.rotation is not None:
  rot = damask.Rotation.fromAxisAngle(np.array(options.rotation),degrees=options.degrees,normalise=True,P=+1)
if options.matrix is not None:
  rot = damask.Rotation.fromMatrix(np.array(options.matrix))
if options.eulers is not None:
  rot = damask.Rotation.fromEulers(np.array(options.eulers),degrees=options.degrees)

eulers = rot.asEulers(degrees=True)


if filenames == []: filenames = [None]

for name in filenames:
  damask.util.report(scriptName,name)

  geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
  size   = geom.get_size()
  grid   = geom.get_grid()
  origin = geom.get_origin()
  microstructure = geom.get_microstructure()
  fill  = np.nanmax(microstructure)+1 if options.fill is None else options.fill
  dtype = float if np.isnan(fill) or int(fill) != fill or microstructure.dtype==np.float else int

  # These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
  # this seems to be ok, see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf
  microstructure = ndimage.rotate(microstructure,eulers[2],(0,1),order=0,
                                  prefilter=False,output=dtype,cval=fill)                          # rotation around z
  microstructure = ndimage.rotate(microstructure,eulers[1],(1,2),order=0,
                                  prefilter=False,output=dtype,cval=fill)                          # rotation around x
  microstructure = ndimage.rotate(microstructure,eulers[0],(0,1),order=0,
                                  prefilter=False,output=dtype,cval=fill)                          # rotation around z

  damask.util.croak(geom.update(microstructure,origin=origin-(np.asarray(microstructure.shape)-grid)/2*size/grid,rescale=True))
  geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

  geom.to_file(sys.stdout if name is None else name,pack=False)
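The three ndimage.rotate calls above apply the Bunge Euler angles in reverse order (phi2 about z, then Phi about x, then phi1 about z), each in the fixed reference frame, with order=0 so that integer microstructure indices are never interpolated. A sketch of the same pattern on a toy array:

from scipy import ndimage
import numpy as np

m = np.zeros((8,8,8),dtype=int)
m[2:6,2:6,2:6] = 1                                           # hypothetical cubic grain in buffer material
phi1,Phi,phi2 = 30.,20.,10.
for angle,axes in zip([phi2,Phi,phi1],[(0,1),(1,2),(0,1)]):  # z, x, z in the reference frame
    m = ndimage.rotate(m,angle,axes,order=0,prefilter=False,output=int,cval=0)
print(m.shape)                                               # grows to hold the rotated bounding box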
@@ -1,61 +0,0 @@
#!/usr/bin/env python3

import os
import sys
from optparse import OptionParser
from io import StringIO

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


#--------------------------------------------------------------------------------------------------
#                                MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Translate microstructure indices (shift or substitute) and/or geometry origin.

""", version=scriptID)

parser.add_option('-o', '--origin',
                  dest = 'origin',
                  type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
                  help = 'offset from old to new origin of grid')
parser.add_option('-m', '--microstructure',
                  dest = 'microstructure',
                  type = 'int', metavar = 'int',
                  help = 'offset from old to new microstructure indices (after substitution)')
parser.add_option('-s', '--substitute',
                  dest = 'substitute',
                  action = 'extend', metavar = '<string LIST>',
                  help = 'substitutions of microstructure indices from,to,from,to,...')

parser.set_defaults(origin = (0.0,0.0,0.0),
                    microstructure = 0,
                    substitute = []
                   )

(options, filenames) = parser.parse_args()

sub = list(map(int,options.substitute))


if filenames == []: filenames = [None]

for name in filenames:
  damask.util.report(scriptName,name)

  geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)

  substituted = geom.get_microstructure()
  for old,new in zip(sub[0::2],sub[1::2]): substituted[geom.microstructure==old] = new             # substitute microstructure indices
  substituted += options.microstructure                                                            # constant shift

  damask.util.croak(geom.update(substituted,origin=geom.get_origin()+options.origin))
  geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))

  geom.to_file(sys.stdout if name is None else name,pack=False)
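The flat --substitute list is consumed pairwise as from,to,from,to,... via the two strided slices, and matching is always done against the original indices, so substitutions cannot cascade; the constant shift is applied afterwards. A sketch:

import numpy as np

sub = [1,10, 2,20]                                           # hypothetical from,to pairs
ms  = np.array([1,2,3,2])
out = ms.copy()
for old,new in zip(sub[0::2],sub[1::2]):
    out[ms==old] = new                                       # compare against the original array
out += 0                                                     # constant shift (options.microstructure)
print(out)                                                   # -> [10 20  3 20]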
@@ -1,39 +0,0 @@
#!/usr/bin/env python3

import os
import sys
from io import StringIO
from optparse import OptionParser

import damask


scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID   = ' '.join([scriptName,damask.version])


#--------------------------------------------------------------------------------------------------
#                                MAIN
#--------------------------------------------------------------------------------------------------

parser = OptionParser(option_class=damask.extendableOption, usage='%prog [seedfile(s)]', description = """
Writes vtk file for visualization.

""", version = scriptID)

(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]

for name in filenames:
  damask.util.report(scriptName,name)

  seeds = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
  v = damask.VTK.from_polyData(seeds.get('pos'))
  for label in seeds.shapes.keys():
    if label == 'pos': continue                              # position already defines the point cloud
    v.add(seeds.get(label),label)

  if name:
    v.write(os.path.splitext(name)[0])
  else:
    sys.stdout.write(v.__repr__())
@@ -1,4 +1,4 @@
-"""Main aggregator."""
+"""Tools for pre and post processing of DAMASK simulations."""
 import os as _os
 import re as _re

@@ -242,7 +242,7 @@ class Geom:

   def get_grid(self):
     """Return the grid discretization."""
-    return np.array(self.microstructure.shape)
+    return np.asarray(self.microstructure.shape)


   def get_homogenization(self):
@@ -533,8 +533,8 @@ class Geom:

     Parameters
     ----------
-    grid : iterable of int
-        new grid dimension
+    grid : numpy.ndarray of shape (3)
+        number of grid points in x,y,z direction.

     """
     #self.add_comments('geom.py:scale v{}'.format(version)
@@ -581,3 +581,90 @@ class Geom:

     #self.add_comments('geom.py:renumber v{}'.format(version)
     return self.update(renumbered)
+
+
+  def rotate(self,R,fill=None):
+    """
+    Rotate microstructure (pad if required).
+
+    Parameters
+    ----------
+    R : damask.Rotation
+        rotation to apply to the microstructure.
+    fill : int or float, optional
+        microstructure index to fill the corners. Defaults to microstructure.max() + 1.
+
+    """
+    if fill is None: fill = np.nanmax(self.microstructure) + 1
+    dtype = float if np.isnan(fill) or int(fill) != fill or self.microstructure.dtype==np.float else int
+
+    Eulers = R.as_Eulers(degrees=True)
+    microstructure_in = self.get_microstructure()
+
+    # These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
+    # see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf
+    for angle,axes in zip(Eulers[::-1], [(0,1),(1,2),(0,1)]):
+      microstructure_out = ndimage.rotate(microstructure_in,angle,axes,order=0,
+                                          prefilter=False,output=dtype,cval=fill)
+      if np.prod(microstructure_in.shape) == np.prod(microstructure_out.shape):
+        # avoid scipy interpolation errors for rotations close to multiples of 90°
+        microstructure_in = np.rot90(microstructure_in,k=np.rint(angle/90.).astype(int),axes=axes)
+      else:
+        microstructure_in = microstructure_out
+
+    origin = self.origin-(np.asarray(microstructure_in.shape)-self.grid)*.5 * self.size/self.grid
+
+    #self.add_comments('geom.py:rotate v{}'.format(version)
+    return self.update(microstructure_in,origin=origin,rescale=True)
+
+
+  def canvas(self,grid=None,offset=None,fill=None):
+    """
+    Crop or enlarge/pad microstructure.
+
+    Parameters
+    ----------
+    grid : numpy.ndarray of shape (3)
+        number of grid points in x,y,z direction.
+    offset : numpy.ndarray of shape (3)
+        offset (measured in grid points) from old to new microstructure[0,0,0].
+    fill : int or float, optional
+        microstructure index to fill the corners. Defaults to microstructure.max() + 1.
+
+    """
+    if fill is None: fill = np.nanmax(self.microstructure) + 1
+    if offset is None: offset = 0
+    dtype = float if int(fill) != fill or self.microstructure.dtype==np.float else int
+
+    canvas = np.full(self.grid if grid is None else grid,
+                     fill if fill is not None else np.nanmax(self.microstructure)+1,dtype)
+
+    l = np.clip( offset,          0,np.minimum(self.grid  +offset,grid))                           # noqa
+    r = np.clip( offset+self.grid,0,np.minimum(self.grid*2+offset,grid))
+    L = np.clip(-offset,          0,np.minimum(grid       -offset,self.grid))
+    R = np.clip(-offset+grid,     0,np.minimum(grid*2     -offset,self.grid))
+
+    canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = self.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
+
+    #self.add_comments('geom.py:canvas v{}'.format(version)
+    return self.update(canvas,origin=self.origin+offset*self.size/self.grid,rescale=True)
+
+
+  def substitute(self,from_microstructure,to_microstructure):
+    """
+    Substitute microstructure indices.
+
+    Parameters
+    ----------
+    from_microstructure : iterable of ints
+        microstructure indices to be substituted.
+    to_microstructure : iterable of ints
+        new microstructure indices.
+
+    """
+    substituted = self.get_microstructure()
+    for from_ms,to_ms in zip(from_microstructure,to_microstructure):
+      substituted[self.microstructure==from_ms] = to_ms
+
+    #self.add_comments('geom.py:substitute v{}'.format(version)
+    return self.update(substituted)
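With these methods, the shell scripts deleted above become one-liners on a Geom object. A usage sketch (the file name is hypothetical; signatures as added above):

import numpy as np
import damask

geom = damask.Geom.from_file('microstructure.geom')                                      # hypothetical file
geom.canvas(grid=np.array([32,32,32]),offset=np.array([-8,-8,-8]))                       # replaces geom_canvas
geom.substitute([1,2],[2,1])                                                             # replaces geom_translate -s (swap 1 and 2)
geom.rotate(damask.Rotation.from_axis_angle(np.array([0.,0.,1.,45.]),True,normalise=True))  # replaces geom_rotate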
@@ -229,19 +229,20 @@ class Symmetry:
     Return inverse pole figure color if requested.

     Bases are computed from
-    basis = {'cubic' :       np.linalg.inv(np.array([[0.,0.,1.],                    # direction of red
-                                                     [1.,0.,1.]/np.sqrt(2.),        # direction of green
-                                                     [1.,1.,1.]/np.sqrt(3.)]).T),   # direction of blue
-             'hexagonal' :   np.linalg.inv(np.array([[0.,0.,1.],                    # direction of red
-                                                     [1.,0.,0.],                    # direction of green
-                                                     [np.sqrt(3.),1.,0.]/np.sqrt(4.)]).T),  # direction of blue
-             'tetragonal' :  np.linalg.inv(np.array([[0.,0.,1.],                    # direction of red
-                                                     [1.,0.,0.],                    # direction of green
-                                                     [1.,1.,0.]/np.sqrt(2.)]).T),   # direction of blue
-             'orthorhombic' : np.linalg.inv(np.array([[0.,0.,1.],                   # direction of red
-                                                      [1.,0.,0.],                   # direction of green
-                                                      [0.,1.,0.]]).T),              # direction of blue
-            }
+    >>> basis = {'cubic' :       np.linalg.inv(np.array([[0.,0.,1.],                # direction of red
+    ...                                                  [1.,0.,1.]/np.sqrt(2.),    # direction of green
+    ...                                                  [1.,1.,1.]/np.sqrt(3.)]).T),  # direction of blue
+    ...          'hexagonal' :   np.linalg.inv(np.array([[0.,0.,1.],                # direction of red
+    ...                                                  [1.,0.,0.],                # direction of green
+    ...                                                  [np.sqrt(3.),1.,0.]/np.sqrt(4.)]).T),  # direction of blue
+    ...          'tetragonal' :  np.linalg.inv(np.array([[0.,0.,1.],                # direction of red
+    ...                                                  [1.,0.,0.],                # direction of green
+    ...                                                  [1.,1.,0.]/np.sqrt(2.)]).T),  # direction of blue
+    ...          'orthorhombic': np.linalg.inv(np.array([[0.,0.,1.],                # direction of red
+    ...                                                  [1.,0.,0.],                # direction of green
+    ...                                                  [0.,1.,0.]]).T),           # direction of blue
+    ...         }

     """
     if self.lattice == 'cubic':
       basis = {'improper':np.array([ [-1. ,  0. ,  1. ],
@@ -634,6 +635,6 @@ class Lattice:
       otherDir    = miller[otherDir_id]/ np.linalg.norm(miller[otherDir_id])
       otherMatrix = np.array([otherDir,np.cross(otherPlane,otherDir),otherPlane])

-      r['rotations'].append(Rotation.fromMatrix(np.dot(otherMatrix.T,myMatrix)))
+      r['rotations'].append(Rotation.from_matrix(np.dot(otherMatrix.T,myMatrix)))

     return r
@@ -8,6 +8,7 @@ class Orientation:
     Crystallographic orientation.

     A crystallographic orientation contains a rotation and a lattice.
+
     """

     __slots__ = ['rotation','lattice']
@@ -36,7 +37,7 @@ class Orientation:
     if isinstance(rotation, Rotation):
       self.rotation = rotation
     else:
-      self.rotation = Rotation.fromQuaternion(rotation)                                            # assume quaternion
+      self.rotation = Rotation.from_quaternion(rotation)                                           # assume quaternion

     if self.rotation.quaternion.shape != (4,):
       raise NotImplementedError('Support for multiple rotations missing')
@@ -49,8 +50,10 @@ class Orientation:
     Disorientation between myself and given other orientation.

     Rotation axis falls into SST if SST == True.
-    (Currently requires same symmetry for both orientations.
-     Look into A. Heinz and P. Neumann 1991 for cases with differing sym.)
+
+    Currently requires same symmetry for both orientations.
+    Look into A. Heinz and P. Neumann 1991 for cases with differing sym.

     """
     if self.lattice.symmetry != other.lattice.symmetry:
       raise NotImplementedError('disorientation between different symmetry classes not supported yet.')
@@ -65,8 +68,8 @@ class Orientation:
           r = b*aInv
           for k in range(2):
             r.inverse()
-            breaker = self.lattice.symmetry.inFZ(r.asRodrigues(vector=True)) \
-                      and (not SST or other.lattice.symmetry.inDisorientationSST(r.asRodrigues(vector=True)))
+            breaker = self.lattice.symmetry.inFZ(r.as_Rodrigues(vector=True)) \
+                      and (not SST or other.lattice.symmetry.inDisorientationSST(r.as_Rodrigues(vector=True)))
             if breaker: break
           if breaker: break
         if breaker: break
@@ -75,7 +78,7 @@ class Orientation:
                                                                # ... own sym, other sym,
                                                                # self-->other: True, self<--other: False
   def inFZ(self):
-    return self.lattice.symmetry.inFZ(self.rotation.asRodrigues(vector=True))
+    return self.lattice.symmetry.inFZ(self.rotation.as_Rodrigues(vector=True))


   def equivalentOrientations(self,members=[]):
@@ -97,7 +100,7 @@ class Orientation:
   def reduced(self):
     """Transform orientation to fall into fundamental zone according to symmetry."""
     for me in self.equivalentOrientations():
-      if self.lattice.symmetry.inFZ(me.rotation.asRodrigues(vector=True)): break
+      if self.lattice.symmetry.inFZ(me.rotation.as_Rodrigues(vector=True)): break

     return self.__class__(me.rotation,self.lattice)
@@ -1,7 +1,11 @@
 import multiprocessing
 import re
+import inspect
 import glob
 import os
+import datetime
+import xml.etree.ElementTree as ET
+import xml.dom.minidom
 from functools import partial

 import h5py
@@ -32,7 +36,7 @@ class Result:
         Parameters
         ----------
         fname : str
-            name of the DADF5 file to be openend.
+            name of the DADF5 file to be opened.

         """
         with h5py.File(fname,'r') as f:
@@ -65,6 +69,10 @@ class Result:
             self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])]
             self.constituents   = [c.decode() for c in np.unique(f['mapping/cellResults/constituent'] ['Name'])]

+            # faster, but does not work with (deprecated) DADF5_postResults
+            #self.materialpoints = [m for m in f['inc0/materialpoint']]
+            #self.constituents   = [c for c in f['inc0/constituent']]
+
             self.con_physics = []
             for c in self.constituents:
                 self.con_physics += f['/'.join([self.increments[0],'constituent',c])].keys()
@@ -80,7 +88,9 @@ class Result:
                           'con_physics': self.con_physics, 'mat_physics': self.mat_physics
                          }

-        self.fname = fname
+        self.fname = os.path.abspath(fname)
+
+        self._allow_overwrite = False


     def __repr__(self):
@@ -136,6 +146,7 @@ class Result:
             choice = []
             for c in iterator:
                 idx = np.searchsorted(self.times,c)
+                if idx >= len(self.times): continue
                 if np.isclose(c,self.times[idx]):
                     choice.append(self.increments[idx])
                 elif np.isclose(c,self.times[idx+1]):
@@ -156,6 +167,16 @@ class Result:
         self.selection[what] = diff_sorted


+    def enable_overwrite(self):
+        print(util.bcolors().WARNING,util.bcolors().BOLD,
+              'Warning: Enabled overwrite of existing datasets!',
+              util.bcolors().ENDC)
+        self._allow_overwrite = True
+
+    def disable_overwrite(self):
+        self._allow_overwrite = False
+
+
     def incs_in_range(self,start,end):
         selected = []
         for i,inc in enumerate([int(i[3:]) for i in self.increments]):
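Re-computing a derived dataset now requires this explicit opt-in; without it, create_dataset raises on an existing name and the result is skipped. A usage sketch (file name hypothetical):

import damask

result = damask.Result('geom_load.hdf5')                     # hypothetical DADF5 file
result.add_Cauchy()                                          # first call creates 'sigma'
result.enable_overwrite()                                    # prints a bold warning
result.add_Cauchy()                                          # rewrites 'sigma', tagged attrs['Overwritten']='Yes'
result.disable_overwrite()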
@@ -316,9 +337,10 @@ class Result:
         Return groups that contain all requested datasets.

         Only groups within
-          - inc?????/constituent/*_*/*
-          - inc?????/materialpoint/*_*/*
-          - inc?????/geometry/*
+          - inc*/constituent/*/*
+          - inc*/materialpoint/*/*
+          - inc*/geometry/*

         are considered as they contain user-relevant data.
         Single strings will be treated as list with one entry.
@@ -482,7 +504,7 @@ class Result:
                 'meta':  {
                           'Unit':        x['meta']['Unit'],
                           'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
-                          'Creator':     'result.py:add_abs v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_absolute(self,x):
|
@ -510,7 +532,7 @@ class Result:
|
||||||
'meta': {
|
'meta': {
|
||||||
'Unit': kwargs['unit'],
|
'Unit': kwargs['unit'],
|
||||||
'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
|
'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
|
||||||
'Creator': 'result.py:add_calculation v{}'.format(version)
|
'Creator': inspect.stack()[0][3][1:]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
def add_calculation(self,label,formula,unit='n/a',description=None,vectorized=True):
|
def add_calculation(self,label,formula,unit='n/a',description=None,vectorized=True):
|
||||||
|
@@ -546,10 +568,9 @@ class Result:
                 'label': 'sigma',
                 'meta':  {
                           'Unit':        P['meta']['Unit'],
-                          'Description': 'Cauchy stress calculated from {} ({}) '.format(P['label'],
-                                                                                         P['meta']['Description'])+\
-                                         'and {} ({})'.format(F['label'],F['meta']['Description']),
-                          'Creator':     'result.py:add_Cauchy v{}'.format(version)
+                          'Description': 'Cauchy stress calculated from {} ({}) and {} ({})'\
+                                         .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_Cauchy(self,P='P',F='F'):
@@ -575,7 +596,7 @@ class Result:
                 'meta':  {
                           'Unit':        T['meta']['Unit'],
                           'Description': 'Determinant of tensor {} ({})'.format(T['label'],T['meta']['Description']),
-                          'Creator':     'result.py:add_determinant v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_determinant(self,T):
@@ -593,16 +614,13 @@ class Result:

     @staticmethod
     def _add_deviator(T):
-        if not T['data'].shape[1:] == (3,3):
-            raise ValueError
-
         return {
                 'data':  mechanics.deviatoric_part(T['data']),
                 'label': 's_{}'.format(T['label']),
                 'meta':  {
                           'Unit':        T['meta']['Unit'],
                           'Description': 'Deviator of tensor {} ({})'.format(T['label'],T['meta']['Description']),
-                          'Creator':     'result.py:add_deviator v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_deviator(self,T):
@@ -619,17 +637,24 @@ class Result:


     @staticmethod
-    def _add_eigenvalue(T_sym):
+    def _add_eigenvalue(T_sym,eigenvalue):
+        if eigenvalue == 'max':
+            label,p = 'Maximum',2
+        elif eigenvalue == 'mid':
+            label,p = 'Intermediate',1
+        elif eigenvalue == 'min':
+            label,p = 'Minimum',0
+
         return {
-                'data':  mechanics.eigenvalues(T_sym['data']),
-                'label': 'lambda({})'.format(T_sym['label']),
+                'data':  mechanics.eigenvalues(T_sym['data'])[:,p],
+                'label': 'lambda_{}({})'.format(eigenvalue,T_sym['label']),
                 'meta' : {
                           'Unit':        T_sym['meta']['Unit'],
-                          'Description': 'Eigenvalues of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
-                          'Creator':     'result.py:add_eigenvalues v{}'.format(version)
+                          'Description': '{} eigenvalue of {} ({})'.format(label,T_sym['label'],T_sym['meta']['Description']),
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
-    def add_eigenvalues(self,T_sym):
+    def add_eigenvalue(self,T_sym,eigenvalue='max'):
         """
         Add eigenvalues of symmetric tensor.
@@ -637,33 +662,46 @@ class Result:
         ----------
         T_sym : str
             Label of symmetric tensor dataset.
+        eigenvalue : str, optional
+            Eigenvalue. Select from 'max', 'mid', 'min'. Defaults to 'max'.

         """
-        self._add_generic_pointwise(self._add_eigenvalue,{'T_sym':T_sym})
+        self._add_generic_pointwise(self._add_eigenvalue,{'T_sym':T_sym},{'eigenvalue':eigenvalue})


     @staticmethod
-    def _add_eigenvector(T_sym):
+    def _add_eigenvector(T_sym,eigenvalue):
+        if eigenvalue == 'max':
+            label,p = 'maximum',2
+        elif eigenvalue == 'mid':
+            label,p = 'intermediate',1
+        elif eigenvalue == 'min':
+            label,p = 'minimum',0
+
         return {
-                'data':  mechanics.eigenvectors(T_sym['data']),
-                'label': 'v({})'.format(T_sym['label']),
+                'data':  mechanics.eigenvectors(T_sym['data'])[:,p],
+                'label': 'v_{}({})'.format(eigenvalue,T_sym['label']),
                 'meta' : {
                           'Unit':        '1',
-                          'Description': 'Eigenvectors of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
-                          'Creator':     'result.py:add_eigenvectors v{}'.format(version)
+                          'Description': 'Eigenvector corresponding to {} eigenvalue of {} ({})'\
+                                         .format(label,T_sym['label'],T_sym['meta']['Description']),
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
-    def add_eigenvectors(self,T_sym):
+    def add_eigenvector(self,T_sym,eigenvalue='max'):
         """
-        Add eigenvectors of symmetric tensor.
+        Add eigenvector of symmetric tensor.

         Parameters
         ----------
         T_sym : str
             Label of symmetric tensor dataset.
+        eigenvalue : str, optional
+            Eigenvalue to which the eigenvector corresponds. Select from
+            'max', 'mid', 'min'. Defaults to 'max'.

         """
-        self._add_generic_pointwise(self._add_eigenvector,{'T_sym':T_sym})
+        self._add_generic_pointwise(self._add_eigenvector,{'T_sym':T_sym},{'eigenvalue':eigenvalue})


     @staticmethod
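add_eigenvalues and add_eigenvectors, which stored all three eigenvalues or eigenvectors per point, become add_eigenvalue and add_eigenvector, each returning a single field selected by the eigenvalue keyword. A sketch (assumes a dataset 'sigma' exists, e.g. from add_Cauchy):

result.add_eigenvalue('sigma',eigenvalue='max')              # creates 'lambda_max(sigma)'
result.add_eigenvector('sigma',eigenvalue='min')             # creates 'v_min(sigma)'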
@@ -686,7 +724,7 @@ class Result:
                           'Unit':        'RGB (8bit)',
                           'Lattice':     lattice,
                           'Description': 'Inverse Pole Figure (IPF) colors along sample direction [{} {} {}]'.format(*m),
-                          'Creator':     'result.py:add_IPFcolor v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_IPFcolor(self,q,l):
@@ -712,7 +750,7 @@ class Result:
                 'meta':  {
                           'Unit':        T_sym['meta']['Unit'],
                           'Description': 'Maximum shear component of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
-                          'Creator':     'result.py:add_maximum_shear v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_maximum_shear(self,T_sym):
@@ -739,7 +777,7 @@ class Result:
                 'meta':  {
                           'Unit':        T_sym['meta']['Unit'],
                           'Description': 'Mises equivalent {} of {} ({})'.format(t,T_sym['label'],T_sym['meta']['Description']),
-                          'Creator':     'result.py:add_Mises v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_Mises(self,T_sym):
@@ -775,7 +813,7 @@ class Result:
                 'meta':  {
                           'Unit':        x['meta']['Unit'],
                           'Description': '{}-norm of {} {} ({})'.format(o,t,x['label'],x['meta']['Description']),
-                          'Creator':     'result.py:add_norm v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_norm(self,x,ord=None):
@@ -800,10 +838,9 @@ class Result:
                 'label': 'S',
                 'meta':  {
                           'Unit':        P['meta']['Unit'],
-                          'Description': '2. Kirchhoff stress calculated from {} ({}) '.format(P['label'],
-                                                                                               P['meta']['Description'])+\
-                                         'and {} ({})'.format(F['label'],F['meta']['Description']),
-                          'Creator':     'result.py:add_PK2 v{}'.format(version)
+                          'Description': '2. Piola-Kirchhoff stress calculated from {} ({}) and {} ({})'\
+                                         .format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_PK2(self,P='P',F='F'):
@@ -826,14 +863,12 @@ class Result:
         pole      = np.array(p)
         unit_pole = pole/np.linalg.norm(pole)
         m         = util.scale_to_coprime(pole)
-        coords    = np.empty((len(q['data']),2))
+        rot       = Rotation(q['data'].view(np.double).reshape(-1,4))

-        for i,qu in enumerate(q['data']):
-            o = Rotation(np.array([qu['w'],qu['x'],qu['y'],qu['z']]))
-            rotatedPole = o*unit_pole                                    # rotate pole according to crystal orientation
-            (x,y) = rotatedPole[0:2]/(1.+abs(unit_pole[2]))              # stereographic projection
-            coords[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if polar else [x,y]
-
+        rotatedPole = rot @ np.broadcast_to(unit_pole,rot.shape+(3,))    # rotate pole according to crystal orientation
+        xy = rotatedPole[:,0:2]/(1.+abs(unit_pole[2]))                   # stereographic projection
+        coords = xy if not polar else \
+                 np.block([np.sqrt(xy[:,0:1]*xy[:,0:1]+xy[:,1:2]*xy[:,1:2]),np.arctan2(xy[:,1:2],xy[:,0:1])])
         return {
                 'data': coords,
                 'label': 'p^{}_[{} {} {})'.format(u'rφ' if polar else 'xy',*m),
@@ -841,7 +876,7 @@ class Result:
                           'Unit': '1',
                           'Description': '{} coordinates of stereographic projection of pole (direction/plane) in crystal frame'\
                                          .format('Polar' if polar else 'Cartesian'),
-                          'Creator' : 'result.py:add_pole v{}'.format(version)
+                          'Creator':    inspect.stack()[0][3][1:]
                          }
                 }
     def add_pole(self,q,p,polar=False):
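The vectorized pole computation rotates the unit pole into each crystal frame and projects it stereographically onto the equatorial plane; the polar branch merely converts (x,y) to (r,phi). Restated with plain numpy on hypothetical rotated poles:

import numpy as np

unit_pole   = np.array([0.,0.,1.])
rotatedPole = np.array([[0.,0.,1.],
                        [1.,0.,0.]])                         # hypothetical rotated poles
xy    = rotatedPole[:,0:2]/(1.+abs(unit_pole[2]))            # stereographic projection
r_phi = np.block([np.sqrt(xy[:,0:1]**2+xy[:,1:2]**2),np.arctan2(xy[:,1:2],xy[:,0:1])])
print(xy)                                                    # -> [[0. 0.], [0.5 0.]]
print(r_phi)                                                 # -> [[0. 0.], [0.5 0.]]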
@@ -863,15 +898,13 @@ class Result:

     @staticmethod
     def _add_rotational_part(F):
-        if not F['data'].shape[1:] == (3,3):
-            raise ValueError
-
         return {
                 'data':  mechanics.rotational_part(F['data']),
                 'label': 'R({})'.format(F['label']),
                 'meta':  {
                           'Unit':        F['meta']['Unit'],
                           'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']),
-                          'Creator':     'result.py:add_rotational_part v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_rotational_part(self,F):
@@ -889,16 +922,13 @@ class Result:

     @staticmethod
     def _add_spherical(T):
-        if not T['data'].shape[1:] == (3,3):
-            raise ValueError
-
         return {
                 'data':  mechanics.spherical_part(T['data']),
                 'label': 'p_{}'.format(T['label']),
                 'meta':  {
                           'Unit':        T['meta']['Unit'],
                           'Description': 'Spherical component of tensor {} ({})'.format(T['label'],T['meta']['Description']),
-                          'Creator':     'result.py:add_spherical v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_spherical(self,T):
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _add_strain_tensor(F,t,m):
|
def _add_strain_tensor(F,t,m):
|
||||||
if not F['data'].shape[1:] == (3,3):
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'data': mechanics.strain_tensor(F['data'],t,m),
|
'data': mechanics.strain_tensor(F['data'],t,m),
|
||||||
'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
|
'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
|
||||||
'meta': {
|
'meta': {
|
||||||
'Unit': F['meta']['Unit'],
|
'Unit': F['meta']['Unit'],
|
||||||
'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
|
'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
|
||||||
'Creator': 'result.py:add_strain_tensor v{}'.format(version)
|
'Creator': inspect.stack()[0][3][1:]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
def add_strain_tensor(self,F='F',t='V',m=0.0):
|
def add_strain_tensor(self,F='F',t='V',m=0.0):
|
||||||
|
@@ -950,9 +977,6 @@ class Result:

     @staticmethod
     def _add_stretch_tensor(F,t):
-        if not F['data'].shape[1:] == (3,3):
-            raise ValueError
-
         return {
                 'data':  mechanics.left_stretch(F['data']) if t == 'V' else mechanics.right_stretch(F['data']),
                 'label': '{}({})'.format(t,F['label']),
@@ -960,7 +984,7 @@ class Result:
                           'Unit':        F['meta']['Unit'],
                           'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
                                                                                F['label'],F['meta']['Description']),
-                          'Creator':     'result.py:add_stretch_tensor v{}'.format(version)
+                          'Creator':     inspect.stack()[0][3][1:]
                          }
                 }
     def add_stretch_tensor(self,F='F',t='V'):
@@ -1023,11 +1047,23 @@ class Result:
                 continue
             lock.acquire()
             with h5py.File(self.fname, 'a') as f:
-                try: # ToDo: Replace if exists?
+                try:
+                    if self._allow_overwrite and result[0]+'/'+result[1]['label'] in f:
+                        dataset = f[result[0]+'/'+result[1]['label']]
+                        dataset[...] = result[1]['data']
+                        dataset.attrs['Overwritten'] = 'Yes'.encode()
+                    else:
                         dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
+
+                    now = datetime.datetime.now().astimezone()
+                    dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
+
                     for l,v in result[1]['meta'].items():
                         dataset.attrs[l]=v.encode()
-                except OSError as err:
+                    creator = 'damask.Result.{} v{}'.format(dataset.attrs['Creator'].decode(),version)
+                    dataset.attrs['Creator'] = creator.encode()
+
+                except (OSError,RuntimeError) as err:
                     print('Could not add dataset: {}.'.format(err))
             lock.release()
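The hunk above turns unconditional dataset creation into an overwrite-or-create pattern and stamps a 'Created' timestamp. A minimal standalone sketch of that pattern follows; the file name, group layout, and helper name are illustrative, not DAMASK's API:

    import datetime
    import h5py
    import numpy as np

    def write_dataset(f, group, label, data, allow_overwrite=False):
        """Create group/label, or update it in place when overwriting is enabled."""
        path = group + '/' + label
        if allow_overwrite and path in f:
            dataset = f[path]                                 # reuse the existing dataset
            dataset[...] = data                               # in-place update (same shape/dtype) keeps attributes
            dataset.attrs['Overwritten'] = 'Yes'.encode()
        else:
            dataset = f[group].create_dataset(label, data=data)
        now = datetime.datetime.now().astimezone()
        dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
        return dataset

    with h5py.File('example.hdf5', 'a') as f:                 # illustrative file
        f.require_group('results')
        write_dataset(f, 'results', 'sigma', np.zeros((3, 3)))
        write_dataset(f, 'results', 'sigma', np.ones((3, 3)), allow_overwrite=True)

Updating in place rather than deleting and recreating avoids growing the HDF5 file, which never reclaims space freed by deleted datasets.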
@@ -1035,6 +1071,102 @@ class Result:
         pool.join()


+    def write_XDMF(self):
+        """
+        Write XDMF file to directly visualize data in DADF5 file.
+
+        This works only for scalar, 3-vector and 3x3-tensor data.
+        Selection is not taken into account.
+        """
+        if len(self.constituents) != 1 or not self.structured:
+            raise NotImplementedError('XDMF only available for grid results with 1 constituent.')
+
+        xdmf=ET.Element('Xdmf')
+        xdmf.attrib={'Version':  '2.0',
+                     'xmlns:xi': 'http://www.w3.org/2001/XInclude'}
+
+        domain=ET.SubElement(xdmf, 'Domain')
+
+        collection = ET.SubElement(domain, 'Grid')
+        collection.attrib={'GridType':       'Collection',
+                           'CollectionType': 'Temporal'}
+
+        time = ET.SubElement(collection, 'Time')
+        time.attrib={'TimeType': 'List'}
+
+        time_data = ET.SubElement(time, 'DataItem')
+        time_data.attrib={'Format':     'XML',
+                          'NumberType': 'Float',
+                          'Dimensions': '{}'.format(len(self.times))}
+        time_data.text = ' '.join(map(str,self.times))
+
+        attributes = []
+        data_items = []
+
+        for inc in self.increments:
+
+            grid=ET.SubElement(collection,'Grid')
+            grid.attrib = {'GridType': 'Uniform',
+                           'Name':     inc}
+
+            topology=ET.SubElement(grid, 'Topology')
+            topology.attrib={'TopologyType': '3DCoRectMesh',
+                             'Dimensions':   '{} {} {}'.format(*self.grid+1)}
+
+            geometry=ET.SubElement(grid, 'Geometry')
+            geometry.attrib={'GeometryType':'Origin_DxDyDz'}
+
+            origin=ET.SubElement(geometry, 'DataItem')
+            origin.attrib={'Format':     'XML',
+                           'NumberType': 'Float',
+                           'Dimensions': '3'}
+            origin.text="{} {} {}".format(*self.origin)
+
+            delta=ET.SubElement(geometry, 'DataItem')
+            delta.attrib={'Format':     'XML',
+                          'NumberType': 'Float',
+                          'Dimensions': '3'}
+            delta.text="{} {} {}".format(*(self.size/self.grid))
+
+            with h5py.File(self.fname,'r') as f:
+                attributes.append(ET.SubElement(grid, 'Attribute'))
+                attributes[-1].attrib={'Name':          'u',
+                                       'Center':        'Node',
+                                       'AttributeType': 'Vector'}
+                data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
+                data_items[-1].attrib={'Format':     'HDF',
+                                       'Precision':  '8',
+                                       'Dimensions': '{} {} {} 3'.format(*(self.grid+1))}
+                data_items[-1].text='{}:/{}/geometry/u_n'.format(os.path.split(self.fname)[1],inc)
+
+                for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
+                    for oo in getattr(self,o):
+                        for pp in getattr(self,p):
+                            g = '/'.join([inc,o[:-1],oo,pp])
+                            for l in f[g]:
+                                name  = '/'.join([g,l])
+                                shape = f[name].shape[1:]
+                                dtype = f[name].dtype
+                                prec  = f[name].dtype.itemsize
+
+                                if (shape not in [(1,), (3,), (3,3)]) or dtype != np.float64: continue
+
+                                attributes.append(ET.SubElement(grid, 'Attribute'))
+                                attributes[-1].attrib={'Name':          '{}'.format(name.split('/',2)[2]),
+                                                       'Center':        'Cell',
+                                                       'AttributeType': 'Tensor'}
+                                data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
+                                data_items[-1].attrib={'Format':     'HDF',
+                                                       'NumberType': 'Float',
+                                                       'Precision':  '{}'.format(prec),
+                                                       'Dimensions': '{} {} {} {}'.format(*self.grid,np.prod(shape))}
+                                data_items[-1].text='{}:{}'.format(os.path.split(self.fname)[1],name)
+
+        with open(os.path.splitext(self.fname)[0]+'.xdmf','w') as f:
+            f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())
+
+
     def to_vtk(self,labels=[],mode='cell'):
         """
         Export to vtk cell/point data.
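The new write_XDMF builds a temporal collection in which each increment is a Uniform grid whose heavy data stays in the HDF5 file; only the XML skeleton is written. A minimal sketch of that skeleton, assuming a hypothetical list of times (everything here is illustrative, independent of the Result class):

    import xml.dom.minidom
    import xml.etree.ElementTree as ET

    times = [0.0, 10.0, 20.0]                       # stand-in for the increments' times

    xdmf = ET.Element('Xdmf', Version='2.0')
    domain = ET.SubElement(xdmf, 'Domain')
    collection = ET.SubElement(domain, 'Grid',
                               GridType='Collection', CollectionType='Temporal')

    time = ET.SubElement(collection, 'Time', TimeType='List')
    time_data = ET.SubElement(time, 'DataItem', Format='XML',
                              NumberType='Float', Dimensions=str(len(times)))
    time_data.text = ' '.join(map(str, times))

    # one Uniform <Grid> per increment would be appended to `collection` here,
    # with DataItems of Format='HDF' pointing into the result file

    print(xml.dom.minidom.parseString(ET.tostring(xdmf)).toprettyxml())

Because the DataItems reference datasets by "file:/path" rather than copying them, ParaView can open the .xdmf directly without duplicating the results.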
@@ -1111,28 +1243,3 @@ class Result:
                               inc[3:].zfill(N_digits))

             v.write(file_out)
-
-###################################################################################################
-# BEGIN DEPRECATED
-
-    def _time_to_inc(self,start,end):
-        selected = []
-        for i,time in enumerate(self.times):
-            if start <= time <= end:
-                selected.append(self.increments[i])
-        return selected
-
-
-    def set_by_time(self,start,end):
-        """
-        Set active increments based on start and end time.
-
-        Parameters
-        ----------
-        start : float
-          start time (included)
-        end : float
-          end time (included)
-
-        """
-        self._manage_selection('set','increments',self._time_to_inc(start,end))
File diff suppressed because it is too large
@@ -120,9 +120,9 @@ class VTK:
         Parameters
         ----------
         fname : str
-            Filename for reading. Valid extensions are *.vtr, *.vtu, *.vtp, and *.vtk.
+            Filename for reading. Valid extensions are .vtr, .vtu, .vtp, and .vtk.
         dataset_type : str, optional
-            Name of the vtk.vtkDataSet subclass when opening an *.vtk file. Valid types are vtkRectilinearGrid,
+            Name of the vtk.vtkDataSet subclass when opening an .vtk file. Valid types are vtkRectilinearGrid,
             vtkUnstructuredGrid, and vtkPolyData.

         """
@@ -238,12 +238,13 @@ def cell_coord0_gridSizeOrigin(coord0,ordered=True):
     start = origin + delta*.5
     end   = origin - delta*.5 + size

-    if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],grid[0])) and \
-            _np.allclose(coords[1],_np.linspace(start[1],end[1],grid[1])) and \
-            _np.allclose(coords[2],_np.linspace(start[2],end[2],grid[2]))):
+    atol = _np.max(size)*5e-2
+    if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],grid[0]),atol=atol) and \
+            _np.allclose(coords[1],_np.linspace(start[1],end[1],grid[1]),atol=atol) and \
+            _np.allclose(coords[2],_np.linspace(start[2],end[2],grid[2]),atol=atol)):
         raise ValueError('Regular grid spacing violated.')

-    if ordered and not _np.allclose(coord0.reshape(tuple(grid)+(3,),order='F'),cell_coord0(grid,size,origin)):
+    if ordered and not _np.allclose(coord0.reshape(tuple(grid)+(3,),order='F'),cell_coord0(grid,size,origin),atol=atol):
         raise ValueError('Input data is not ordered (x fast, z slow).')

     return (grid,size,origin)
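The motivation for the explicit atol: numpy's default absolute tolerance (1e-8) is arbitrary for physical coordinates, and the relative tolerance contributes almost nothing near the origin where coordinates approach zero. Scaling the tolerance to the domain makes the regularity check robust against precision loss, e.g. from a float32 round-trip. A small sketch with invented numbers:

    import numpy as np

    size   = np.array([8e-6, 1e-5, 8e-6])           # physical domain, metres
    exact  = np.linspace(0.5e-6, 7.5e-6, 8)         # ideal cell centres along x
    stored = exact + 1e-7                           # hypothetical precision loss on file I/O

    print(np.allclose(stored, exact))               # False: default atol=1e-8 is too strict here
    atol = np.max(size) * 5e-2                      # 5% of the largest edge, as in the patch
    print(np.allclose(stored, exact, atol=atol))    # True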
@@ -360,7 +361,7 @@ def node_2_cell(node_data):
          + _np.roll(node_data,1,(0,))  + _np.roll(node_data,1,(1,))  + _np.roll(node_data,1,(2,))
          + _np.roll(node_data,1,(0,1)) + _np.roll(node_data,1,(1,2)) + _np.roll(node_data,1,(2,0)))*0.125

-    return c[:-1,:-1,:-1]
+    return c[1:,1:,1:]


def node_coord0_gridSizeOrigin(coord0,ordered=True):
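Why the slice changes: rolling by +1 shifts data towards higher indices, so after the eight-term average, entry [i,j,k] holds the mean of the nodes at {i-1,i} x {j-1,j} x {k-1,k}. For a (grid+1) node array, the complete cells therefore live at indices 1..grid, i.e. c[1:,1:,1:]; the old c[:-1,:-1,:-1] picked the entries that mix in the periodic wrap-around. A self-contained sketch of the corrected averaging (illustrative reimplementation, not an import from the package):

    import numpy as np

    def node_2_cell(node_data):
        """Average the eight corner nodes of every cell (periodic in all directions)."""
        c = (  node_data                  + np.roll(node_data,1,(0,1,2))
             + np.roll(node_data,1,(0,))  + np.roll(node_data,1,(1,))  + np.roll(node_data,1,(2,))
             + np.roll(node_data,1,(0,1)) + np.roll(node_data,1,(1,2)) + np.roll(node_data,1,(2,0)))*0.125
        return c[1:,1:,1:]   # [i,j,k] averages nodes {i-1,i} x {j-1,j} x {k-1,k}

    nodes = np.arange(3*3*3, dtype=float).reshape(3,3,3)  # (grid+1) nodes of a 2x2x2 grid
    print(node_2_cell(nodes).shape)                       # (2, 2, 2)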
@@ -385,12 +386,13 @@ def node_coord0_gridSizeOrigin(coord0,ordered=True):
     if (grid+1).prod() != len(coord0):
         raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid))

-    if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1)) and \
-            _np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],grid[1]+1)) and \
-            _np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],grid[2]+1))):
+    atol = _np.max(size)*5e-2
+    if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \
+            _np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],grid[1]+1),atol=atol) and \
+            _np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],grid[2]+1),atol=atol)):
         raise ValueError('Regular grid spacing violated.')

-    if ordered and not _np.allclose(coord0.reshape(tuple(grid+1)+(3,),order='F'),node_coord0(grid,size,origin)):
+    if ordered and not _np.allclose(coord0.reshape(tuple(grid+1)+(3,),order='F'),node_coord0(grid,size,origin),atol=atol):
         raise ValueError('Input data is not ordered (x fast, z slow).')

     return (grid,size,origin)
@@ -112,8 +112,8 @@ def execute(cmd,

     """
     initialPath = os.getcwd()
-    os.chdir(wd)
     myEnv = os.environ if env is None else env
+    os.chdir(wd)
     process = subprocess.Popen(shlex.split(cmd),
                                stdout = subprocess.PIPE,
                                stderr = subprocess.PIPE,

@@ -121,9 +121,9 @@ def execute(cmd,
                                env = myEnv)
     out,error = [i for i in (process.communicate() if streamIn is None
                              else process.communicate(streamIn.read().encode('utf-8')))]
+    os.chdir(initialPath)
     out   = out.decode('utf-8').replace('\x08','')
     error = error.decode('utf-8').replace('\x08','')
-    os.chdir(initialPath)
     if process.returncode != 0:
         raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode))
     return out,error
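Moving the chdir calls shrinks the window in which the process runs in the foreign directory, but an exception raised in between would still leave the caller stranded in wd. A hedged alternative sketch using try/finally (illustrative hardening, not what the patch itself does):

    import os
    import shlex
    import subprocess

    def execute(cmd, wd='./'):
        """Run cmd in wd and restore the caller's directory even if cmd raises."""
        initial = os.getcwd()
        os.chdir(wd)
        try:
            process = subprocess.Popen(shlex.split(cmd),
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            out, error = process.communicate()
        finally:
            os.chdir(initial)          # guaranteed, even on exceptions
        if process.returncode != 0:
            raise RuntimeError('{} failed with returncode {}'.format(cmd, process.returncode))
        return out.decode(), error.decode()

    print(execute('echo hello')[0])    # 'hello\n'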
@@ -0,0 +1,85 @@
+4 header
+grid a 8 b 10 c 8
+size x 8e-06 y 1e-05 z 8e-06
+origin x 0.0 y -2.5e-06 z -2e-06
+homogenization 1
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 30 14 18 42 42 42
+42 42 29 13 17 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 31 15 19 42 42 42
+42 6 10 2 2 42 42 42
+42 1 2 2 2 2 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 32 16 20 42 42 42
+42 7 11 2 2 42 42 42
+42 7 11 2 2 42 42 42
+42 2 2 2 2 2 42 42
+42 42 2 2 31 35 42 42
+42 42 22 26 10 14 1 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 4 2 2 2 2 42 42
+42 42 2 2 32 36 42 42
+42 42 24 28 12 16 1 42
+42 42 42 7 7 1 1 42
+42 42 42 7 7 1 1 42
+42 42 42 1 1 1 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 25 29 13 17 1 42
+42 42 42 8 8 1 1 42
+42 42 42 1 1 1 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 1 1 1 42 42
+42 42 42 1 1 1 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42
@@ -0,0 +1,104 @@
+4 header
+grid a 11 b 11 c 9
+size x 1.1e-05 y 1.1000000000000001e-05 z 9e-06
+origin x -1.5e-06 y -3.0000000000000005e-06 z -2.4999999999999998e-06
+homogenization 1
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 1 2 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 1 42 42 42 42 42 42
+42 42 42 42 1 5 42 42 42 42 42
+42 42 42 1 7 4 42 42 42 42 42
+42 42 42 42 42 27 42 42 42 42 42
+42 42 42 42 42 42 2 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 1 1 42 42 42 42 42 42
+42 42 42 1 1 9 29 42 42 42 42
+42 42 1 1 11 8 28 2 42 42 42
+42 42 42 1 10 31 2 42 42 42 42
+42 42 42 42 30 2 2 2 42 42 42
+42 42 42 42 42 42 2 1 42 42 42
+42 42 42 42 42 42 42 1 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 1 42 42 42 42 42 42 42
+42 42 42 1 1 42 42 42 42 42 42
+42 42 1 16 36 12 32 42 42 42 42
+42 42 42 15 35 2 2 2 42 42 42
+42 42 42 42 2 2 2 11 3 42 42
+42 42 42 42 42 42 10 6 42 42 42
+42 42 42 42 42 42 42 6 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 1 42 42 42 42 42 42 42
+42 42 42 1 17 42 42 42 42 42 42
+42 42 42 16 36 37 2 42 42 42 42
+42 42 42 42 39 2 2 12 42 42 42
+42 42 42 38 2 2 2 11 8 42 42
+42 42 42 42 2 2 14 30 42 42 42
+42 42 42 42 42 42 13 30 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 40 42 42 42 42 42 42
+42 42 42 42 42 2 42 42 42 42 42
+42 42 42 42 42 2 2 15 42 42 42
+42 42 42 42 42 2 18 42 42 42 42
+42 42 42 42 42 42 17 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 2 20 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
+42 42 42 42 42 42 42 42 42 42 42
@@ -1,68 +1,4 @@
-68 header
+4 header
-geom_fromVoronoiTessellation 2.0.3-1073-g6f3cb071
-<texture>
-[Grain1]
-(gauss) phi1 358.98 Phi 65.62 phi2 24.48
-[Grain2]
-(gauss) phi1 121.05 Phi 176.11 phi2 295.73
-[Grain3]
-(gauss) phi1 43.79 Phi 113.76 phi2 345.90
-[Grain4]
-(gauss) phi1 265.15 Phi 62.52 phi2 299.71
-[Grain5]
-(gauss) phi1 221.23 Phi 26.54 phi2 207.05
-[Grain6]
-(gauss) phi1 249.81 Phi 61.47 phi2 152.14
-[Grain7]
-(gauss) phi1 332.45 Phi 99.16 phi2 345.34
-[Grain8]
-(gauss) phi1 312.27 Phi 118.27 phi2 181.59
-[Grain9]
-(gauss) phi1 303.10 Phi 48.21 phi2 358.03
-[Grain10]
-(gauss) phi1 338.26 Phi 48.11 phi2 176.78
-[Grain11]
-(gauss) phi1 115.17 Phi 56.54 phi2 223.84
-[Grain12]
-(gauss) phi1 281.04 Phi 97.48 phi2 27.94
-<microstructure>
-[Grain1]
-crystallite 1
-(constituent) phase 1 texture 1 fraction 1.0
-[Grain2]
-crystallite 1
-(constituent) phase 1 texture 2 fraction 1.0
-[Grain3]
-crystallite 1
-(constituent) phase 1 texture 3 fraction 1.0
-[Grain4]
-crystallite 1
-(constituent) phase 1 texture 4 fraction 1.0
-[Grain5]
-crystallite 1
-(constituent) phase 1 texture 5 fraction 1.0
-[Grain6]
-crystallite 1
-(constituent) phase 1 texture 6 fraction 1.0
-[Grain7]
-crystallite 1
-(constituent) phase 1 texture 7 fraction 1.0
-[Grain8]
-crystallite 1
-(constituent) phase 1 texture 8 fraction 1.0
-[Grain9]
-crystallite 1
-(constituent) phase 1 texture 9 fraction 1.0
-[Grain10]
-crystallite 1
-(constituent) phase 1 texture 10 fraction 1.0
-[Grain11]
-crystallite 1
-(constituent) phase 1 texture 11 fraction 1.0
-[Grain12]
-crystallite 1
-(constituent) phase 1 texture 12 fraction 1.0
-<!skip>
 grid a 6 b 7 c 8
 size x 0.75 y 0.875 z 1.0
 origin x 0.0 y 0.0 z 0.0
@@ -0,0 +1,61 @@
+4 header
+grid a 6 b 7 c 8
+size x 0.75 y 0.875 z 1.0
+origin x 0.0 y 0.0 z 0.0
+homogenization 1
+3 3 3 4 3 3
+3 1 1 1 3 3
+3 5 1 1 1 3
+1 5 5 1 1 1
+1 5 5 1 1 1
+6 3 3 4 1 6
+6 3 3 4 4 6
+6 3 3 1 3 3
+3 1 1 1 3 3
+3 1 1 1 1 1
+1 1 1 1 1 1
+6 6 3 1 1 1
+6 3 3 3 6 6
+6 3 3 3 6 6
+6 3 3 1 1 6
+3 1 1 1 1 3
+6 1 1 1 2 2
+1 6 2 2 2 2
+6 6 2 2 2 6
+6 3 3 3 6 6
+6 3 3 3 6 6
+5 6 6 6 1 6
+6 6 6 6 2 2
+6 6 6 2 2 2
+2 6 2 2 2 2
+6 5 2 2 2 2
+6 5 5 2 2 6
+5 5 5 3 6 6
+5 5 6 6 6 5
+6 6 6 6 6 6
+6 6 6 6 2 2
+4 4 6 2 2 2
+4 4 2 2 2 2
+5 5 5 2 2 2
+5 5 5 5 2 5
+5 5 5 4 4 5
+6 6 6 6 4 4
+4 4 5 5 2 4
+4 4 5 2 2 4
+4 4 2 2 2 2
+5 5 5 2 2 2
+5 5 5 4 4 5
+5 5 4 4 4 3
+4 5 5 5 4 3
+4 4 5 5 5 4
+4 4 5 5 2 4
+4 4 2 2 2 2
+5 5 2 2 2 2
+5 5 4 4 4 4
+3 4 4 4 4 3
+3 5 5 4 3 3
+4 5 5 5 3 3
+4 5 5 5 1 1
+4 4 5 2 1 1
+6 4 4 4 4 1
+3 4 4 4 4 3
Binary file not shown.
@@ -124,6 +124,3 @@ a_slip 2.25
 h0_slipslip 75e6
 interaction_slipslip 1 1 1.4 1.4 1.4 1.4
 atol_resistance 1
-
-<crystallite>
-[dummy]
@@ -5,12 +5,13 @@ import pytest
 import numpy as np

 from damask import Geom
+from damask import Rotation


 def geom_equal(a,b):
     return np.all(a.get_microstructure() == b.get_microstructure()) and \
-           np.all(a.get_size() == b.get_size()) and \
-           np.all(a.get_grid() == b.get_grid())
+           np.all(a.get_grid() == b.get_grid()) and \
+           np.allclose(a.get_size(), b.get_size())

 @pytest.fixture
 def default():

@@ -36,6 +37,7 @@ class TestGeom:
                                 default.get_size(),
                                 default.get_origin()
                                )
+        print(modified)
         assert geom_equal(modified,default)


@@ -57,6 +59,22 @@ class TestGeom:
         new = Geom.from_file(tmpdir.join('default.geom'))
         assert geom_equal(new,default)

+    def test_invalid_combination(self,default):
+        with pytest.raises(ValueError):
+            default.update(default.microstructure[1:,1:,1:],size=np.ones(3), rescale=True)
+
+    def test_invalid_size(self,default):
+        with pytest.raises(ValueError):
+            default.update(default.microstructure[1:,1:,1:],size=np.ones(2))
+
+    def test_invalid_microstructure(self,default):
+        with pytest.raises(ValueError):
+            default.update(default.microstructure[1])
+
+    def test_invalid_homogenization(self,default):
+        with pytest.raises(TypeError):
+            default.set_homogenization(homogenization=0)
+
     @pytest.mark.parametrize('directions,reflect',[
                              (['x'],        False),
                              (['x','y','z'],True),

@@ -98,6 +116,52 @@ class TestGeom:
         if update: modified.to_file(reference)
         assert geom_equal(modified,Geom.from_file(reference))

+    def test_renumber(self,default):
+        modified = copy.deepcopy(default)
+        microstructure = modified.get_microstructure()
+        for m in np.unique(microstructure):
+            microstructure[microstructure==m] = microstructure.max() + np.random.randint(1,30)
+        modified.update(microstructure)
+        assert not geom_equal(modified,default)
+        modified.renumber()
+        assert geom_equal(modified,default)
+
+    def test_substitute(self,default):
+        modified = copy.deepcopy(default)
+        microstructure = modified.get_microstructure()
+        offset = np.random.randint(1,500)
+        microstructure+=offset
+        modified.update(microstructure)
+        assert not geom_equal(modified,default)
+        modified.substitute(np.arange(default.microstructure.max())+1+offset,
+                            np.arange(default.microstructure.max())+1)
+        assert geom_equal(modified,default)
+
+    @pytest.mark.parametrize('axis_angle',[np.array([1,0,0,86.7]), np.array([0,1,0,90.4]), np.array([0,0,1,90]),
+                                           np.array([1,0,0,175]), np.array([0,-1,0,178]), np.array([0,0,1,180])])
+    def test_rotate360(self,default,axis_angle):
+        modified = copy.deepcopy(default)
+        for i in range(np.rint(360/axis_angle[3]).astype(int)):
+            modified.rotate(Rotation.from_axis_angle(axis_angle,degrees=True))
+        assert geom_equal(modified,default)
+
+    @pytest.mark.parametrize('Eulers',[[32.0,68.0,21.0],
+                                       [0.0,32.0,240.0]])
+    def test_rotate(self,default,update,reference_dir,Eulers):
+        modified = copy.deepcopy(default)
+        modified.rotate(Rotation.from_Eulers(Eulers,degrees=True))
+        tag = 'Eulers={}-{}-{}'.format(*Eulers)
+        reference = os.path.join(reference_dir,'rotate_{}.geom'.format(tag))
+        if update: modified.to_file(reference)
+        assert geom_equal(modified,Geom.from_file(reference))
+
+    def test_canvas(self,default):
+        grid_add = np.random.randint(0,30,(3))
+        modified = copy.deepcopy(default)
+        modified.canvas(modified.grid + grid_add)
+        e = default.grid
+        assert np.all(modified.microstructure[:e[0],:e[1],:e[2]] == default.microstructure)
+
     @pytest.mark.parametrize('periodic',[True,False])
     def test_tessellation_approaches(self,periodic):
         grid = np.random.randint(10,20,3)
@@ -0,0 +1,41 @@
+import random
+
+import pytest
+import numpy as np
+
+from damask import Symmetry
+
+class TestSymmetry:
+
+    @pytest.mark.parametrize('invalid_symmetry',['fcc','bcc','hello'])
+    def test_invalid_symmetry(self,invalid_symmetry):
+        with pytest.raises(KeyError):
+            s = Symmetry(invalid_symmetry)  # noqa
+
+    def test_equal(self):
+        symmetry = random.choice(Symmetry.lattices)
+        print(symmetry)
+        assert Symmetry(symmetry) == Symmetry(symmetry)
+
+    def test_not_equal(self):
+        symmetries = random.sample(Symmetry.lattices,k=2)
+        assert Symmetry(symmetries[0]) != Symmetry(symmetries[1])
+
+    @pytest.mark.parametrize('lattice',Symmetry.lattices)
+    def test_inFZ(self,lattice):
+        assert Symmetry(lattice).inFZ(np.zeros(3))
+
+    @pytest.mark.parametrize('lattice',Symmetry.lattices)
+    def test_inDisorientationSST(self,lattice):
+        assert Symmetry(lattice).inDisorientationSST(np.zeros(3))
+
+    @pytest.mark.parametrize('lattice',Symmetry.lattices)
+    @pytest.mark.parametrize('proper',[True,False])
+    def test_inSST(self,lattice,proper):
+        assert Symmetry(lattice).inSST(np.zeros(3),proper)
+
+    @pytest.mark.parametrize('function',['inFZ','inDisorientationSST'])
+    def test_invalid_argument(self,function):
+        s = Symmetry()  # noqa
+        with pytest.raises(ValueError):
+            eval('s.{}(np.ones(4))'.format(function))
@@ -11,11 +11,6 @@ from damask import Lattice

 n = 1000

-@pytest.fixture
-def default():
-    """A set of n random rotations."""
-    return [Rotation.fromRandom() for r in range(n)]
-
 @pytest.fixture
 def reference_dir(reference_dir_base):
     """Directory containing reference results."""

@@ -28,15 +23,15 @@ class TestOrientation:
                              {'label':'green','RGB':[0,1,0],'direction':[0,1,1]},
                              {'label':'blue', 'RGB':[0,0,1],'direction':[1,1,1]}])
     @pytest.mark.parametrize('lattice',['fcc','bcc'])
-    def test_IPF_cubic(self,default,color,lattice):
+    def test_IPF_cubic(self,color,lattice):
         cube = damask.Orientation(damask.Rotation(),lattice)
         for direction in set(permutations(np.array(color['direction']))):
-            assert np.allclose(cube.IPFcolor(direction),np.array(color['RGB']))
+            assert np.allclose(cube.IPFcolor(np.array(direction)),np.array(color['RGB']))

     @pytest.mark.parametrize('lattice',Lattice.lattices)
     def test_IPF(self,lattice):
         direction = np.random.random(3)*2.0-1
-        for rot in [Rotation.fromRandom() for r in range(n//100)]:
+        for rot in [Rotation.from_random() for r in range(n//100)]:
             R = damask.Orientation(rot,lattice)
             color = R.IPFcolor(direction)
             for equivalent in R.equivalentOrientations():

@@ -45,7 +40,7 @@ class TestOrientation:
     @pytest.mark.parametrize('model',['Bain','KS','GT','GT_prime','NW','Pitsch'])
     @pytest.mark.parametrize('lattice',['fcc','bcc'])
     def test_relationship_forward_backward(self,model,lattice):
-        ori = Orientation(Rotation.fromRandom(),lattice)
+        ori = Orientation(Rotation.from_random(),lattice)
         for i,r in enumerate(ori.relatedOrientations(model)):
             ori2 = r.relatedOrientations(model)[i]
             misorientation = ori.rotation.misorientation(ori2.rotation)

@@ -56,7 +51,7 @@ class TestOrientation:
     def test_relationship_reference(self,update,reference_dir,model,lattice):
         reference = os.path.join(reference_dir,'{}_{}.txt'.format(lattice,model))
         ori = Orientation(Rotation(),lattice)
-        eu = np.array([o.rotation.asEulers(degrees=True) for o in ori.relatedOrientations(model)])
+        eu = np.array([o.rotation.as_Eulers(degrees=True) for o in ori.relatedOrientations(model)])
         if update:
             coords = np.array([(1,i+1) for i,x in enumerate(eu)])
             table = damask.Table(eu,{'Eulers':(3,)})
@@ -1,9 +1,13 @@
+import time
 import shutil
 import os
+from datetime import datetime

 import pytest
 import numpy as np
+import h5py

+import damask
 from damask import Result
 from damask import mechanics

@@ -13,9 +17,16 @@ def default(tmp_path,reference_dir):
     fname = '12grains6x7x8_tensionY.hdf5'
     shutil.copy(os.path.join(reference_dir,fname),tmp_path)
     f = Result(os.path.join(tmp_path,fname))
-    f.set_by_time(20.0,20.0)
+    f.pick('times',20.0)
     return f

+@pytest.fixture
+def single_phase(tmp_path,reference_dir):
+    """Single phase Result file in temp location for modification."""
+    fname = '6grains6x7x8_single_phase_tensionY.hdf5'
+    shutil.copy(os.path.join(reference_dir,fname),tmp_path)
+    return Result(os.path.join(tmp_path,fname))
+
 @pytest.fixture
 def reference_dir(reference_dir_base):
     """Directory containing reference results."""
@@ -28,12 +39,56 @@ class TestResult:
         print(default)


-    def test_time_increments(self,default):
-        shape = default.read_dataset(default.get_dataset_location('F'),0).shape
-        default.set_by_time(0.0,20.0)
-        for i in default.iterate('increments'):
-            assert shape == default.read_dataset(default.get_dataset_location('F'),0).shape
+    def test_pick_all(self,default):
+        default.pick('increments',True)
+        a = default.get_dataset_location('F')
+        default.pick('increments','*')
+        b = default.get_dataset_location('F')
+        default.pick('increments',default.incs_in_range(0,np.iinfo(int).max))
+        c = default.get_dataset_location('F')
+
+        default.pick('times',True)
+        d = default.get_dataset_location('F')
+        default.pick('times','*')
+        e = default.get_dataset_location('F')
+        default.pick('times',default.times_in_range(0.0,np.inf))
+        f = default.get_dataset_location('F')
+        assert a == b == c == d == e == f
+
+    @pytest.mark.parametrize('what',['increments','times','constituents'])  # ToDo: discuss materialpoints
+    def test_pick_none(self,default,what):
+        default.pick(what,False)
+        a = default.get_dataset_location('F')
+        default.pick(what,[])
+        b = default.get_dataset_location('F')
+
+        assert a == b == []
+
+    @pytest.mark.parametrize('what',['increments','times','constituents'])  # ToDo: discuss materialpoints
+    def test_pick_more(self,default,what):
+        default.pick(what,False)
+        default.pick_more(what,'*')
+        a = default.get_dataset_location('F')
+
+        default.pick(what,True)
+        b = default.get_dataset_location('F')
+
+        assert a == b
+
+    @pytest.mark.parametrize('what',['increments','times','constituents'])  # ToDo: discuss materialpoints
+    def test_pick_less(self,default,what):
+        default.pick(what,True)
+        default.pick_less(what,'*')
+        a = default.get_dataset_location('F')
+
+        default.pick(what,False)
+        b = default.get_dataset_location('F')
+
+        assert a == b == []
+
+    def test_pick_invalid(self,default):
+        with pytest.raises(AttributeError):
+            default.pick('invalid',True)
+
     def test_add_absolute(self,default):
         default.add_absolute('Fe')
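These tests document the new selection interface that replaces set_by_time: pick sets a selection, pick_more grows it, pick_less shrinks it, and True/'*' versus False/[] select everything or nothing. A short usage sketch, assuming an existing result file (the file name is illustrative; the method names and arguments are exactly those exercised above):

    from damask import Result

    r = Result('res.hdf5')             # illustrative file name

    r.pick('times', 20.0)              # select increments at t = 20 s
    r.pick('increments', True)         # True or '*' selects everything
    r.pick('increments', False)        # False or [] clears the selection
    r.pick_more('times', '*')          # grow the current selection
    r.pick_less('times', '*')          # shrink it again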
@@ -77,24 +132,40 @@ class TestResult:
         in_file   = default.read_dataset(loc['s_P'],0)
         assert np.allclose(in_memory,in_file)

-    def test_add_eigenvalues(self,default):
+    @pytest.mark.parametrize('eigenvalue,function',[('max',np.amax),('min',np.amin)])
+    def test_add_eigenvalue(self,default,eigenvalue,function):
         default.add_Cauchy('P','F')
-        default.add_eigenvalues('sigma')
+        default.add_eigenvalue('sigma',eigenvalue)
         loc = {'sigma' :default.get_dataset_location('sigma'),
-               'lambda(sigma)':default.get_dataset_location('lambda(sigma)')}
-        in_memory = mechanics.eigenvalues(default.read_dataset(loc['sigma'],0))
-        in_file   = default.read_dataset(loc['lambda(sigma)'],0)
+               'lambda':default.get_dataset_location('lambda_{}(sigma)'.format(eigenvalue))}
+        in_memory = function(mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True)
+        in_file   = default.read_dataset(loc['lambda'],0)
         assert np.allclose(in_memory,in_file)

-    def test_add_eigenvectors(self,default):
+    @pytest.mark.parametrize('eigenvalue,idx',[('max',2),('mid',1),('min',0)])
+    def test_add_eigenvector(self,default,eigenvalue,idx):
         default.add_Cauchy('P','F')
-        default.add_eigenvectors('sigma')
+        default.add_eigenvector('sigma',eigenvalue)
         loc = {'sigma'   :default.get_dataset_location('sigma'),
-               'v(sigma)':default.get_dataset_location('v(sigma)')}
-        in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))
+               'v(sigma)':default.get_dataset_location('v_{}(sigma)'.format(eigenvalue))}
+        in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx]
         in_file   = default.read_dataset(loc['v(sigma)'],0)
         assert np.allclose(in_memory,in_file)

+    @pytest.mark.parametrize('d',[[1,0,0],[0,1,0],[0,0,1]])
+    def test_add_IPFcolor(self,default,d):
+        default.add_IPFcolor('orientation',d)
+        loc = {'orientation': default.get_dataset_location('orientation'),
+               'color':       default.get_dataset_location('IPFcolor_[{} {} {}]'.format(*d))}
+        qu = default.read_dataset(loc['orientation']).view(np.double).reshape(-1,4)
+        crystal_structure = default.get_crystal_structure()
+        in_memory = np.empty((qu.shape[0],3),np.uint8)
+        for i,q in enumerate(qu):
+            o = damask.Orientation(q,crystal_structure).reduced()
+            in_memory[i] = np.uint8(o.IPFcolor(np.array(d))*255)
+        in_file = default.read_dataset(loc['color'])
+        assert np.allclose(in_memory,in_file)
+
     def test_add_maximum_shear(self,default):
         default.add_Cauchy('P','F')
         default.add_maximum_shear('sigma')
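The switch from add_eigenvalues to add_eigenvalue('sigma','max'/'mid'/'min') rests on a simple fact exploited in the mapping ('max',2),('mid',1),('min',0): eigenvalues of a symmetric tensor come back in ascending order from a Hermitian eigensolver, so each named principal value is a fixed column. A self-contained sketch with plain numpy (illustrative, independent of the mechanics module):

    import numpy as np

    sigma = np.random.rand(5, 3, 3)
    sigma = (sigma + sigma.transpose(0, 2, 1)) / 2   # symmetrize a stack of tensors

    lam = np.linalg.eigvalsh(sigma)                  # shape (5,3), ascending order
    lam_min, lam_mid, lam_max = lam[:, 0], lam[:, 1], lam[:, 2]

    # equivalent to the per-point reduction used in the test
    assert np.allclose(lam_max, np.amax(lam, axis=1))
    assert np.allclose(lam_min, np.amin(lam, axis=1))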
@@ -143,6 +214,20 @@ class TestResult:
         in_file   = default.read_dataset(loc['S'],0)
         assert np.allclose(in_memory,in_file)

+    @pytest.mark.parametrize('polar',[True,False])
+    def test_add_pole(self,default,polar):
+        pole = np.array([1.,0.,0.])
+        default.add_pole('orientation',pole,polar)
+        loc = {'orientation': default.get_dataset_location('orientation'),
+               'pole':        default.get_dataset_location('p^{}_[1 0 0)'.format(u'rφ' if polar else 'xy'))}
+        rot = damask.Rotation(default.read_dataset(loc['orientation']).view(np.double))
+        rotated_pole = rot * np.broadcast_to(pole,rot.shape+(3,))
+        xy = rotated_pole[:,0:2]/(1.+abs(pole[2]))
+        in_memory = xy if not polar else \
+                    np.block([np.sqrt(xy[:,0:1]*xy[:,0:1]+xy[:,1:2]*xy[:,1:2]),np.arctan2(xy[:,1:2],xy[:,0:1])])
+        in_file = default.read_dataset(loc['pole'])
+        assert np.allclose(in_memory,in_file)
+
     def test_add_rotational_part(self,default):
         default.add_rotational_part('F')
         loc = {'F': default.get_dataset_location('F'),
|
||||||
in_memory = mechanics.left_stretch(default.read_dataset(loc['F'],0))
|
in_memory = mechanics.left_stretch(default.read_dataset(loc['F'],0))
|
||||||
in_file = default.read_dataset(loc['V(F)'],0)
|
in_file = default.read_dataset(loc['V(F)'],0)
|
||||||
assert np.allclose(in_memory,in_file)
|
assert np.allclose(in_memory,in_file)
|
||||||
|
|
||||||
|
def test_add_invalid(self,default):
|
||||||
|
with pytest.raises(TypeError):
|
||||||
|
default.add_calculation('#invalid#*2')
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('overwrite',['off','on'])
|
||||||
|
def test_add_overwrite(self,default,overwrite):
|
||||||
|
default.pick('times',default.times_in_range(0,np.inf)[-1])
|
||||||
|
|
||||||
|
default.add_Cauchy()
|
||||||
|
loc = default.get_dataset_location('sigma')
|
||||||
|
print(loc)
|
||||||
|
with h5py.File(default.fname,'r') as f:
|
||||||
|
created_first = f[loc[0]].attrs['Created'].decode()
|
||||||
|
created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')
|
||||||
|
|
||||||
|
if overwrite == 'on':
|
||||||
|
default.enable_overwrite()
|
||||||
|
else:
|
||||||
|
default.disable_overwrite()
|
||||||
|
|
||||||
|
time.sleep(2.)
|
||||||
|
default.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
|
||||||
|
with h5py.File(default.fname,'r') as f:
|
||||||
|
created_second = f[loc[0]].attrs['Created'].decode()
|
||||||
|
created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')
|
||||||
|
if overwrite == 'on':
|
||||||
|
assert created_first < created_second and np.allclose(default.read_dataset(loc),311.)
|
||||||
|
else:
|
||||||
|
assert created_first == created_second and not np.allclose(default.read_dataset(loc),311.)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('output',['F',[],['F','P']])
|
||||||
|
def test_vtk(self,tmp_path,default,output):
|
||||||
|
os.chdir(tmp_path)
|
||||||
|
default.to_vtk(output)
|
||||||
|
|
||||||
|
def test_XDMF(self,tmp_path,single_phase):
|
||||||
|
os.chdir(tmp_path)
|
||||||
|
single_phase.write_XDMF
|
||||||
|
|
File diff suppressed because it is too large
@@ -75,6 +75,11 @@ class TestTable:
         d=default.get('F')
         assert np.allclose(d,0.0) and d.shape[1:] == (3,3)

+    def test_set_component(self,default):
+        default.set('1_F',np.zeros((5)),'set to zero')
+        d=default.get('F')
+        assert np.allclose(d[...,0,0],0.0) and d.shape[1:] == (3,3)
+
     def test_labels(self,default):
         assert default.labels == ['F','v','s']
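The new test exercises per-component addressing: a label such as '1_F' names the first scalar component of the 3x3 dataset 'F' (the test sets '1_F' and then checks d[...,0,0], so component numbering is 1-based and maps onto the flattened tensor). A sketch of that indexing convention with plain numpy; the parsing helper is hypothetical, not the Table implementation:

    import numpy as np

    data = {'F': np.random.rand(5, 3, 3)}        # five 3x3 tensors

    label = '1_F'                                # component 1 of 'F'
    idx, key = label.split('_')
    component = int(idx) - 1                     # 1-based label -> flat index 0, i.e. F[0,0]

    flat = data[key].reshape(5, -1)              # view tensor components as columns
    flat[:, component] = 0.0                     # what set('1_F', ...) effects

    print(np.allclose(data['F'][:, 0, 0], 0.0))  # True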
@@ -42,12 +42,25 @@ class TestGridFilters:
         assert np.allclose(grid_filters.node_displacement_fluct(size,F),
                            grid_filters.cell_2_node(grid_filters.cell_displacement_fluct(size,F)))

-    def test_interpolation_nonperiodic(self):
+    def test_interpolation_to_node(self):
         size = np.random.random(3)
         grid = np.random.randint(8,32,(3))
         F    = np.random.random(tuple(grid)+(3,3))
-        assert np.allclose(grid_filters.node_coord(size,F) [1:-1,1:-1,1:-1],grid_filters.cell_2_node(
-                           grid_filters.cell_coord(size,F))[1:-1,1:-1,1:-1])
+        assert np.allclose(grid_filters.node_coord(size,F) [1:-1,1:-1,1:-1],
+                           grid_filters.cell_2_node(grid_filters.cell_coord(size,F))[1:-1,1:-1,1:-1])
+
+    def test_interpolation_to_cell(self):
+        grid = np.random.randint(1,30,(3))
+
+        node_coord_x = np.linspace(0,np.pi*2,num=grid[0]+1)
+        node_field_x = np.cos(node_coord_x)
+        node_field   = np.broadcast_to(node_field_x.reshape(-1,1,1),grid+1)
+
+        cell_coord_x = node_coord_x[:-1]+node_coord_x[1]*.5
+        cell_field_x = np.interp(cell_coord_x,node_coord_x,node_field_x,period=np.pi*2.)
+        cell_field   = np.broadcast_to(cell_field_x.reshape(-1,1,1),grid)
+
+        assert np.allclose(cell_field,grid_filters.node_2_cell(node_field))

     @pytest.mark.parametrize('mode',['cell','node'])
     def test_coord0_origin(self,mode):
@@ -63,25 +63,25 @@ class TestMechanics:
         assert np.allclose(np.matmul(R,U),
                            np.matmul(V,R))

-    def test_strain_tensor_no_rotation(self):
+    @pytest.mark.parametrize('m',[0.0,np.random.random()*10.,np.random.random()*-10.])
+    def test_strain_tensor_no_rotation(self,m):
         """Ensure that left and right stretch give same results for no rotation."""
         F = np.broadcast_to(np.eye(3),[self.n,3,3])*np.random.rand(self.n,3,3)
-        m = np.random.random()*20.0-10.0
         assert np.allclose(mechanics.strain_tensor(F,'U',m),
                            mechanics.strain_tensor(F,'V',m))

-    def test_strain_tensor_rotation_equivalence(self):
+    @pytest.mark.parametrize('m',[0.0,np.random.random()*2.5,np.random.random()*-2.5])
+    def test_strain_tensor_rotation_equivalence(self,m):
         """Ensure that left and right strain differ only by a rotation."""
         F = np.broadcast_to(np.eye(3),[self.n,3,3]) + (np.random.rand(self.n,3,3)*0.5 - 0.25)
-        m = np.random.random()*5.0-2.5
         assert np.allclose(np.linalg.det(mechanics.strain_tensor(F,'U',m)),
                            np.linalg.det(mechanics.strain_tensor(F,'V',m)))

-    def test_strain_tensor_rotation(self):
+    @pytest.mark.parametrize('m',[0.0,np.random.random(),np.random.random()*-1.])
+    @pytest.mark.parametrize('t',['V','U'])
+    def test_strain_tensor_rotation(self,m,t):
         """Ensure that pure rotation results in no strain."""
         F = mechanics.rotational_part(np.random.rand(self.n,3,3))
-        t = ['V','U'][np.random.randint(0,2)]
-        m = np.random.random()*2.0 - 1.0
         assert np.allclose(mechanics.strain_tensor(F,t,m),
                            0.0)
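These tests exercise the Seth-Hill strain family: for a stretch tensor U (or V) and order m, the strain is (U^2m - I)/(2m), with the logarithmic strain ln U recovered in the limit m -> 0, which is why m = 0.0 is now an explicit parametrized case. A self-contained sketch of the right-stretch variant, under stated assumptions (the function name and structure are illustrative, not the mechanics module):

    import numpy as np

    def seth_hill_strain(F, m=0.0):
        """Seth-Hill strain from the right stretch U of F (illustrative)."""
        C = np.einsum('...ki,...kj', F, F)       # C = F^T F = U^2
        w, v = np.linalg.eigh(C)                 # C = v diag(w) v^T, w > 0
        if m == 0.0:                             # logarithmic strain ln(U) = 0.5 ln(C)
            eps = 0.5*np.log(w)
        else:                                    # (U^(2m) - I)/(2m) = (C^m - I)/(2m)
            eps = (w**m - 1.0)/(2.0*m)
        return np.einsum('...ij,...j,...kj', v, eps, v)

    F  = np.eye(3) + 0.1*np.random.rand(3, 3)
    e0 = seth_hill_strain(F, m=0.0)
    e1 = seth_hill_strain(F, m=1e-6)             # approaches the m = 0 limit
    print(np.allclose(e0, e1, atol=1e-5))        # True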
@@ -17,10 +17,10 @@ if (PROJECT_NAME STREQUAL "damask-grid")
    file(GLOB grid-sources grid/*.f90)

    if(NOT CMAKE_BUILD_TYPE STREQUAL "SYNTAXONLY")
-       add_executable(DAMASK_spectral ${damask-sources} ${grid-sources})
-       install (TARGETS DAMASK_spectral RUNTIME DESTINATION bin)
+       add_executable(DAMASK_grid ${damask-sources} ${grid-sources})
+       install (TARGETS DAMASK_grid RUNTIME DESTINATION bin)
    else()
-       add_library(DAMASK_spectral OBJECT ${damask-sources} ${grid-sources})
+       add_library(DAMASK_grid OBJECT ${damask-sources} ${grid-sources})
        exec_program (mktemp OUTPUT_VARIABLE nothing)
        exec_program (mktemp ARGS -d OUTPUT_VARIABLE black_hole)
        install (PROGRAMS ${nothing} DESTINATION ${black_hole})

@@ -30,7 +30,7 @@ elseif (PROJECT_NAME STREQUAL "damask-mesh")

    file(GLOB mesh-sources mesh/*.f90)

-   add_executable(DAMASK_FEM ${damask-sources} ${mesh-sources})
-   install (TARGETS DAMASK_FEM RUNTIME DESTINATION bin)
+   add_executable(DAMASK_mesh ${damask-sources} ${mesh-sources})
+   install (TARGETS DAMASK_mesh RUNTIME DESTINATION bin)

 endif()
@@ -11,6 +11,7 @@ module CPFEM
   use math
   use rotations
   use YAML_types
+  use YAML_parse
   use discretization_marc
   use material
   use config

@@ -84,11 +85,12 @@ subroutine CPFEM_initAll(el,ip)
   call math_init
   call rotations_init
   call YAML_types_init
+  call YAML_init
   call HDF5_utilities_init
-  call results_init
+  call results_init(.false.)
   call discretization_marc_init(ip, el)
   call lattice_init
-  call material_init
+  call material_init(.false.)
   call constitutive_init
   call crystallite_init
   call homogenization_init
@@ -12,6 +12,7 @@ module CPFEM2
   use math
   use rotations
   use YAML_types
+  use YAML_parse
   use material
   use lattice
   use IO

@@ -22,7 +23,7 @@ module CPFEM2
   use homogenization
   use constitutive
   use crystallite
-#if defined(FEM)
+#if defined(Mesh)
   use FEM_quadrature
   use discretization_mesh
 #elif defined(Grid)

@@ -43,7 +44,7 @@ subroutine CPFEM_initAll
   call DAMASK_interface_init                                                                        ! Spectral and FEM interface to commandline
   call prec_init
   call IO_init
-#ifdef FEM
+#ifdef Mesh
   call FEM_quadrature_init
 #endif
   call numerics_init

@@ -52,15 +53,16 @@ subroutine CPFEM_initAll
   call math_init
   call rotations_init
   call YAML_types_init
+  call YAML_init
   call lattice_init
   call HDF5_utilities_init
-  call results_init
+  call results_init(restart=interface_restartInc>0)
-#if defined(FEM)
+#if defined(Mesh)
-  call discretization_mesh_init
+  call discretization_mesh_init(restart=interface_restartInc>0)
 #elif defined(Grid)
-  call discretization_grid_init
+  call discretization_grid_init(restart=interface_restartInc>0)
 #endif
-  call material_init
+  call material_init(restart=interface_restartInc>0)
   call constitutive_init
   call crystallite_init
   call homogenization_init
@@ -9,8 +9,6 @@
 !> by DAMASK. Interpretating the command line arguments to get load case, geometry file,
 !> and working directory.
 !--------------------------------------------------------------------------------------------------
-#define GCC_MIN 6
-#define INTEL_MIN 1700
 #define PETSC_MAJOR 3
 #define PETSC_MINOR_MIN 10
 #define PETSC_MINOR_MAX 13

@@ -50,29 +48,6 @@ contains
 !--------------------------------------------------------------------------------------------------
 subroutine DAMASK_interface_init
 #include <petsc/finclude/petscsys.h>
-#if defined(__GFORTRAN__) && __GNUC__<GCC_MIN
-===================================================================================================
------ WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
-===================================================================================================
-===============   THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION   ======================
-==================   THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION   ===================
-=====================   THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION   ================
-===================================================================================================
------ WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
-===================================================================================================
-#endif
-
-#if defined(__INTEL_COMPILER) && __INTEL_COMPILER<INTEL_MIN
-===================================================================================================
------ WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
-===================================================================================================
-=================   THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION   =======================
-====================   THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION   ====================
-=======================   THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION   =================
-===================================================================================================
------ WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
-===================================================================================================
-#endif
-
 #if PETSC_VERSION_MAJOR!=3 || PETSC_VERSION_MINOR<PETSC_MINOR_MIN || PETSC_VERSION_MINOR>PETSC_MINOR_MAX
 ===================================================================================================

@@ -106,7 +81,7 @@ subroutine DAMASK_interface_init
     typeSize
   integer, dimension(8) :: &
     dateAndTime
-  integer        :: mpi_err
+  integer        :: err
   PetscErrorCode :: petsc_err
   external :: &
     quit

@@ -118,8 +93,8 @@ subroutine DAMASK_interface_init
 #ifdef _OPENMP
   ! If openMP is enabled, check if the MPI libary supports it and initialize accordingly.
   ! Otherwise, the first call to PETSc will do the initialization.
-  call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,mpi_err)
-  if (mpi_err /= 0) call quit(1)
+  call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err)
+  if (err /= 0) call quit(1)
   if (threadLevel<MPI_THREAD_FUNNELED) then
     write(6,'(/,a)') ' ERROR: MPI library does not support OpenMP'
     call quit(1)

@@ -128,10 +103,10 @@ subroutine DAMASK_interface_init
   call PETScInitializeNoArguments(petsc_err)                                                        ! according to PETSc manual, that should be the first line in the code
   CHKERRQ(petsc_err)                                                                                ! this is a macro definition, it is case sensitive

-  call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,mpi_err)
-  if (mpi_err /= 0) call quit(1)
+  call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,err)
+  if (err /= 0) call quit(1)
-  call MPI_Comm_size(PETSC_COMM_WORLD,worldsize,mpi_err)
-  if (mpi_err /= 0) call quit(1)
+  call MPI_Comm_size(PETSC_COMM_WORLD,worldsize,err)
+  if (err /= 0) call quit(1)

  mainProcess: if (worldrank == 0) then
    if (output_unit /= 6) then
@ -181,22 +156,23 @@ subroutine DAMASK_interface_init
|
||||||
write(6,'(/,a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',dateAndTime(2),'/', dateAndTime(1)
|
write(6,'(/,a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',dateAndTime(2),'/', dateAndTime(1)
|
||||||
write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':', dateAndTime(6),':', dateAndTime(7)
|
write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':', dateAndTime(6),':', dateAndTime(7)
|
||||||
|
|
||||||
call MPI_Type_size(MPI_INTEGER,typeSize,mpi_err)
|
call MPI_Type_size(MPI_INTEGER,typeSize,err)
|
||||||
if (mpi_err /= 0) call quit(1)
|
if (err /= 0) call quit(1)
|
||||||
if (typeSize*8 /= bit_size(0)) then
|
if (typeSize*8 /= bit_size(0)) then
|
||||||
write(6,'(a)') ' Mismatch between MPI and DAMASK integer'
|
write(6,'(a)') ' Mismatch between MPI and DAMASK integer'
|
||||||
call quit(1)
|
call quit(1)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
call MPI_Type_size(MPI_DOUBLE,typeSize,mpi_err)
|
call MPI_Type_size(MPI_DOUBLE,typeSize,err)
|
||||||
if (mpi_err /= 0) call quit(1)
|
if (err /= 0) call quit(1)
|
||||||
if (typeSize*8 /= storage_size(0.0_pReal)) then
|
if (typeSize*8 /= storage_size(0.0_pReal)) then
|
||||||
write(6,'(a)') ' Mismatch between MPI and DAMASK real'
|
write(6,'(a)') ' Mismatch between MPI and DAMASK real'
|
||||||
call quit(1)
|
call quit(1)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
do i = 1, command_argument_count()
|
do i = 1, command_argument_count()
|
||||||
call get_command_argument(i,arg)
|
call get_command_argument(i,arg,status=err)
|
||||||
|
if (err /= 0) call quit(1)
|
||||||
select case(trim(arg)) ! extract key
|
select case(trim(arg)) ! extract key
|
||||||
case ('-h','--help')
|
case ('-h','--help')
|
||||||
write(6,'(a)') ' #######################################################################'
|
write(6,'(a)') ' #######################################################################'
|
||||||
|
@ -206,7 +182,7 @@ subroutine DAMASK_interface_init
|
||||||
write(6,'(a,/)')' Valid command line switches:'
|
write(6,'(a,/)')' Valid command line switches:'
|
||||||
write(6,'(a)') ' --geom (-g, --geometry)'
|
write(6,'(a)') ' --geom (-g, --geometry)'
|
||||||
write(6,'(a)') ' --load (-l, --loadcase)'
|
write(6,'(a)') ' --load (-l, --loadcase)'
|
||||||
write(6,'(a)') ' --workingdir (-w, --wd, --workingdirectory, -d, --directory)'
|
write(6,'(a)') ' --workingdir (-w, --wd, --workingdirectory)'
|
||||||
write(6,'(a)') ' --restart (-r, --rs)'
|
write(6,'(a)') ' --restart (-r, --rs)'
|
||||||
write(6,'(a)') ' --help (-h)'
|
write(6,'(a)') ' --help (-h)'
|
||||||
write(6,'(/,a)')' -----------------------------------------------------------------------'
|
write(6,'(/,a)')' -----------------------------------------------------------------------'
|
||||||
|
@ -223,12 +199,12 @@ subroutine DAMASK_interface_init
|
||||||
write(6,'(a)') ' directory.'
|
write(6,'(a)') ' directory.'
|
||||||
write(6,'(a)') ' For further configuration place "numerics.config"'
|
write(6,'(a)') ' For further configuration place "numerics.config"'
|
||||||
write(6,'(a)')' and "debug.config" in that directory.'
|
write(6,'(a)')' and "debug.config" in that directory.'
|
||||||
write(6,'(/,a)')' --restart XX'
|
write(6,'(/,a)')' --restart N'
|
||||||
write(6,'(a)') ' Reads in increment XX and continues with calculating'
|
write(6,'(a)') ' Reads in increment N and continues with calculating'
|
||||||
write(6,'(a)') ' increment XX+1 based on this.'
|
write(6,'(a)') ' increment N+1 based on this.'
|
||||||
write(6,'(a)') ' Appends to existing results file'
|
write(6,'(a)') ' Appends to existing results file'
|
||||||
write(6,'(a)') ' "NameOfGeom_NameOfLoadFile".'
|
write(6,'(a)') ' "NameOfGeom_NameOfLoadFile.hdf5".'
|
||||||
write(6,'(a)') ' Works only if the restart information for increment XX'
|
write(6,'(a)') ' Works only if the restart information for increment N'
|
||||||
write(6,'(a)') ' is available in the working directory.'
|
write(6,'(a)') ' is available in the working directory.'
|
||||||
write(6,'(/,a)')' -----------------------------------------------------------------------'
|
write(6,'(/,a)')' -----------------------------------------------------------------------'
|
||||||
write(6,'(a)') ' Help:'
|
write(6,'(a)') ' Help:'
|
||||||
|
@ -236,19 +212,20 @@ subroutine DAMASK_interface_init
|
||||||
write(6,'(a,/)')' Prints this message and exits'
|
write(6,'(a,/)')' Prints this message and exits'
|
||||||
call quit(0) ! normal Termination
|
call quit(0) ! normal Termination
|
||||||
case ('-l', '--load', '--loadcase')
|
case ('-l', '--load', '--loadcase')
|
||||||
call get_command_argument(i+1,loadCaseArg)
|
call get_command_argument(i+1,loadCaseArg,status=err)
|
||||||
case ('-g', '--geom', '--geometry')
|
case ('-g', '--geom', '--geometry')
|
||||||
call get_command_argument(i+1,geometryArg)
|
call get_command_argument(i+1,geometryArg,status=err)
|
||||||
case ('-w', '-d', '--wd', '--directory', '--workingdir', '--workingdirectory')
|
case ('-w', '--wd', '--workingdir', '--workingdirectory')
|
||||||
call get_command_argument(i+1,workingDirArg)
|
call get_command_argument(i+1,workingDirArg,status=err)
|
||||||
case ('-r', '--rs', '--restart')
|
case ('-r', '--rs', '--restart')
|
||||||
call get_command_argument(i+1,arg)
|
call get_command_argument(i+1,arg,status=err)
|
||||||
read(arg,*,iostat=stat) interface_restartInc
|
read(arg,*,iostat=stat) interface_restartInc
|
||||||
if (interface_restartInc < 0 .or. stat /=0) then
|
if (interface_restartInc < 0 .or. stat /=0) then
|
||||||
write(6,'(/,a)') ' ERROR: Could not parse restart increment: '//trim(arg)
|
write(6,'(/,a)') ' ERROR: Could not parse restart increment: '//trim(arg)
|
||||||
call quit(1)
|
call quit(1)
|
||||||
endif
|
endif
|
||||||
end select
|
end select
|
||||||
|
if (err /= 0) call quit(1)
|
||||||
enddo
|
enddo
|
||||||
|
|
||||||
if (len_trim(loadcaseArg) == 0 .or. len_trim(geometryArg) == 0) then
|
if (len_trim(loadcaseArg) == 0 .or. len_trim(geometryArg) == 0) then
|
||||||
|
|
|
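The status=err additions above use the standard error argument of get_command_argument, so a failed retrieval aborts instead of silently yielding an empty string. A self-contained sketch using only standard intrinsics (status is positive on failure and negative on truncation):

program arg_status_demo
  implicit none
  character(len=255) :: arg
  integer :: i, err
  do i = 1, command_argument_count()
    call get_command_argument(i,arg,status=err)      ! nonzero err: retrieval failed or truncated
    if (err /= 0) error stop 'could not retrieve command line argument'
    print *, trim(arg)
  enddo
end program arg_status_demo
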
@@ -81,7 +81,7 @@ contains


 !--------------------------------------------------------------------------------------------------
-!> @brief open libary and do sanity checks
+!> @brief initialize HDF5 libary and do sanity checks
 !--------------------------------------------------------------------------------------------------
 subroutine HDF5_utilities_init

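Independent of the DAMASK wrapper touched above, open-plus-version-check is the usual HDF5 init sequence. A rough standalone sketch against the plain HDF5 Fortran API (requires an HDF5 installation; not the HDF5_utilities interface):

program hdf5_init_demo
  use HDF5
  implicit none
  integer :: hdferr, major, minor, release
  call h5open_f(hdferr)                              ! initialize the library
  if (hdferr < 0) error stop 'HDF5 initialization failed'
  call h5get_libversion_f(major,minor,release,hdferr) ! simple sanity check
  print '(a,i0,a,i0,a,i0)', ' HDF5 version ', major, '.', minor, '.', release
  call h5close_f(hdferr)
end program hdf5_init_demo
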
19 src/IO.f90

@@ -49,7 +49,7 @@ subroutine IO_init

   write(6,'(/,a)') ' <<<+- IO init -+>>>'; flush(6)

-  call unitTest
+  call selfTest

 end subroutine IO_init

@@ -536,6 +536,19 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
     case (602)
       msg = 'invalid selection for debug'

+!------------------------------------------------------------------------------------------------
+! errors related to YAML input files
+    case (701)
+      msg = 'Incorrect indent/Null value not allowed'
+    case (702)
+      msg = 'Invalid use of flow yaml'
+    case (703)
+      msg = 'Space expected after a list indicator - '
+    case (704)
+      msg = 'Space expected after a colon for <key>: <value> pair'
+    case (705)
+      msg = 'Unsupported feature'
+
 !-------------------------------------------------------------------------------------------------
 ! errors related to the grid solver
     case (809)

@@ -696,7 +709,7 @@ end subroutine IO_warning
 !--------------------------------------------------------------------------------------------------
 !> @brief check correctness of some IO functions
 !--------------------------------------------------------------------------------------------------
-subroutine unitTest
+subroutine selfTest

   integer, dimension(:), allocatable :: chunkPos
   character(len=:), allocatable :: str

@@ -745,6 +758,6 @@ subroutine unitTest
   str = IO_rmComment(' ab #')
   if (str /= ' ab'.or. len(str) /= 3) call IO_error(0,ext_msg='IO_rmComment/7')

-end subroutine unitTest
+end subroutine selfTest

 end module IO

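The unitTest-to-selfTest rename (applied to IO here and to YAML_types, lattice, and math further down) keeps the convention that every module checks itself during init and aborts on the first failed assertion. The shape of that convention in a toy, self-contained form (error handling reduced to error stop instead of IO_error):

program selftest_demo
  implicit none
  call demo_init
contains
  subroutine demo_init
    call selfTest                                    ! every *_init runs its own checks first
    print *, ' <<<+- demo init -+>>>'
  end subroutine demo_init
  subroutine selfTest
    if (len_trim('  ab  ') /= 4) error stop 'selfTest: len_trim'  ! abort on first failure
  end subroutine selfTest
end program selftest_demo
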
644 src/YAML_parse.f90

@@ -0,0 +1,644 @@
+!----------------------------------------------------------------------------------------------------
+!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
+!> @author Sharan Roongta, Max-Planck-Institut für Eisenforschung GmbH
+!> @brief Parser for YAML files
+!> @details module converts a YAML input file to an equivalent YAML flow style which is then parsed.
+!----------------------------------------------------------------------------------------------------
+
+module YAML_parse
+
+  use prec
+  use IO
+  use YAML_types
+
+  implicit none
+
+  private
+
+  public :: YAML_init
+  public :: parse_flow,to_flow
+
+contains
+
+!--------------------------------------------------------------------------------------------------
+!> @brief do sanity checks
+!--------------------------------------------------------------------------------------------------
+subroutine YAML_init
+
+  call selfTest
+
+end subroutine YAML_init
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief reads the flow style string and stores it in the form of dictionaries, lists and scalars.
+!> @details A node type pointer can either point to a dictionary, list or scalar type entities.
+!--------------------------------------------------------------------------------------------------
+recursive function parse_flow(flow_string) result(node)
+
+  character(len=*), intent(inout) :: flow_string
+  class (tNode), pointer :: node
+
+  class (tNode), pointer :: myVal
+  character(len=pStringLen) :: key
+
+  integer :: e, &                                    !> end position of dictionary or list
+             s, &                                    !> start position of dictionary or list
+             d                                       !> position of key: value separator (':')
+
+  flow_string = trim(adjustl(flow_string(:)))
+  if (flow_string(1:1) == '{') then                  ! start of a dictionary
+    e = 1
+    allocate(tDict::node)
+    do while (e < len_trim(flow_string))
+      s = e
+      d = s + scan(flow_string(s+1:),':')
+      e = d + find_end(flow_string(d+1:),'}')
+
+      key = trim(adjustl(flow_string(s+1:d-1)))
+      myVal => parse_flow(flow_string(d+1:e-1))      ! parse items (recursively)
+
+      select type (node)
+        class is (tDict)
+          call node%set(key,myVal)
+      end select
+    end do
+
+  elseif (flow_string(1:1) == '[') then              ! start of a list
+    e = 1
+    allocate(tList::node)
+    do while (e < len_trim(flow_string))
+      s = e
+      e = s + find_end(flow_string(s+1:),']')
+      myVal => parse_flow(flow_string(s+1:e-1))      ! parse items (recursively)
+
+      select type (node)
+        class is (tList)
+          call node%append(myVal)
+      end select
+    end do
+
+  else                                               ! scalar value
+    allocate(tScalar::node)
+    select type (node)
+      class is (tScalar)
+        node = trim(adjustl(flow_string))
+    end select
+  endif
+
+end function parse_flow
+
+
+!--------------------------------------------------------------------------------------------------
+!> @brief finds location of chunk end: ',' or '}' or ']'
+!> @details leaves nested lists ( '[...]' and dicts '{...}') intact
+!--------------------------------------------------------------------------------------------------
+integer function find_end(str,e_char)
+
+  character(len=*), intent(in) :: str
+  character,        intent(in) :: e_char             !< end of list/dict ( '}' or ']')
+
+  integer :: N_sq, &                                 !< number of open square brackets
+             N_cu, &                                 !< number of open curly brackets
+             i
+
+  N_sq = 0
+  N_cu = 0
+  do i = 1, len_trim(str)
+    if (N_sq==0 .and. N_cu==0 .and. scan(str(i:i),e_char//',') == 1) exit
+    N_sq = N_sq + merge(1,0,str(i:i) == '[')
+    N_cu = N_cu + merge(1,0,str(i:i) == '{')
+    N_sq = N_sq - merge(1,0,str(i:i) == ']')
+    N_cu = N_cu - merge(1,0,str(i:i) == '}')
+  enddo
+  find_end = i
+
+end function find_end
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief Returns Indentation.
+! @details It determines the indentation level for a given block/line.
+! In cases for nested lists, an offset is added to determine the indent of the item block (skip
+! leading dashes)
+!--------------------------------------------------------------------------------------------------
+integer function indentDepth(line,offset)
+
+  character(len=*), intent(in) :: line
+  integer, optional,intent(in) :: offset
+
+  indentDepth = verify(line,IO_WHITESPACE) -1
+  if(present(offset)) indentDepth = indentDepth + offset
+
+end function indentDepth
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief check whether a string is in flow style, i.e. starts with '{' or '['
+!--------------------------------------------------------------------------------------------------
+logical function isFlow(line)
+
+  character(len=*), intent(in) :: line
+
+  isFlow = index(adjustl(line),'[') == 1 .or. index(adjustl(line),'{') == 1
+
+end function isFlow
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief check whether a string is a scalar item, i.e. starts without any special symbols
+!--------------------------------------------------------------------------------------------------
+logical function isScalar(line)
+
+  character(len=*), intent(in) :: line
+
+  isScalar = (.not.isKeyValue(line) .and. .not.isKey(line) .and. .not.isListItem(line) &
+              .and. .not.isFlow(line))
+
+end function isScalar
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief check whether a string is a list item, i.e. starts with '-'
+!--------------------------------------------------------------------------------------------------
+logical function isListItem(line)
+
+  character(len=*), intent(in) :: line
+
+  isListItem = index(adjustl(line),'-') == 1
+
+end function isListItem
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief check whether a string contains a key value pair of the for '<key>: <value>'
+!--------------------------------------------------------------------------------------------------
+logical function isKeyValue(line)
+
+  character(len=*), intent(in) :: line
+  isKeyValue = .false.
+
+  if( .not. isKey(line) .and. index(IO_rmComment(line),':') > 0 .and. .not. isFlow(line)) then
+    if(index(IO_rmComment(line),': ') > 0) then
+      isKeyValue = .true.
+    else
+      call IO_error(704,ext_msg=line)
+    endif
+  endif
+
+end function isKeyValue
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief check whether a string contains a key without a value, i.e. it ends with ':'
+! ToDo: check whether this is safe for trailing spaces followed by a new line character
+!--------------------------------------------------------------------------------------------------
+logical function isKey(line)
+
+  character(len=*), intent(in) :: line
+
+  if(len(IO_rmComment(line)) == 0) then
+    isKey = .false.
+  else
+    isKey = IO_rmComment(line(len(IO_rmComment(line)):len(IO_rmComment(line)))) == ':' &
+            .and. .not. isFlow(line)
+  endif
+
+end function isKey
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief reads a line of YAML block which is already in flow style
+! @details Dicts should be enlcosed within '{}' for it to be consistent with DAMASK YAML parser
+!--------------------------------------------------------------------------------------------------
+recursive subroutine line_isFlow(flow,s_flow,line)
+
+  character(len=*), intent(inout) :: flow            !< YAML in flow style only
+  integer,          intent(inout) :: s_flow          !< start position in flow
+  character(len=*), intent(in)    :: line
+
+  integer :: &
+    s, &
+    list_chunk, &
+    dict_chunk
+
+  if(index(adjustl(line),'[') == 1) then
+    s = index(line,'[')
+    flow(s_flow:s_flow) = '['
+    s_flow = s_flow +1
+    do while(s < len_trim(line))
+      list_chunk = s + find_end(line(s+1:),']')
+      if(iskeyValue(line(s+1:list_chunk-1))) then
+        flow(s_flow:s_flow) = '{'
+        s_flow = s_flow +1
+        call keyValue_toFlow(flow,s_flow,line(s+1:list_chunk-1))
+        flow(s_flow:s_flow) = '}'
+        s_flow = s_flow +1
+      elseif(isFlow(line(s+1:list_chunk-1))) then
+        call line_isFlow(flow,s_flow,line(s+1:list_chunk-1))
+      else
+        call line_toFlow(flow,s_flow,line(s+1:list_chunk-1))
+      endif
+      flow(s_flow:s_flow+1) = ', '
+      s_flow = s_flow +2
+      s = s + find_end(line(s+1:),']')
+    enddo
+    s_flow = s_flow - 1
+    if (flow(s_flow-1:s_flow-1) == ',') s_flow = s_flow - 1
+    flow(s_flow:s_flow) = ']'
+    s_flow = s_flow+1
+
+  elseif(index(adjustl(line),'{') == 1) then
+    s = index(line,'{')
+    flow(s_flow:s_flow) = '{'
+    s_flow = s_flow +1
+    do while(s < len_trim(line))
+      dict_chunk = s + find_end(line(s+1:),'}')
+      if( .not. iskeyValue(line(s+1:dict_chunk-1))) call IO_error(705,ext_msg=line)
+      call keyValue_toFlow(flow,s_flow,line(s+1:dict_chunk-1))
+      flow(s_flow:s_flow+1) = ', '
+      s_flow = s_flow +2
+      s = s + find_end(line(s+1:),'}')
+    enddo
+    s_flow = s_flow -1
+    if(flow(s_flow-1:s_flow-1) == ',') s_flow = s_flow -1
+    flow(s_flow:s_flow) = '}'
+    s_flow = s_flow +1
+  else
+    call line_toFlow(flow,s_flow,line)
+  endif
+
+end subroutine line_isFlow
+
+
+!-------------------------------------------------------------------------------------------------
+! @brief reads a line of YAML block of type <key>: <value> and places it in the YAML flow style structure
+! @details Makes sure that the <value> is consistent with the input required in DAMASK YAML parser
+!-------------------------------------------------------------------------------------------------
+recursive subroutine keyValue_toFlow(flow,s_flow,line)
+
+  character(len=*), intent(inout) :: flow            !< YAML in flow style only
+  integer,          intent(inout) :: s_flow          !< start position in flow
+  character(len=*), intent(in)    :: line
+
+  character(len=:), allocatable :: line_asStandard   ! standard form of <key>: <value>
+  integer :: &
+    d_flow, &
+    col_pos, &
+    offset_value
+
+  col_pos = index(line,':')
+  if(isFlow(line(col_pos+1:))) then
+    d_flow = len_trim(adjustl(line(:col_pos)))
+    flow(s_flow:s_flow+d_flow+1) = trim(adjustl(line(:col_pos)))//' '
+    s_flow = s_flow + d_flow+1
+    call line_isFlow(flow,s_flow,line(col_pos+1:))
+  else
+    offset_value = indentDepth(line(col_pos+2:))
+    line_asStandard = line(:col_pos+1)//line(col_pos+2+offset_value:)
+    call line_toFlow(flow,s_flow,line_asStandard)
+  endif
+
+end subroutine keyValue_toFlow
+
+
+!-------------------------------------------------------------------------------------------------
+! @brief reads a line of YAML block and places it in the YAML flow style structure
+!-------------------------------------------------------------------------------------------------
+subroutine line_toFlow(flow,s_flow,line)
+
+  character(len=*), intent(inout) :: flow            !< YAML in flow style only
+  integer,          intent(inout) :: s_flow          !< start position in flow
+  character(len=*), intent(in)    :: line
+
+  integer :: &
+    d_flow
+
+  d_flow = len_trim(adjustl(line))
+  flow(s_flow:s_flow+d_flow) = trim(adjustl(line))
+  s_flow = s_flow + d_flow
+
+end subroutine line_toFlow
+
+
+!-------------------------------------------------------------------------------------------------
+! @brief convert a yaml list in block style to a yaml list in flow style
+! @details enters the function when encountered with the list indicator '- '
+! reads each scalar list item and separates each other with a ','
+! If list item is non scalar, it stores the offset for that list item block
+! Increase in the indentation level or when list item is not scalar -> 'decide' function is called.
+! decrease in indentation level indicates the end of an indentation block
+!-------------------------------------------------------------------------------------------------
+recursive subroutine lst(blck,flow,s_blck,s_flow,offset)
+
+  character(len=*), intent(in)    :: blck            !< YAML in mixed style
+  character(len=*), intent(inout) :: flow            !< YAML in flow style only
+  integer,          intent(inout) :: s_blck, &       !< start position in blck
+                                     s_flow, &       !< start position in flow
+                                     offset          !< stores leading '- ' in nested lists
+  character(len=pStringLen) :: line
+  integer :: e_blck,indent
+
+  indent = indentDepth(blck(s_blck:),offset)
+  do while (s_blck <= len_trim(blck))
+    e_blck = s_blck + index(blck(s_blck:),IO_EOL) - 2
+    line = IO_rmComment(blck(s_blck:e_blck))
+    if (len_trim(line) == 0) then
+      s_blck = e_blck + 2                            ! forward to next line
+      cycle
+    elseif(indentDepth(line,offset) > indent) then
+      call decide(blck,flow,s_blck,s_flow,offset)
+      offset = 0
+      flow(s_flow:s_flow+1) = ', '
+      s_flow = s_flow + 2
+    elseif(indentDepth(line,offset) < indent .or. .not. isListItem(line)) then
+      offset = 0
+      exit                                           ! job done (lower level)
+    else
+      if(trim(adjustl(line)) == '-') then            ! list item in next line
+        s_blck = e_blck + 2
+        e_blck = e_blck + index(blck(e_blck+2:),IO_EOL)
+        line = IO_rmComment(blck(s_blck:e_blck))
+        if(indentDepth(line) < indent .or. indentDepth(line) == indent) &
+          call IO_error(701,ext_msg=line)
+
+        if(isScalar(line)) then
+          call line_toFlow(flow,s_flow,line)
+          s_blck = e_blck +2
+          offset = 0
+        elseif(isFlow(line)) then
+          call line_isFlow(flow,s_flow,line)
+          s_blck = e_blck +2
+          offset = 0
+        endif
+      else                                           ! list item in the same line
+        if(line(indentDepth(line)+2:indentDepth(line)+2) /= ' ') &
+          call IO_error(703,ext_msg=line)
+        line = line(indentDepth(line)+3:)
+        if(isScalar(line)) then
+          call line_toFlow(flow,s_flow,line)
+          s_blck = e_blck +2
+          offset = 0
+        elseif(isFlow(line)) then
+          call line_isFlow(flow,s_flow,line)
+          s_blck = e_blck +2
+          offset = 0
+        else                                         ! non scalar list item
+          offset = offset + indentDepth(blck(s_blck:))+1 ! offset in spaces to be ignored
+          s_blck = s_blck + index(blck(s_blck:e_blck),'-') ! s_blck after '-' symbol
+        endif
+      end if
+    end if
+
+    if(isScalar(line) .or. isFlow(line)) then
+      flow(s_flow:s_flow+1) = ', '
+      s_flow = s_flow +2
+    endif
+
+  end do
+
+  s_flow = s_flow - 1
+  if (flow(s_flow-1:s_flow-1) == ',') s_flow = s_flow - 1
+
+end subroutine lst
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief convert a yaml dict in block style to a yaml dict in flow style
+! @details enters the function when encountered with the dictionary indicator ':'
+! parses each line in the block and compares indentation of a line with the preceding line
+! upon increase in indentation level -> 'decide' function decides if the line is a list or dict
+! decrease in indentation indicates the end of an indentation block
+!--------------------------------------------------------------------------------------------------
+recursive subroutine dct(blck,flow,s_blck,s_flow,offset)
+
+  character(len=*), intent(in)    :: blck            !< YAML in mixed style
+  character(len=*), intent(inout) :: flow            !< YAML in flow style only
+  integer,          intent(inout) :: s_blck, &       !< start position in blck
+                                     s_flow, &       !< start position in flow
+                                     offset
+
+  character(len=pStringLen) :: line
+  integer :: e_blck,indent
+  logical :: previous_isKey
+
+  previous_isKey = .false.
+
+  indent = indentDepth(blck(s_blck:),offset)
+
+  do while (s_blck <= len_trim(blck))
+    e_blck = s_blck + index(blck(s_blck:),IO_EOL) - 2
+    line = IO_rmComment(blck(s_blck:e_blck))
+    if (len_trim(line) == 0) then
+      s_blck = e_blck + 2                            ! forward to next line
+      cycle
+    elseif(indentDepth(line,offset) < indent) then
+      if(isScalar(line) .or. isFlow(line) .and. previous_isKey) &
+        call IO_error(701,ext_msg=line)
+      offset = 0
+      exit                                           ! job done (lower level)
+    elseif(indentDepth(line,offset) > indent .or. isListItem(line)) then
+      offset = 0
+      call decide(blck,flow,s_blck,s_flow,offset)
+    else
+      if(isScalar(line)) call IO_error(701,ext_msg=line)
+      if(isFlow(line)) call IO_error(702,ext_msg=line)
+
+      line = line(indentDepth(line)+1:)
+      if(previous_isKey) then
+        flow(s_flow-1:s_flow) = ', '
+        s_flow = s_flow + 1
+      endif
+
+      if(isKeyValue(line)) then
+        call keyValue_toFlow(flow,s_flow,line)
+      else
+        call line_toFlow(flow,s_flow,line)
+      endif
+
+      s_blck = e_blck +2
+    end if
+
+    if(isScalar(line) .or. isKeyValue(line)) then
+      flow(s_flow:s_flow) = ','
+      s_flow = s_flow + 1
+      previous_isKey = .false.
+    else
+      previous_isKey = .true.
+    endif
+
+    flow(s_flow:s_flow) = ' '
+    s_flow = s_flow + 1
+    offset = 0
+  end do
+
+  s_flow = s_flow - 1
+  if (flow(s_flow-1:s_flow-1) == ',') s_flow = s_flow - 1
+
+end subroutine dct
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief decide whether next block is list or dict
+!--------------------------------------------------------------------------------------------------
+recursive subroutine decide(blck,flow,s_blck,s_flow,offset)
+
+  character(len=*), intent(in)    :: blck            !< YAML in mixed style
+  character(len=*), intent(inout) :: flow            !< YAML in flow style only
+  integer,          intent(inout) :: s_blck, &       !< start position in blck
+                                     s_flow, &       !< start position in flow
+                                     offset
+  integer :: e_blck
+  character(len=pStringLen) :: line
+
+  if(s_blck <= len(blck)) then
+    e_blck = s_blck + index(blck(s_blck:),IO_EOL) - 2
+    line = IO_rmComment(blck(s_blck:e_blck))
+
+    ! exit here if '---' is found
+    if (isListItem(line)) then
+      flow(s_flow:s_flow) = '['
+      s_flow = s_flow + 1
+      call lst(blck,flow,s_blck,s_flow,offset)
+      flow(s_flow:s_flow) = ']'
+      s_flow = s_flow + 1
+    elseif(isKey(line) .or. isKeyValue(line)) then
+      flow(s_flow:s_flow) = '{'
+      s_flow = s_flow + 1
+      call dct(blck,flow,s_blck,s_flow,offset)
+      flow(s_flow:s_flow) = '}'
+      s_flow = s_flow + 1
+    elseif(isFlow(line)) then
+      call line_isFlow(flow,s_flow,line)
+      s_blck = e_blck +2
+    else
+      line = line(indentDepth(line)+1:)
+      call line_toFlow(flow,s_flow,line)
+      s_blck = e_blck +2
+    endif
+  endif
+
+end subroutine
+
+
+!--------------------------------------------------------------------------------------------------
+! @brief convert all block style YAML parts to flow style
+!--------------------------------------------------------------------------------------------------
+function to_flow(blck)
+
+  character(len=:), allocatable :: to_flow
+  character(len=*), intent(in)  :: blck              !< YAML mixed style
+  integer :: s_blck, &                               !< start position in blck
+             s_flow, &                               !< start position in flow
+             offset, &                               !< counts leading '- ' in nested lists
+             end_line
+  if(isFlow(blck)) then
+    to_flow = trim(adjustl(blck))
+  else
+    allocate(character(len=len(blck)*2)::to_flow)
+    ! move forward here (skip empty lines) and remove '----' if found
+    s_flow = 1
+    s_blck = 1
+    offset = 0
+    call decide(blck,to_flow,s_blck,s_flow,offset)
+    to_flow = trim(to_flow(:s_flow-1))
+  endif
+  end_line = index(to_flow,new_line(''))
+  if(end_line > 0) to_flow = to_flow(:end_line-1)
+
+end function to_flow
+
+
+!--------------------------------------------------------------------------------------------------
+subroutine selfTest()
+
+  if (indentDepth(' a') /= 1) call IO_error(0,ext_msg='indentDepth')
+  if (indentDepth('a') /= 0) call IO_error(0,ext_msg='indentDepth')
+  if (indentDepth('x ') /= 0) call IO_error(0,ext_msg='indentDepth')
+
+  if ( isFlow(' a')) call IO_error(0,ext_msg='isFLow')
+  if (.not. isFlow('{')) call IO_error(0,ext_msg='isFlow')
+  if (.not. isFlow(' [')) call IO_error(0,ext_msg='isFlow')
+
+  if ( isListItem(' a')) call IO_error(0,ext_msg='isListItem')
+  if (.not. isListItem('- a ')) call IO_error(0,ext_msg='isListItem')
+  if (.not. isListItem(' -b')) call IO_error(0,ext_msg='isListItem')
+
+  if ( isKeyValue(' a')) call IO_error(0,ext_msg='isKeyValue')
+  if ( isKeyValue(' a: ')) call IO_error(0,ext_msg='isKeyValue')
+  if (.not. isKeyValue(' a: b')) call IO_error(0,ext_msg='isKeyValue')
+
+  if ( isKey(' a')) call IO_error(0,ext_msg='isKey')
+  if ( isKey('{a:b}')) call IO_error(0,ext_msg='isKey')
+  if ( isKey(' a:b')) call IO_error(0,ext_msg='isKey')
+  if (.not. isKey(' a: ')) call IO_error(0,ext_msg='isKey')
+  if (.not. isKey(' a:')) call IO_error(0,ext_msg='isKey')
+  if (.not. isKey(' a: #')) call IO_error(0,ext_msg='isKey')
+
+  if( isScalar('a: ')) call IO_error(0,ext_msg='isScalar')
+  if( isScalar('a: b')) call IO_error(0,ext_msg='isScalar')
+  if( isScalar('{a:b}')) call IO_error(0,ext_msg='isScalar')
+  if( isScalar('- a:')) call IO_error(0,ext_msg='isScalar')
+  if(.not. isScalar(' a')) call IO_error(0,ext_msg='isScalar')
+
+  basic_list: block
+    character(len=*), parameter :: block_list = &
+      " - Casablanca"//IO_EOL//&
+      " - North by Northwest"//IO_EOL
+    character(len=*), parameter :: block_list_newline = &
+      " -"//IO_EOL//&
+      "   Casablanca"//IO_EOL//&
+      " -"//IO_EOL//&
+      "   North by Northwest"//IO_EOL
+    character(len=*), parameter :: flow_list = &
+      "[Casablanca, North by Northwest]"
+
+    if (.not. to_flow(block_list) == flow_list) call IO_error(0,ext_msg='to_flow')
+    if (.not. to_flow(block_list_newline) == flow_list) call IO_error(0,ext_msg='to_flow')
+  end block basic_list
+
+  basic_dict: block
+    character(len=*), parameter :: block_dict = &
+      " aa: Casablanca"//IO_EOL//&
+      " bb: North by Northwest"//IO_EOL
+    character(len=*), parameter :: block_dict_newline = &
+      " aa:"//IO_EOL//&
+      "   Casablanca"//IO_EOL//&
+      " bb:"//IO_EOL//&
+      "   North by Northwest"//IO_EOL
+    character(len=*), parameter :: flow_dict = &
+      "{aa: Casablanca, bb: North by Northwest}"
+
+    if (.not. to_flow(block_dict) == flow_dict) call IO_error(0,ext_msg='to_flow')
+    if (.not. to_flow(block_dict_newline) == flow_dict) call IO_error(0,ext_msg='to_flow')
+  end block basic_dict
+
+  basic_flow: block
+    character(len=*), parameter :: flow_braces = &
+      " source: [{param: 1}, {param: 2}, {param: 3}, {param: 4}]"//IO_EOL
+    character(len=*), parameter :: flow_mixed_braces = &
+      " source: [param: 1, {param: 2}, param: 3, {param: 4}]"//IO_EOL
+    character(len=*), parameter :: flow = &
+      "{source: [{param: 1}, {param: 2}, {param: 3}, {param: 4}]}"
+
+    if (.not. to_flow(flow_braces) == flow) call IO_error(0,ext_msg='to_flow')
+    if (.not. to_flow(flow_mixed_braces) == flow) call IO_error(0,ext_msg='to_flow')
+  end block basic_flow
+
+  basic_mixed: block
+    character(len=*), parameter :: block_flow = &
+      " aa:"//IO_EOL//&
+      " - "//IO_EOL//&
+      "  param_1: [a: b, c, {d: {e: [f: g, h]}}]"//IO_EOL//&
+      " - c: d"//IO_EOL//&
+      " bb:"//IO_EOL//&
+      " - {param_1: [{a: b}, c, {d: {e: [{f: g}, h]}}]}"//IO_EOL
+    character(len=*), parameter :: mixed_flow = &
+      "{aa: [{param_1: [{a: b}, c, {d: {e: [{f: g}, h]}}]}, {c: d}], bb: [{param_1: [{a: b}, c, {d: {e: [{f: g}, h]}}]}]}"
+
+    if(.not. to_flow(block_flow) == mixed_flow) call IO_error(0,ext_msg='to_flow')
+  end block basic_mixed
+
+end subroutine selfTest
+
+end module YAML_parse

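The workhorse of the new module is find_end's bracket counting: a ',' (or the closing character) only terminates a chunk while no '[' or '{' is currently open, which is what lets parse_flow recurse into nested collections. Just that helper, re-implemented as a standalone check (chunk_end is an illustrative name, not the module's):

program find_end_demo
  implicit none
  print *, chunk_end('{a: 1}, b','}')                ! prints 7: the ',' after the nested dict
contains
  integer function chunk_end(str,e_char)
    character(len=*), intent(in) :: str
    character,        intent(in) :: e_char
    integer :: N_sq, N_cu, i
    N_sq = 0; N_cu = 0
    do i = 1, len_trim(str)
      if (N_sq == 0 .and. N_cu == 0 .and. scan(str(i:i),e_char//',') == 1) exit
      N_sq = N_sq + merge(1,0,str(i:i) == '[') - merge(1,0,str(i:i) == ']')
      N_cu = N_cu + merge(1,0,str(i:i) == '{') - merge(1,0,str(i:i) == '}')
    enddo
    chunk_end = i
  end function chunk_end
end program find_end_demo
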
@@ -16,14 +16,7 @@ module YAML_types

   private

-  public :: &
-    tNode, &
-    tScalar, &
-    tDict, &
-    tList, &
-    YAML_types_init
-
-  type, abstract :: tNode
+  type, abstract, public :: tNode
     integer :: length = 0
   contains
     procedure(asFormattedString), deferred :: asFormattedString

@@ -102,7 +95,7 @@ module YAML_types
   end type tNode


-  type, extends(tNode) :: tScalar
+  type, extends(tNode), public :: tScalar

     character(len=:), allocatable, private :: value

@@ -118,7 +111,7 @@ module YAML_types
       asString => tScalar_asString
   end type tScalar

-  type, extends(tNode) :: tList
+  type, extends(tNode), public :: tList

     class(tItem), pointer :: first => null()

@@ -136,7 +129,7 @@ module YAML_types
     final :: tList_finalize
   end type tList

-  type, extends(tList) :: tDict
+  type, extends(tList), public :: tDict
   contains
     procedure :: asFormattedString => tDict_asFormattedString
     procedure :: set => tDict_set

@@ -171,6 +164,10 @@ module YAML_types
     module procedure tScalar_assign__
   end interface assignment (=)

+  public :: &
+    YAML_types_init, &
+    assignment(=)
+
 contains

 !--------------------------------------------------------------------------------------------------

@@ -180,7 +177,7 @@ subroutine YAML_types_init

   write(6,'(/,a)') ' <<<+- YAML_types init -+>>>'

-  call unitTest
+  call selfTest

 end subroutine YAML_types_init

@@ -188,7 +185,7 @@ end subroutine YAML_types_init
 !--------------------------------------------------------------------------------------------------
 !> @brief check correctness of some type bound procedures
 !--------------------------------------------------------------------------------------------------
-subroutine unitTest
+subroutine selfTest

   class(tNode), pointer :: s1,s2
   allocate(tScalar::s1)

@@ -260,7 +257,7 @@ subroutine unitTest
   if(n%get_asString(1) /= 'True') call IO_error(0,ext_msg='byIndex_asString')
   end block

-end subroutine unitTest
+end subroutine selfTest


 !---------------------------------------------------------------------------------------------------

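Moving public onto the type declarations replaces the separate export list with per-entity visibility; only the init routine and the assignment interface still need the explicit list. Both styles side by side in a toy module (all names illustrative):

module visibility_demo
  implicit none
  private                                            ! default: nothing is exported

  type, public :: tThing                             ! exported via an attribute ...
    integer :: val = 0
  end type tThing

  public :: demo_init                                ! ... or via an explicit list
contains
  subroutine demo_init
    type(tThing) :: t
    print *, 'tThing default:', t%val
  end subroutine demo_init
end module visibility_demo

program use_visibility_demo
  use visibility_demo
  implicit none
  call demo_init
end program use_visibility_demo
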
@@ -8,6 +8,7 @@
 #include "debug.f90"
 #include "list.f90"
 #include "YAML_types.f90"
+#include "YAML_parse.f90"
 #include "future.f90"
 #include "config.f90"
 #include "LAPACK_interface.f90"

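The new include lands after "YAML_types.f90": in a single-translation-unit build like this file list, a module has to be compiled before any module that uses it. The ordering constraint in a toy two-module example (a_mod/b_mod are illustrative):

module a_mod                                         ! must come first ...
  implicit none
  integer, parameter :: answer = 42
end module a_mod

module b_mod                                         ! ... because it is used here at compile time
  use a_mod
  implicit none
contains
  subroutine show
    print *, answer
  end subroutine show
end module b_mod

program order_demo
  use b_mod
  implicit none
  call show
end program order_demo
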
@@ -160,7 +160,7 @@ module subroutine plastic_phenopowerlaw_init
                                                          config%getFloats('interaction_twintwin'), &
                                                          config%getString('lattice_structure'))
   prm%gamma_twin_char = lattice_characteristicShear_twin(N_tw,config%getString('lattice_structure'),&
-                                                         config%getFloat('c/a'))
+                                                         config%getFloat('c/a',defaultVal=0.0_pReal))

   xi_twin_0 = config%getFloats('tau0_twin',requiredSize=size(N_tw))

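The added defaultVal presumably makes 'c/a' optional for lattice structures that never use the ratio, while structures that do can still supply a real value. The optional-default idiom itself, in a toy getter (not the DAMASK config class):

program default_val_demo
  implicit none
  print *, getFloat(defaultVal=0.0)                  ! key absent: fall back to the default
  print *, getFloat(val=1.633)                       ! key present: use the parsed value
contains
  real function getFloat(val,defaultVal)
    real, intent(in), optional :: val, defaultVal
    if (present(val)) then
      getFloat = val
    elseif (present(defaultVal)) then
      getFloat = defaultVal
    else
      error stop 'missing mandatory parameter'       ! mandatory key without a default
    endif
  end function getFloat
end program default_val_demo
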
@@ -6,7 +6,7 @@
 !> @details doing cutbacking, forwarding in case of restart, reporting statistics, writing
 !> results
 !--------------------------------------------------------------------------------------------------
-program DAMASK_spectral
+program DAMASK_grid
 #include <petsc/finclude/petscsys.h>
 use PETScsys
 use prec

@@ -495,4 +495,4 @@ program DAMASK_spectral

 call quit(0) ! no complains ;)

-end program DAMASK_spectral
+end program DAMASK_grid

@@ -42,7 +42,9 @@ contains
 !--------------------------------------------------------------------------------------------------
 !> @brief reads the geometry file to obtain information on discretization
 !--------------------------------------------------------------------------------------------------
-subroutine discretization_grid_init
+subroutine discretization_grid_init(restart)
+
+  logical, intent(in) :: restart

   include 'fftw3-mpi.f03'
   real(pReal), dimension(3) :: &

@@ -100,13 +102,14 @@ subroutine discretization_grid_init

 !--------------------------------------------------------------------------------------------------
 ! store geometry information for post processing
+  if(.not. restart) then
     call results_openJobFile
     call results_closeGroup(results_addGroup('geometry'))
     call results_addAttribute('grid', grid, 'geometry')
     call results_addAttribute('size', geomSize,'geometry')
     call results_addAttribute('origin',origin, 'geometry')
     call results_closeJobFile
+  endif
 !--------------------------------------------------------------------------------------------------
 ! geometry information required by the nonlocal CP model
   call geometry_plastic_nonlocal_setIPvolume(reshape([(product(mySize/real(myGrid,pReal)),j=1,product(myGrid))], &

@@ -529,7 +529,7 @@ subroutine lattice_init
     lattice_DamageMobility(p) = config_phase(p)%getFloat('damage_mobility',defaultVal=0.0_pReal)
 ! SHOULD NOT BE PART OF LATTICE END

-    call unitTest
+    call selfTest

   enddo

@@ -1436,6 +1436,7 @@ function lattice_SchmidMatrix_slip(Nslip,structure,cOverA) result(SchmidMatrix)
       NslipMax = BCT_NSLIPSYSTEM
       slipSystems = BCT_SYSTEMSLIP
     case default
+      allocate(NslipMax(0))
       call IO_error(137,ext_msg='lattice_SchmidMatrix_slip: '//trim(structure))
   end select

@@ -1485,6 +1486,7 @@ function lattice_SchmidMatrix_twin(Ntwin,structure,cOverA) result(SchmidMatrix)
      NtwinMax = HEX_NTWINSYSTEM
      twinSystems = HEX_SYSTEMTWIN
    case default
+      allocate(NtwinMax(0))
      call IO_error(137,ext_msg='lattice_SchmidMatrix_twin: '//trim(structure))
  end select

@@ -1564,6 +1566,7 @@ function lattice_SchmidMatrix_cleavage(Ncleavage,structure,cOverA) result(Schmid
       NcleavageMax = BCC_NCLEAVAGESYSTEM
       cleavageSystems = BCC_SYSTEMCLEAVAGE
     case default
+      allocate(NcleavageMax(0))
       call IO_error(137,ext_msg='lattice_SchmidMatrix_cleavage: '//trim(structure))
   end select

@@ -1919,6 +1922,7 @@ function coordinateSystem_slip(Nslip,structure,cOverA) result(coordinateSystem)
       NslipMax = BCT_NSLIPSYSTEM
       slipSystems = BCT_SYSTEMSLIP
     case default
+      allocate(NslipMax(0))
       call IO_error(137,ext_msg='coordinateSystem_slip: '//trim(structure))
   end select

@@ -2291,7 +2295,7 @@ end function equivalent_mu
 !--------------------------------------------------------------------------------------------------
 !> @brief check correctness of some lattice functions
 !--------------------------------------------------------------------------------------------------
-subroutine unitTest
+subroutine selfTest

   real(pReal), dimension(:,:,:), allocatable :: CoSy
   real(pReal), dimension(:,:), allocatable :: system

@@ -2304,7 +2308,6 @@ subroutine unitTest

   system = reshape([1.0_pReal+r(1),0.0_pReal,0.0_pReal, 0.0_pReal,1.0_pReal+r(2),0.0_pReal],[6,1])
   CoSy = buildCoordinateSystem([1],[1],system,'fcc',0.0_pReal)

   if(any(dNeq(CoSy(1:3,1:3,1),math_I3))) &
     call IO_error(0)

@@ -2321,6 +2324,6 @@ subroutine unitTest
   if(dNeq(lambda*0.5_pReal/(lambda+equivalent_mu(C,'reuss')),equivalent_nu(C,'reuss'),1.0e-12_pReal)) &
     call IO_error(0,ext_msg='equivalent_nu/reuss')

-end subroutine unitTest
+end subroutine selfTest

 end module lattice

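The added allocate(...(0)) calls give each result array a defined, zero-size status on the invalid-structure branch, so nothing downstream touches an unallocated allocatable before IO_error aborts. Zero-sized allocation is legal Fortran, as a standalone check shows:

program zero_size_demo
  implicit none
  integer, dimension(:), allocatable :: NslipMax
  allocate(NslipMax(0))                              ! allocated, but holds no elements
  print *, allocated(NslipMax), size(NslipMax)       ! prints: T 0
end program zero_size_demo
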
@@ -207,7 +207,9 @@ contains
 !--------------------------------------------------------------------------------------------------
 !> @brief parses material configuration file
 !--------------------------------------------------------------------------------------------------
-subroutine material_init
+subroutine material_init(restart)
+
+  logical, intent(in) :: restart

   integer :: i,e,m,c,h, myDebug, myPhase, myHomog, myMicro
   integer, dimension(:), allocatable :: &

@@ -339,11 +341,12 @@ subroutine material_init
   call config_deallocate('material.config/microstructure')
   call config_deallocate('material.config/texture')

+  if (.not. restart) then
     call results_openJobFile
     call results_mapping_constituent(material_phaseAt,material_phaseMemberAt,config_name_phase)
     call results_mapping_materialpoint(material_homogenizationAt,material_homogenizationMemberAt,config_name_homogenization)
     call results_closeJobFile
+  endif

 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 ! BEGIN DEPRECATED

@@ -79,7 +79,7 @@ module math
 
 !---------------------------------------------------------------------------------------------------
 private :: &
-  unitTest
+  selfTest
 
 contains
 

@@ -113,7 +113,7 @@ subroutine math_init
 
 call random_seed(put = randInit)
 
-call unitTest
+call selfTest
 
 end subroutine math_init
 

@@ -1192,7 +1192,7 @@ end function math_clip
 !--------------------------------------------------------------------------------------------------
 !> @brief check correctness of some math functions
 !--------------------------------------------------------------------------------------------------
-subroutine unitTest
+subroutine selfTest
 
 integer, dimension(2,4) :: &
   sort_in_ = reshape([+1,+5, +5,+6, -1,-1, +3,-2],[2,4])

@@ -1330,6 +1330,6 @@ subroutine unitTest
 if(dNeq0(math_LeviCivita(ijk(1),ijk(2),ijk(3))))&
   call IO_error(0,ext_msg='math_LeviCivita')
 
-end subroutine unitTest
+end subroutine selfTest
 
 end module math
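The unitTest → selfTest rename is applied uniformly across this commit (lattice above; prec, quaternions, and rotations below). The convention itself is unchanged: each module keeps a private check routine that its *_init subroutine runs once at startup, so a miscompiled build aborts before doing any real work. A minimal sketch of the pattern, using a hypothetical module and error stop in place of DAMASK's IO_error/quit:

module demo
  implicit none
  private :: selfTest                                ! checks are not part of the public API
  public  :: demo_init

contains

subroutine demo_init
  write(6,'(/,a)') ' <<<+- demo init -+>>>'; flush(6)
  call selfTest                                      ! fail fast at initialization
end subroutine demo_init

subroutine selfTest
  if (1.0 + epsilon(1.0) <= 1.0) error stop 'selfTest failed'   ! real consistency checks go here
end subroutine selfTest

end module demo

program demo_main
  use demo
  implicit none
  call demo_init
end program demo_main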
@@ -6,7 +6,7 @@
 !> @details doing cutbacking, reporting statistics, writing
 !> results
 !--------------------------------------------------------------------------------------------------
-program DAMASK_FEM
+program DAMASK_mesh
 #include <petsc/finclude/petscsys.h>
 use PetscDM
 use prec

@@ -367,4 +367,4 @@ program DAMASK_FEM
 
 call quit(0) ! no complains ;)
 
-end program DAMASK_FEM
+end program DAMASK_mesh
@@ -63,7 +63,9 @@ contains
 !> @brief initializes the mesh by calling all necessary private routines the mesh module
 !! Order and routines strongly depend on type of solver
 !--------------------------------------------------------------------------------------------------
-subroutine discretization_mesh_init
+subroutine discretization_mesh_init(restart)
 
+  logical, intent(in) :: restart
+
 integer, dimension(1), parameter:: FE_geomtype = [1] !< geometry type of particular element type
 integer, dimension(1) :: FE_Nips !< number of IPs in a specific type of element
@@ -17,11 +17,9 @@ module mesh_mech_FEM
 use prec
 use FEM_utilities
 use discretization_mesh
-use IO
 use DAMASK_interface
 use numerics
 use FEM_quadrature
-use FEsolving
 use homogenization
 use math
 
@@ -63,8 +63,8 @@ module numerics
 #endif
 
 !--------------------------------------------------------------------------------------------------
-! FEM parameters:
-#ifdef FEM
+! Mesh parameters:
+#ifdef Mesh
 integer, protected, public :: &
   integrationOrder = 2, & !< order of quadrature rule required
   structOrder = 2 !< order of displacement shape functions

@@ -200,8 +200,8 @@ subroutine numerics_init
 #endif
 
 !--------------------------------------------------------------------------------------------------
-! FEM parameters
-#ifdef FEM
+! Mesh parameters
+#ifdef Mesh
 case ('integrationorder')
   integrationorder = IO_intValue(line,chunkPos,2)
 case ('structorder')

@@ -267,7 +267,7 @@ subroutine numerics_init
 
 !--------------------------------------------------------------------------------------------------
 ! spectral parameters
-#ifdef FEM
+#ifdef Mesh
 write(6,'(a24,1x,i8)') ' integrationOrder: ',integrationOrder
 write(6,'(a24,1x,i8)') ' structOrder: ',structOrder
 write(6,'(a24,1x,L8)') ' B-Bar stabilisation: ',BBarStabilisation
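The preprocessor guard changes from FEM to Mesh in step with the -DMesh definition now used for the mesh-solver build target; only the macro name changes, the mechanism stays the usual C-preprocessor pass over the Fortran sources. A minimal sketch of that mechanism (hypothetical file gate.f90, compiled e.g. with gfortran -cpp -DMesh gate.f90; directives must start in column 1):

program gate
  implicit none
#ifdef Mesh
  print *, 'mesh-solver parameters compiled in (integrationOrder, structOrder, ...)'
#else
  print *, 'mesh-solver parameters compiled out'
#endif
end program gate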
@@ -75,7 +75,7 @@ module prec
 emptyStringArray = [character(len=pStringLen)::]
 
 private :: &
-  unitTest
+  selfTest
 
 contains
 

@@ -94,7 +94,7 @@ subroutine prec_init
 write(6,'(a,e10.3)') ' Minimum value: ',tiny(0.0_pReal)
 write(6,'(a,i3)') ' Decimal precision: ',precision(0.0_pReal)
 
-call unitTest
+call selfTest
 
 end subroutine prec_init
 

@@ -233,7 +233,7 @@ end function cNeq
 !--------------------------------------------------------------------------------------------------
 !> @brief check correctness of some prec functions
 !--------------------------------------------------------------------------------------------------
-subroutine unitTest
+subroutine selfTest
 
 integer, allocatable, dimension(:) :: realloc_lhs_test
 real(pReal), dimension(2) :: r

@@ -249,6 +249,6 @@ subroutine unitTest
 realloc_lhs_test = [1,2]
 if (any(realloc_lhs_test/=[1,2])) call quit(9000)
 
-end subroutine unitTest
+end subroutine selfTest
 
 end module prec
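The realloc_lhs_test in prec's self test probes automatic reallocation on assignment, the Fortran 2003 semantics the code base relies on; if the array does not come back as [1,2], the executable quits immediately. The same probe as a standalone program (error stop stands in for DAMASK's quit):

program realloc_probe
  implicit none
  integer, allocatable, dimension(:) :: a

  a = [1,2]                       ! allocation on assignment (Fortran 2003)
  if (any(a /= [1,2])) error stop 'automatic LHS reallocation is broken'
  a = [a,3]                       ! reallocation to the new shape
  print *, 'realloc on assignment OK, size =', size(a)
end program realloc_probe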
@@ -112,7 +112,7 @@ contains
 subroutine quaternions_init
 
 write(6,'(/,a)') ' <<<+- quaternions init -+>>>'; flush(6)
-call unitTest
+call selfTest
 
 end subroutine quaternions_init
 

@@ -457,7 +457,7 @@ end function inverse
 !--------------------------------------------------------------------------------------------------
 !> @brief check correctness of some quaternions functions
 !--------------------------------------------------------------------------------------------------
-subroutine unitTest
+subroutine selfTest
 
 real(pReal), dimension(4) :: qu
 type(quaternion) :: q, q_2

@@ -524,7 +524,7 @@ subroutine unitTest
 endif
 #endif
 
-end subroutine unitTest
+end subroutine selfTest
 
 
 end module quaternions
@@ -59,15 +59,18 @@ module results
 results_mapping_materialpoint
 contains
 
-subroutine results_init
+subroutine results_init(restart)
 
+  logical, intent(in) :: restart
+
 character(len=pStringLen) :: commandLine
 
-write(6,'(/,a)') ' <<<+- results init -+>>>'
+write(6,'(/,a)') ' <<<+- results init -+>>>'; flush(6)
 
 write(6,'(/,a)') ' Diehl et al., Integrating Materials and Manufacturing Innovation 6(1):83–91, 2017'
 write(6,'(a)') ' https://doi.org/10.1007/s40192-017-0084-5'
 
+  if(.not. restart) then
 resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
 call results_addAttribute('DADF5_version_major',0)
 call results_addAttribute('DADF5_version_minor',6)

@@ -77,6 +80,7 @@ subroutine results_init
 call results_closeGroup(results_addGroup('mapping'))
 call results_closeGroup(results_addGroup('mapping/cellResults'))
 call results_closeJobFile
+  endif
 
 end subroutine results_init
 
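With restart = .false., results_init creates <job>.hdf5 in write mode and stamps the DADF5 version attributes and group layout exactly once; with restart = .true. the whole block is skipped, so the file written by the earlier run is reused. A hedged sketch of the same create-or-reuse decision using plain Fortran I/O in place of the HDF5 wrappers (file name and contents are illustrative):

program create_or_reuse
  implicit none
  logical :: restart, there
  integer :: u

  restart = .false.                                       ! toggle to simulate a restarted job

  if (.not. restart) then
    open(newunit=u,file='job.results',status='replace')   ! fresh file, cf. HDF5_openFile(...,'w',...)
    write(u,'(a)') 'DADF5 version 0.6'                    ! cf. DADF5_version_major/minor attributes
    close(u)
  else
    inquire(file='job.results',exist=there)
    if (.not. there) error stop 'restart requested but no results file exists'
  end if
end program create_or_reuse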
@@ -307,6 +311,8 @@ subroutine results_writeScalarDataset_real(group,dataset,label,description,SIuni
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
 if (HDF5_objectExists(groupHandle,label)) &
   call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
+if (HDF5_objectExists(groupHandle,label)) &
+  call HDF5_addAttribute(groupHandle,'Created',now(),label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeScalarDataset_real

@@ -336,6 +342,8 @@ subroutine results_writeVectorDataset_real(group,dataset,label,description,SIuni
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
 if (HDF5_objectExists(groupHandle,label)) &
   call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
+if (HDF5_objectExists(groupHandle,label)) &
+  call HDF5_addAttribute(groupHandle,'Created',now(),label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeVectorDataset_real

@@ -387,6 +395,8 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
 if (HDF5_objectExists(groupHandle,label)) &
   call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
+if (HDF5_objectExists(groupHandle,label)) &
+  call HDF5_addAttribute(groupHandle,'Created',now(),label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeTensorDataset_real

@@ -417,6 +427,8 @@ subroutine results_writeVectorDataset_int(group,dataset,label,description,SIunit
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
 if (HDF5_objectExists(groupHandle,label)) &
   call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
+if (HDF5_objectExists(groupHandle,label)) &
+  call HDF5_addAttribute(groupHandle,'Created',now(),label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeVectorDataset_int

@@ -447,6 +459,8 @@ subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit
 call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
 if (HDF5_objectExists(groupHandle,label)) &
   call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
+if (HDF5_objectExists(groupHandle,label)) &
+  call HDF5_addAttribute(groupHandle,'Created',now(),label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeTensorDataset_int

@@ -477,6 +491,8 @@ subroutine results_writeScalarDataset_rotation(group,dataset,label,description,l
 call HDF5_addAttribute(groupHandle,'Lattice',lattice_structure,label)
 if (HDF5_objectExists(groupHandle,label)) &
   call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
+if (HDF5_objectExists(groupHandle,label)) &
+  call HDF5_addAttribute(groupHandle,'Created',now(),label)
 call HDF5_closeGroup(groupHandle)
 
 end subroutine results_writeScalarDataset_rotation
@@ -752,6 +768,21 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
 end subroutine results_mapping_materialpoint
 
 
+!--------------------------------------------------------------------------------------------------
+!> @brief current date and time (including time zone information)
+!--------------------------------------------------------------------------------------------------
+character(len=24) function now()
+
+  character(len=5)      :: zone
+  integer, dimension(8) :: values
+
+  call date_and_time(values=values,zone=zone)
+  write(now,'(i4.4,5(a,i2.2),a)') &
+    values(1),'-',values(2),'-',values(3),' ',values(5),':',values(6),':',values(7),zone
+
+end function now
+
+
 !!--------------------------------------------------------------------------------------------------
 !!> @brief adds the backward mapping from spatial position and constituent ID to results
 !!--------------------------------------------------------------------------------------------------
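The new now() helper packs date_and_time output into a fixed 24-character stamp, 'YYYY-MM-DD HH:MM:SS+ZZZZ' (4+1+2+1+2+1+2+1+2+1+2 characters plus the 5-character zone), which the writer routines above attach as the 'Created' attribute. The function is self-contained; wrapped in a toy driver it can be checked directly:

program now_demo
  implicit none
  write(6,'(a)') now()

contains

  character(len=24) function now()
    character(len=5)      :: zone
    integer, dimension(8) :: values

    call date_and_time(values=values,zone=zone)   ! values(5:7) = hour, minute, second; values(4) skipped
    write(now,'(i4.4,5(a,i2.2),a)') &
      values(1),'-',values(2),'-',values(3),' ',values(5),':',values(6),':',values(7),zone
  end function now

end program now_demo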
@@ -105,7 +105,11 @@ subroutine rotations_init
 
 call quaternions_init
 write(6,'(/,a)') ' <<<+- rotations init -+>>>'; flush(6)
-call unitTest
+
+write(6,'(/,a)') ' Rowenhorst et al., Modelling and Simulation in Materials Science and Engineering 23:083501, 2015'
+write(6,'(a)') ' https://doi.org/10.1088/0965-0393/23/8/083501'
+
+call selfTest
 
 end subroutine rotations_init
 

@@ -1340,7 +1344,7 @@ end function GetPyramidOrder
 !--------------------------------------------------------------------------------------------------
 !> @brief check correctness of some rotations functions
 !--------------------------------------------------------------------------------------------------
-subroutine unitTest
+subroutine selfTest
 
 type(rotation) :: R
 real(pReal), dimension(4) :: qu, ax, ro

@@ -1443,7 +1447,7 @@ subroutine unitTest
 
 enddo
 
-end subroutine unitTest
+end subroutine selfTest
 
 
 end module rotations