Merge branch 'development' into FEM-PETSC_3.11+

Martin Diehl 2020-06-01 15:19:37 +02:00
commit 44fc65b812
78 changed files with 3237 additions and 2670 deletions

View File

@@ -108,9 +108,9 @@ if (DAMASK_SOLVER STREQUAL "grid")
project (damask-grid Fortran C)
add_definitions (-DGrid)
message ("Building Grid Solver\n")
elseif (DAMASK_SOLVER STREQUAL "fem" OR DAMASK_SOLVER STREQUAL "mesh")
elseif (DAMASK_SOLVER STREQUAL "mesh")
project (damask-mesh Fortran C)
add_definitions (-DFEM)
add_definitions (-DMesh)
message ("Building Mesh Solver\n")
else ()
message (FATAL_ERROR "Build target (DAMASK_SOLVER) is not defined")
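Note: the solver is selected at configure time through the DAMASK_SOLVER CMake variable; with this change the legacy value "fem" is dropped and only "grid" and "mesh" remain valid. An illustrative configure call (not taken from this commit) would be: cmake -DDAMASK_SOLVER=mesh <path-to-source>.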

View File

@@ -110,7 +110,7 @@ for executable in icc icpc ifort ;do
done
firstLevel "MPI Wrappers"
for executable in mpicc mpiCC mpiicc mpic++ mpicpc mpicxx mpifort mpif90 mpif77; do
for executable in mpicc mpiCC mpiicc mpic++ mpiicpc mpicxx mpifort mpiifort mpif90 mpif77; do
getDetails $executable '-show'
done

View File

@@ -2,19 +2,23 @@ SHELL = /bin/sh
########################################################################################
# Makefile for the installation of DAMASK
########################################################################################
DAMASK_ROOT = $(shell python -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser('$(pwd)'))))")
DAMASK_ROOT = $(shell python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser('$(pwd)'))))")
.PHONY: all
all: grid mesh processing
.PHONY: grid
grid: build/grid
@(cd build/grid;make -j${DAMASK_NUM_THREADS} all install;)
@rm -f ${DAMASK_ROOT}/bin/DAMASK_spectral > /dev/null || true
@ln -s ${DAMASK_ROOT}/bin/DAMASK_grid ${DAMASK_ROOT}/bin/DAMASK_spectral || true
.PHONY: spectral
spectral: grid
.PHONY: mesh
mesh: build/mesh
@(cd build/mesh; make -j${DAMASK_NUM_THREADS} all install;)
@rm -f ${DAMASK_ROOT}/bin/DAMASK_FEM > /dev/null || true
@ln -s ${DAMASK_ROOT}/bin/DAMASK_mesh ${DAMASK_ROOT}/bin/DAMASK_FEM || true
.PHONY: FEM
FEM: mesh
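Note: the rename stays backward compatible. The phony targets "spectral" and "FEM" alias the new "grid" and "mesh" targets, and the post-install symlinks keep bin/DAMASK_spectral and bin/DAMASK_FEM pointing at bin/DAMASK_grid and bin/DAMASK_mesh, so existing calls such as "make spectral" keep working.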

@@ -1 +1 @@
Subproject commit c595994cd8880acadf50b5dedb79156d04d35b91
Subproject commit a6109acd264c683fd335b1d1f69934fc4a4078e3

View File

@@ -1 +1 @@
v2.0.3-2464-g90f93d23
v2.0.3-2614-g4b6b9478

View File

@@ -1,6 +1,9 @@
###################################################################################################
# GNU Compiler
###################################################################################################
if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 8.0)
message (FATAL_ERROR "GCC Compiler version: ${CMAKE_Fortran_COMPILER_VERSION} not supported")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-fopenmp")
@@ -14,7 +17,7 @@ elseif (OPTIMIZATION STREQUAL "AGGRESSIVE")
set (OPTIMIZATION_FLAGS "-O3 -ffast-math -funroll-loops -ftree-vectorize")
endif ()
set (STANDARD_CHECK "-std=f2008ts -pedantic-errors" )
set (STANDARD_CHECK "-std=f2018 -pedantic-errors" )
set (LINKER_FLAGS "${LINKER_FLAGS} -Wl")
# options parsed directly to the linker
set (LINKER_FLAGS "${LINKER_FLAGS},-undefined,dynamic_lookup" )
@@ -25,6 +28,9 @@ set (LINKER_FLAGS "${LINKER_FLAGS},-undefined,dynamic_lookup" )
set (COMPILE_FLAGS "${COMPILE_FLAGS} -xf95-cpp-input")
# preprocessor
set (COMPILE_FLAGS "${COMPILE_FLAGS} -fPIC -fPIE")
# position independent code
set (COMPILE_FLAGS "${COMPILE_FLAGS} -ffree-line-length-132")
# restrict line length to the standard 132 characters (lattice.f90 requires more characters)

View File

@@ -1,6 +1,10 @@
###################################################################################################
# Intel Compiler
###################################################################################################
if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 18.0)
message (FATAL_ERROR "Intel Compiler version: ${CMAKE_Fortran_COMPILER_VERSION} not supported")
endif ()
if (OPENMP)
set (OPENMP_FLAGS "-qopenmp -parallel")
endif ()

env/DAMASK.csh vendored (2 changed lines)
View File

@@ -3,7 +3,7 @@
set CALLED=($_)
set ENV_ROOT=`dirname $CALLED[2]`
set DAMASK_ROOT=`python -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" $ENV_ROOT"/../"`
set DAMASK_ROOT=`python3 -c "import os,sys; print(os.path.realpath(os.path.expanduser(sys.argv[1])))" $ENV_ROOT"/../"`
source $ENV_ROOT/CONFIG

env/DAMASK.sh vendored (2 changed lines)
View File

@@ -2,7 +2,7 @@
# usage: source DAMASK.sh
function canonicalPath {
python -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
}
function blink {

env/DAMASK.zsh vendored (2 changed lines)
View File

@@ -2,7 +2,7 @@
# usage: source DAMASK.zsh
function canonicalPath {
python -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
python3 -c "import os,sys; print(os.path.normpath(os.path.realpath(os.path.expanduser(sys.argv[1]))))" $1
}
function blink {

View File

@@ -1,3 +0,0 @@
hydrogenflux cahnhilliard
initialHydrogenConc 0.0
(output) hydrogenconc

View File

@@ -32,15 +32,12 @@ parser.add_option('--depth', dest='depth', metavar='string',
if filenames == []: filenames = [None]
if options.frame is None:
parser.error('frame not specified')
parser.error('frame not specified')
if options.depth is None:
parser.error('depth not specified')
parser.error('depth not specified')
theta=-0.75*np.pi
RotMat2TSL=np.array([[1., 0., 0.],
[0., np.cos(theta), np.sin(theta)], # Orientation to account for -135 deg
[0., -np.sin(theta), np.cos(theta)]]) # rotation for TSL convention
rot_to_TSL = damask.Rotation.from_axis_angle([-1,0,0,.75*np.pi])
for name in filenames:
damask.util.report(scriptName,name)
@@ -50,8 +47,6 @@ for name in filenames:
coord = - table.get(options.frame)
coord[:,2] += table.get(options.depth)[:,0]
table.add('coord',
np.einsum('ijk,ik->ij',np.broadcast_to(RotMat2TSL,(coord.shape[0],3,3)),coord),
scriptID+' '+' '.join(sys.argv[1:]))
table.add('coord',rot_to_TSL.broadcast_to(coord.shape[0]) @ coord,scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
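Note: the hard-coded RotMat2TSL matrix (a -135 degree rotation about x) is replaced by a damask.Rotation built from an axis-angle pair, and the einsum is replaced by Rotation's @ operator. A minimal sketch of the intended equivalence, assuming a damask version with the API used in this commit and that @ applies the matrix returned by as_matrix():
import numpy as np
import damask

rot   = damask.Rotation.from_axis_angle([-1,0,0,.75*np.pi])    # rotation from the new code
coord = np.random.rand(5,3)                                    # arbitrary coordinates

new = rot.broadcast_to(5) @ coord                              # vectorized application
old = np.einsum('ijk,ik->ij',
                np.broadcast_to(rot.as_matrix(),(5,3,3)),
                coord)                                         # matrix route of the old code
assert np.allclose(new,old)                                    # expected to agree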

View File

@@ -1,183 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from optparse import OptionParser
import re
from collections.abc import Iterable
import math # noqa
import scipy # noqa
import scipy.linalg # noqa
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def listify(x):
return x if isinstance(x, Iterable) else [x]
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add or alter column(s) with derived values according to user-defined arithmetic operation between column(s).
Column labels are tagged by '#label#' in formulas. Use ';' for ',' in functions.
Numpy is available as 'np'.
Special variables: #_row_# -- row index
Examples:
(1) magnitude of vector -- "np.linalg.norm(#vec#)"
(2) rounded root of row number -- "round(math.sqrt(#_row_#);3)"
""", version = scriptID)
parser.add_option('-l','--label',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = '(list of) new column labels')
parser.add_option('-f','--formula',
dest = 'formulas',
action = 'extend', metavar = '<string LIST>',
help = '(list of) formulas corresponding to labels')
parser.add_option('-c','--condition',
dest = 'condition', metavar='string',
help = 'condition to alter existing column data (optional)')
(options,filenames) = parser.parse_args()
if options.labels is None or options.formulas is None:
parser.error('no formulas and/or labels specified.')
if len(options.labels) != len(options.formulas):
parser.error('number of labels ({}) and formulas ({}) do not match.'.format(len(options.labels),len(options.formulas)))
for i in range(len(options.formulas)):
options.formulas[i] = options.formulas[i].replace(';',',')
# ------------------------------------- loop over input files --------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name)
except IOError:
continue
damask.util.report(scriptName,name)
# ------------------------------------------ read header -------------------------------------------
table.head_read()
# --------------------------------------------------------------------------------------------------
specials = { \
'_row_': 0,
}
# --------------------------------------- evaluate condition ---------------------------------------
if options.condition is not None:
condition = options.condition # copy per file, since might be altered inline
breaker = False
for position,(all,marker,column) in enumerate(set(re.findall(r'#(([s]#)?(.+?))#',condition))): # find three groups
idx = table.label_index(column)
dim = table.label_dimension(column)
if idx < 0 and column not in specials:
damask.util.croak('column "{}" not found.'.format(column))
breaker = True
else:
if column in specials:
replacement = 'specials["{}"]'.format(column)
elif dim == 1: # scalar input
replacement = '{}(table.data[{}])'.format({ '':'float',
's#':'str'}[marker],idx) # take float or string value of data column
elif dim > 1: # multidimensional input (vector, tensor, etc.)
replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim) # use (flat) array representation
condition = condition.replace('#'+all+'#',replacement)
if breaker: continue # found mistake in condition evaluation --> next file
# ------------------------------------------ build formulas ----------------------------------------
evaluator = {}
for label,formula in zip(options.labels,options.formulas):
for column in re.findall(r'#(.+?)#',formula): # loop over column labels in formula
idx = table.label_index(column)
dim = table.label_dimension(column)
if column in specials:
replacement = 'specials["{}"]'.format(column)
elif dim == 1: # scalar input
replacement = 'float(table.data[{}])'.format(idx) # take float value of data column
elif dim > 1: # multidimensional input (vector, tensor, etc.)
replacement = 'np.array(table.data[{}:{}],dtype=float)'.format(idx,idx+dim) # use (flat) array representation
else:
damask.util.croak('column {} not found, skipping {}...'.format(column,label))
options.labels.remove(label)
break
formula = formula.replace('#'+column+'#',replacement)
evaluator[label] = formula
# ---------------------------- separate requested labels into old and new --------------------------
veterans = list(set(options.labels)&set(table.labels(raw=False)+table.labels(raw=True)) ) # intersection of requested and existing
newbies = list(set(options.labels)-set(table.labels(raw=False)+table.labels(raw=True)) ) # requested but not existing
# ------------------------------------------ process data ------------------------------------------
firstLine = True
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
specials['_row_'] += 1 # count row
if firstLine:
firstLine = False
# ---------------------------- line 1: determine dimension of formulas -----------------------------
resultDim = {}
for label in list(options.labels): # iterate over stable copy
resultDim[label] = np.size(eval(evaluator[label])) # get dimension of formula[label]
if resultDim[label] == 0: options.labels.remove(label) # remove label if invalid result
for veteran in list(veterans):
if resultDim[veteran] != table.label_dimension(veteran):
damask.util.croak('skipping {} due to inconsistent dimension...'.format(veteran))
veterans.remove(veteran) # discard culprit
# ----------------------------------- line 1: assemble header --------------------------------------
for newby in newbies:
table.labels_append(['{}_{}'.format(i+1,newby) for i in range(resultDim[newby])]
if resultDim[newby] > 1 else newby)
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.head_write()
# -------------------------------------- evaluate formulas -----------------------------------------
if options.condition is None or eval(condition): # condition for veteran replacement fulfilled
for veteran in veterans: # evaluate formulas that overwrite
table.data[table.label_index(veteran):
table.label_index(veteran)+table.label_dimension(veteran)] = \
listify(eval(evaluator[veteran]))
for newby in newbies: # evaluate formulas that append
table.data_append(listify(eval(evaluator[newby])))
outputAlive = table.data_write() # output processed line
# ------------------------------------- output finalization ----------------------------------------
table.close() # close ASCII table
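Note: the deleted script builds Python expressions by substituting each #label# marker with a reference into the current table row and then eval()-uating the result. A self-contained miniature of that mechanism (hypothetical names, not the script's API):
import re
import numpy as np

row     = {'vec': np.array([3.,4.,0.])}                        # stand-in for one table row
formula = 'np.linalg.norm(#vec#)'                              # formula as given on the command line
expr    = re.sub(r'#(.+?)#',lambda m: 'row["{}"]'.format(m.group(1)),formula)
print(eval(expr))                                              # 5.0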

View File

@@ -1,68 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add RGB color value corresponding to TSL-OIM scheme for inverse pole figures.
""", version = scriptID)
parser.add_option('-p',
'--pole',
dest = 'pole',
type = 'float', nargs = 3, metavar = 'float float float',
help = 'lab frame direction for inverse pole figure [%default]')
parser.add_option('-s',
'--symmetry',
dest = 'symmetry',
type = 'choice', choices = damask.Symmetry.lattices[1:], metavar='string',
help = 'crystal symmetry [%default] {{{}}} '.format(', '.join(damask.Symmetry.lattices[1:])))
parser.add_option('-o',
'--orientation',
dest = 'quaternion',
metavar = 'string',
help = 'label of crystal orientation given as unit quaternion [%default]')
parser.set_defaults(pole = (0.0,0.0,1.0),
quaternion = 'orientation',
symmetry = damask.Symmetry.lattices[-1],
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
# damask.Orientation requires Bravais lattice, but we are only interested in symmetry
symmetry2lattice={'cubic':'fcc','hexagonal':'hex','tetragonal':'bct'}
lattice = symmetry2lattice[options.symmetry]
pole = np.array(options.pole)
pole /= np.linalg.norm(pole)
for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
orientation = table.get(options.quaternion)
color = np.empty((orientation.shape[0],3))
for i,o in enumerate(orientation):
color[i] = damask.Orientation(o,lattice = lattice).IPFcolor(pole)
table.add('IPF_{:g}{:g}{:g}_{sym}'.format(*options.pole,sym = options.symmetry.lower()),
color,
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@@ -1,75 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# definition of element-wise p-norms for matrices
# ToDo: better use numpy.linalg.norm
def norm(which,object):
if which == 'Abs': # p = 1
return sum(map(abs, object))
elif which == 'Frobenius': # p = 2
return np.sqrt(sum([x*x for x in object]))
elif which == 'Max': # p = inf
return max(map(abs, object))
else:
return -1
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add column(s) containing norm of requested column(s) being either vectors or tensors.
""", version = scriptID)
normChoices = ['abs','frobenius','max']
parser.add_option('-n','--norm',
dest = 'norm',
type = 'choice', choices = normChoices, metavar='string',
help = 'type of element-wise p-norm [frobenius] {%s}'%(','.join(map(str,normChoices))))
parser.add_option('-l','--label',
dest = 'labels',
action = 'extend', metavar = '<string LIST>',
help = 'heading of column(s) to calculate norm of')
parser.set_defaults(norm = 'frobenius',
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
if options.norm.lower() not in normChoices:
parser.error('invalid norm ({}) specified.'.format(options.norm))
if options.labels is None:
parser.error('no data column specified.')
for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for label in options.labels:
data = table.get(label)
data_norm = np.empty((data.shape[0],1))
for i,d in enumerate(data):
data_norm[i] = norm(options.norm.capitalize(),d)
table.add('norm{}({})'.format(options.norm.capitalize(),label),
data_norm,
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
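Note: as the ToDo comment already points out, the hand-rolled norm() duplicates standard p-norms; the three choices map directly onto numpy (a sketch):
import numpy as np

v = np.array([3.,-4.,0.])
print(np.linalg.norm(v,1))       # 7.0 -- 'Abs',       p = 1
print(np.linalg.norm(v))         # 5.0 -- 'Frobenius', p = 2
print(np.linalg.norm(v,np.inf))  # 4.0 -- 'Max',       p = inf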

View File

@@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@@ -24,14 +25,8 @@ Additional (globally fixed) rotations of the lab frame and/or crystal frame can
""", version = scriptID)
representations = {
'quaternion': ['qu',4],
'rodrigues': ['ro',4],
'Rodrigues': ['Ro',3],
'eulers': ['eu',3],
'matrix': ['om',9],
'angleaxis': ['ax',4],
}
representations = ['quaternion', 'rodrigues', 'eulers', 'matrix', 'axisangle']
parser.add_option('-o',
'--output',
@@ -93,10 +88,10 @@ parser.set_defaults(output = [],
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
#options.output = list(map(lambda x: x.lower(), options.output))
if options.output == [] or (not set(options.output).issubset(set(representations))):
parser.error('output must be chosen from {}.'.format(', '.join(representations)))
parser.error('output must be chosen from {}.'.format(', '.join(representations)))
input = [options.eulers is not None,
options.rodrigues is not None,
@@ -109,95 +104,47 @@ input = [options.eulers is not None,
if np.sum(input) != 1: parser.error('needs exactly one input format.')
(label,dim,inputtype) = [(options.eulers,representations['eulers'][1],'eulers'),
(options.rodrigues,representations['rodrigues'][1],'rodrigues'),
([options.x,options.y,options.z],[3,3,3],'frame'),
(options.matrix,representations['matrix'][1],'matrix'),
(options.quaternion,representations['quaternion'][1],'quaternion'),
][np.where(input)[0][0]] # select input label that was requested
r = damask.Rotation.fromAxisAngle(np.array(options.crystalrotation),options.degrees,normalise=True)
R = damask.Rotation.fromAxisAngle(np.array(options.labrotation),options.degrees,normalise=True)
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
r = damask.Rotation.from_axis_angle(np.array(options.crystalrotation),options.degrees,normalise=True)
R = damask.Rotation.from_axis_angle(np.array(options.labrotation),options.degrees,normalise=True)
for name in filenames:
try:
table = damask.ASCIItable(name = name)
except IOError:
continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read()
if options.eulers is not None:
label = options.eulers
o = damask.Rotation.from_Eulers(table.get(options.eulers), options.degrees)
elif options.rodrigues is not None:
label = options.rodrigues
o = damask.Rotation.from_Rodrigues(table.get(options.rodrigues))
elif options.matrix is not None:
label = options.matrix
o = damask.Rotation.from_matrix(table.get(options.matrix).reshape(-1,3,3))
elif options.x is not None:
label = '<{},{},{}>'.format(options.x,options.y,options.z)
M = np.block([table.get(options.x),table.get(options.y),table.get(options.z)]).reshape(-1,3,3)
o = damask.Rotation.from_matrix(M/np.linalg.norm(M,axis=0))
elif options.quaternion is not None:
label = options.quaternion
o = damask.Rotation.from_quaternion(table.get(options.quaternion))
# ------------------------------------------ sanity checks -----------------------------------------
o = r.broadcast_to(o.shape) @ o @ R.broadcast_to(o.shape)
errors = []
remarks = []
if not np.all(table.label_dimension(label) == dim): errors.append('input {} does not have dimension {}.'.format(label,dim))
else: column = table.label_index(label)
#if options.lattice is not None:
# o = damask.Orientation(rotation = o,lattice = options.lattice).reduced().rotation
if remarks != []: damask.util.croak(remarks)
if errors != []:
damask.util.croak(errors)
table.close(dismiss = True)
continue
# ------------------------------------------ assemble header ---------------------------------------
if 'rodrigues' in options.output:
table.add('ro({})'.format(label),o.as_Rodrigues(), scriptID+' '+' '.join(sys.argv[1:]))
if 'eulers' in options.output:
table.add('eu({})'.format(label),o.as_Eulers(options.degrees), scriptID+' '+' '.join(sys.argv[1:]))
if 'quaternion' in options.output:
table.add('qu({})'.format(label),o.as_quaternion(), scriptID+' '+' '.join(sys.argv[1:]))
if 'matrix' in options.output:
table.add('om({})'.format(label),o.as_matrix(), scriptID+' '+' '.join(sys.argv[1:]))
if 'axisangle' in options.output:
table.add('ax({})'.format(label),o.as_axis_angle(options.degrees), scriptID+' '+' '.join(sys.argv[1:]))
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
for output in options.output:
if output in representations:
table.labels_append(['{}_{}({})'.format(i+1,representations[output][0],label) \
for i in range(representations[output][1])])
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
if inputtype == 'eulers':
d = representations['eulers'][1]
o = damask.Rotation.fromEulers(list(map(float,table.data[column:column+d])),options.degrees)
elif inputtype == 'rodrigues':
d = representations['rodrigues'][1]
o = damask.Rotation.fromRodrigues(list(map(float,table.data[column:column+d])))
elif inputtype == 'matrix':
d = representations['matrix'][1]
o = damask.Rotation.fromMatrix(np.array(list(map(float,table.data[column:column+d]))).reshape(3,3))
elif inputtype == 'frame':
M = np.array(list(map(float,table.data[column[0]:column[0]+3] + \
table.data[column[1]:column[1]+3] + \
table.data[column[2]:column[2]+3]))).reshape(3,3).T
o = damask.Rotation.fromMatrix(M/np.linalg.norm(M,axis=0))
elif inputtype == 'quaternion':
d = representations['quaternion'][1]
o = damask.Rotation.fromQuaternion(list(map(float,table.data[column:column+d])))
o = r*o*R # apply additional lab and crystal frame rotations
if options.lattice is not None:
o = damask.Orientation(rotation = o,lattice = options.lattice).reduced().rotation
for output in options.output:
if output == 'quaternion': table.data_append(o.asQuaternion())
elif output == 'rodrigues': table.data_append(o.asRodrigues())
elif output == 'Rodrigues': table.data_append(o.asRodrigues(vector=True))
elif output == 'eulers': table.data_append(o.asEulers(degrees=options.degrees))
elif output == 'matrix': table.data_append(o.asMatrix())
elif output == 'angleaxis': table.data_append(o.asAxisAngle(degrees=options.degrees))
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)
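Note: this file shows the renaming convention applied throughout the commit: Rotation conversion routines move from camelCase to snake_case, e.g. fromQuaternion -> from_quaternion, fromAxisAngle -> from_axis_angle, fromEulers -> from_Eulers, fromRodrigues -> from_Rodrigues, fromMatrix -> from_matrix, asEulers -> as_Eulers. The same pattern recurs in the geometry and seed scripts further down.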

View File

@@ -1,50 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add column(s) containing Second Piola--Kirchhoff stress based on given column(s) of deformation
gradient and first Piola--Kirchhoff stress.
""", version = scriptID)
parser.add_option('-f','--defgrad',
dest = 'defgrad',
type = 'string', metavar = 'string',
help = 'heading of columns containing deformation gradient [%default]')
parser.add_option('-p','--stress',
dest = 'stress',
type = 'string', metavar = 'string',
help = 'heading of columns containing first Piola--Kirchhoff stress [%default]')
parser.set_defaults(defgrad = 'f',
stress = 'p',
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.add('S',
damask.mechanics.PK2(table.get(options.stress ).reshape(-1,3,3),
table.get(options.defgrad).reshape(-1,3,3)).reshape(-1,9),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@@ -1,65 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add coordinates of stereographic projection of given direction (pole) in crystal frame.
""", version = scriptID)
parser.add_option('-p',
'--pole',
dest = 'pole',
type = 'float', nargs = 3, metavar = 'float float float',
help = 'crystal frame direction for pole figure [%default]')
parser.add_option('--polar',
dest = 'polar',
action = 'store_true',
help = 'output polar coordinates (r,φ) instead of Cartesian coordinates (x,y)')
parser.add_option('-o',
'--orientation',
dest = 'quaternion',
metavar = 'string',
help = 'label of crystal orientation given as unit quaternion [%default]')
parser.set_defaults(pole = (1.0,0.0,0.0),
quaternion = 'orientation',
)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
pole = np.array(options.pole)
pole /= np.linalg.norm(pole)
for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
orientation = table.get(options.quaternion)
poles = np.empty((orientation.shape[0],2))
for i,o in enumerate(orientation):
rotatedPole = damask.Rotation(o)*pole # rotate pole according to crystal orientation
(x,y) = rotatedPole[0:2]/(1.+abs(pole[2])) # stereographic projection
poles[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if options.polar else [x,y] # cartesian coordinates
table.add('pole_{}{}{}'.format(*options.pole),
poles,
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
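Note: the deleted script projects a rotated unit pole p onto the equatorial plane via (x,y) = (p_x,p_y)/(1+|p_z|), optionally converting to polar coordinates (r,phi). A minimal numpy sketch of the same mapping:
import numpy as np

def stereographic(p,polar=False):
    """Stereographic projection of a unit vector, as in the deleted script."""
    x,y = p[:2]/(1.+abs(p[2]))
    return np.array([np.sqrt(x*x+y*y),np.arctan2(y,x)]) if polar else np.array([x,y])

print(stereographic(np.array([0.,0.,1.])))                     # pole along z maps to the origin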

View File

@@ -2,6 +2,7 @@
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
@@ -15,91 +16,82 @@ scriptID = ' '.join([scriptName,damask.version])
slipSystems = {
'fcc':
np.array([
# Slip direction Plane normal
[ 0, 1,-1, 1, 1, 1, ],
[-1, 0, 1, 1, 1, 1, ],
[ 1,-1, 0, 1, 1, 1, ],
[ 0,-1,-1, -1,-1, 1, ],
[ 1, 0, 1, -1,-1, 1, ],
[-1, 1, 0, -1,-1, 1, ],
[ 0,-1, 1, 1,-1,-1, ],
[-1, 0,-1, 1,-1,-1, ],
[ 1, 1, 0, 1,-1,-1, ],
[ 0, 1, 1, -1, 1,-1, ],
[ 1, 0,-1, -1, 1,-1, ],
[-1,-1, 0, -1, 1,-1, ],
],'f'),
[+0,+1,-1 , +1,+1,+1],
[-1,+0,+1 , +1,+1,+1],
[+1,-1,+0 , +1,+1,+1],
[+0,-1,-1 , -1,-1,+1],
[+1,+0,+1 , -1,-1,+1],
[-1,+1,+0 , -1,-1,+1],
[+0,-1,+1 , +1,-1,-1],
[-1,+0,-1 , +1,-1,-1],
[+1,+1,+0 , +1,-1,-1],
[+0,+1,+1 , -1,+1,-1],
[+1,+0,-1 , -1,+1,-1],
[-1,-1,+0 , -1,+1,-1],
],'d'),
'bcc':
np.array([
# Slip system <111>{110}
[ 1,-1, 1, 0, 1, 1, ],
[-1,-1, 1, 0, 1, 1, ],
[ 1, 1, 1, 0,-1, 1, ],
[-1, 1, 1, 0,-1, 1, ],
[-1, 1, 1, 1, 0, 1, ],
[-1,-1, 1, 1, 0, 1, ],
[ 1, 1, 1, -1, 0, 1, ],
[ 1,-1, 1, -1, 0, 1, ],
[-1, 1, 1, 1, 1, 0, ],
[-1, 1,-1, 1, 1, 0, ],
[ 1, 1, 1, -1, 1, 0, ],
[ 1, 1,-1, -1, 1, 0, ],
# Slip system <111>{112}
[-1, 1, 1, 2, 1, 1, ],
[ 1, 1, 1, -2, 1, 1, ],
[ 1, 1,-1, 2,-1, 1, ],
[ 1,-1, 1, 2, 1,-1, ],
[ 1,-1, 1, 1, 2, 1, ],
[ 1, 1,-1, -1, 2, 1, ],
[ 1, 1, 1, 1,-2, 1, ],
[-1, 1, 1, 1, 2,-1, ],
[ 1, 1,-1, 1, 1, 2, ],
[ 1,-1, 1, -1, 1, 2, ],
[-1, 1, 1, 1,-1, 2, ],
[ 1, 1, 1, 1, 1,-2, ],
],'f'),
[+1,-1,+1 , +0,+1,+1],
[-1,-1,+1 , +0,+1,+1],
[+1,+1,+1 , +0,-1,+1],
[-1,+1,+1 , +0,-1,+1],
[-1,+1,+1 , +1,+0,+1],
[-1,-1,+1 , +1,+0,+1],
[+1,+1,+1 , -1,+0,+1],
[+1,-1,+1 , -1,+0,+1],
[-1,+1,+1 , +1,+1,+0],
[-1,+1,-1 , +1,+1,+0],
[+1,+1,+1 , -1,+1,+0],
[+1,+1,-1 , -1,+1,+0],
[-1,+1,+1 , +2,+1,+1],
[+1,+1,+1 , -2,+1,+1],
[+1,+1,-1 , +2,-1,+1],
[+1,-1,+1 , +2,+1,-1],
[+1,-1,+1 , +1,+2,+1],
[+1,+1,-1 , -1,+2,+1],
[+1,+1,+1 , +1,-2,+1],
[-1,+1,+1 , +1,+2,-1],
[+1,+1,-1 , +1,+1,+2],
[+1,-1,+1 , -1,+1,+2],
[-1,+1,+1 , +1,-1,+2],
[+1,+1,+1 , +1,+1,-2],
],'d'),
'hex':
np.array([
# Basal systems <11.0>{00.1} (independent of c/a-ratio, Bravais notation (4 coordinate base))
[ 2, -1, -1, 0, 0, 0, 0, 1, ],
[-1, 2, -1, 0, 0, 0, 0, 1, ],
[-1, -1, 2, 0, 0, 0, 0, 1, ],
# 1st type prismatic systems <11.0>{10.0} (independent of c/a-ratio)
[ 2, -1, -1, 0, 0, 1, -1, 0, ],
[-1, 2, -1, 0, -1, 0, 1, 0, ],
[-1, -1, 2, 0, 1, -1, 0, 0, ],
# 2nd type prismatic systems <10.0>{11.0} -- a slip; plane normals independent of c/a-ratio
[ 0, 1, -1, 0, 2, -1, -1, 0, ],
[-1, 0, 1, 0, -1, 2, -1, 0, ],
[ 1, -1, 0, 0, -1, -1, 2, 0, ],
# 1st type 1st order pyramidal systems <11.0>{-11.1} -- plane normals depend on the c/a-ratio
[ 2, -1, -1, 0, 0, 1, -1, 1, ],
[-1, 2, -1, 0, -1, 0, 1, 1, ],
[-1, -1, 2, 0, 1, -1, 0, 1, ],
[ 1, 1, -2, 0, -1, 1, 0, 1, ],
[-2, 1, 1, 0, 0, -1, 1, 1, ],
[ 1, -2, 1, 0, 1, 0, -1, 1, ],
# pyramidal system: c+a slip <11.3>{-10.1} -- plane normals depend on the c/a-ratio
[ 2, -1, -1, 3, -1, 1, 0, 1, ],
[ 1, -2, 1, 3, -1, 1, 0, 1, ],
[-1, -1, 2, 3, 1, 0, -1, 1, ],
[-2, 1, 1, 3, 1, 0, -1, 1, ],
[-1, 2, -1, 3, 0, -1, 1, 1, ],
[ 1, 1, -2, 3, 0, -1, 1, 1, ],
[-2, 1, 1, 3, 1, -1, 0, 1, ],
[-1, 2, -1, 3, 1, -1, 0, 1, ],
[ 1, 1, -2, 3, -1, 0, 1, 1, ],
[ 2, -1, -1, 3, -1, 0, 1, 1, ],
[ 1, -2, 1, 3, 0, 1, -1, 1, ],
[-1, -1, 2, 3, 0, 1, -1, 1, ],
# pyramidal system: c+a slip <11.3>{-1-1.2} -- as for hexagonal ice (Castelnau et al. 1996, similar to twin system found below)
[ 2, -1, -1, 3, -2, 1, 1, 2, ], # sorted according to similar twin system
[-1, 2, -1, 3, 1, -2, 1, 2, ], # <11.3>{-1-1.2} shear = 2((c/a)^2-2)/(3 c/a)
[-1, -1, 2, 3, 1, 1, -2, 2, ],
[-2, 1, 1, 3, 2, -1, -1, 2, ],
[ 1, -2, 1, 3, -1, 2, -1, 2, ],
[ 1, 1, -2, 3, -1, -1, 2, 2, ],
],'f'),
[+2,-1,-1,+0 , +0,+0,+0,+1],
[-1,+2,-1,+0 , +0,+0,+0,+1],
[-1,-1,+2,+0 , +0,+0,+0,+1],
[+2,-1,-1,+0 , +0,+1,-1,+0],
[-1,+2,-1,+0 , -1,+0,+1,+0],
[-1,-1,+2,+0 , +1,-1,+0,+0],
[-1,+1,+0,+0 , +1,+1,-2,+0],
[+0,-1,+1,+0 , -2,+1,+1,+0],
[+1,+0,-1,+0 , +1,-2,+1,+0],
[-1,+2,-1,+0 , +1,+0,-1,+1],
[-2,+1,+1,+0 , +0,+1,-1,+1],
[-1,-1,+2,+0 , -1,+1,+0,+1],
[+1,-2,+1,+0 , -1,+0,+1,+1],
[+2,-1,-1,+0 , +0,-1,+1,+1],
[+1,+1,-2,+0 , +1,-1,+0,+1],
[-2,+1,+1,+3 , +1,+0,-1,+1],
[-1,-1,+2,+3 , +1,+0,-1,+1],
[-1,-1,+2,+3 , +0,+1,-1,+1],
[+1,-2,+1,+3 , +0,+1,-1,+1],
[+1,-2,+1,+3 , -1,+1,+0,+1],
[+2,-1,-1,+3 , -1,+1,+0,+1],
[+2,-1,-1,+3 , -1,+0,+1,+1],
[+1,+1,-2,+3 , -1,+0,+1,+1],
[+1,+1,-2,+3 , +0,-1,+1,+1],
[-1,+2,-1,+3 , +0,-1,+1,+1],
[-1,+2,-1,+3 , +1,-1,+0,+1],
[-2,+1,+1,+3 , +1,-1,+0,+1],
[-1,-1,+2,+3 , +1,+1,-2,+2],
[+1,-2,+1,+3 , -1,+2,-1,+2],
[+2,-1,-1,+3 , -2,+1,+1,+2],
[+1,+1,-2,+3 , -1,-1,+2,+2],
[-1,+2,-1,+3 , +1,-2,+1,+2],
[-2,+1,+1,+3 , +2,-1,-1,+2],
],'d'),
}
# --------------------------------------------------------------------
@@ -111,11 +103,11 @@ Add columns listing Schmid factors (and optional trace vector of selected system
""", version = scriptID)
latticeChoices = ('fcc','bcc','hex')
lattice_choices = list(slipSystems.keys())
parser.add_option('-l',
'--lattice',
dest = 'lattice', type = 'choice', choices = latticeChoices, metavar='string',
help = 'type of lattice structure [%default] {}'.format(latticeChoices))
dest = 'lattice', type = 'choice', choices = lattice_choices, metavar='string',
help = 'type of lattice structure [%default] {}'.format(lattice_choices))
parser.add_option('--covera',
dest = 'CoverA', type = 'float', metavar = 'float',
help = 'C over A ratio for hexagonal systems [%default]')
@@ -138,88 +130,63 @@ parser.add_option('-o',
parser.set_defaults(force = (0.0,0.0,1.0),
quaternion='orientation',
normal = None,
lattice = latticeChoices[0],
lattice = lattice_choices[0],
CoverA = np.sqrt(8./3.),
)
(options, filenames) = parser.parse_args()
force = np.array(options.force)
force /= np.linalg.norm(force)
if options.normal is not None:
normal = np.array(options.normal)
normal /= np.linalg.norm(normal)
if abs(np.dot(force,normal)) > 1e-3:
parser.error('stress plane normal not orthogonal to force direction')
else:
normal = force
slip_direction = np.zeros((len(slipSystems[options.lattice]),3),'f')
slip_normal = np.zeros_like(slip_direction)
if options.lattice in latticeChoices[:2]:
slip_direction = slipSystems[options.lattice][:,:3]
slip_normal = slipSystems[options.lattice][:,3:]
elif options.lattice == latticeChoices[2]:
# convert 4 Miller index notation of hex to orthogonal 3 Miller index notation
for i in range(len(slip_direction)):
slip_direction[i] = np.array([slipSystems['hex'][i,0]*1.5,
(slipSystems['hex'][i,0] + 2.*slipSystems['hex'][i,1])*0.5*np.sqrt(3),
slipSystems['hex'][i,3]*options.CoverA,
])
slip_normal[i] = np.array([slipSystems['hex'][i,4],
(slipSystems['hex'][i,4] + 2.*slipSystems['hex'][i,5])/np.sqrt(3),
slipSystems['hex'][i,7]/options.CoverA,
])
slip_direction /= np.tile(np.linalg.norm(slip_direction,axis=1),(3,1)).T
slip_normal /= np.tile(np.linalg.norm(slip_normal ,axis=1),(3,1)).T
# --- loop over input files ------------------------------------------------------------------------
if filenames == []: filenames = [None]
force = np.array(options.force)/np.linalg.norm(options.force)
if options.normal is not None:
normal = np.array(options.normal)/np.linalg.norm(options.normal)
if abs(np.dot(force,normal)) > 1e-3:
parser.error('stress plane normal not orthogonal to force direction')
else:
normal = force
if options.lattice in ['bcc','fcc']:
slip_direction = slipSystems[options.lattice][:,:3]
slip_normal = slipSystems[options.lattice][:,3:]
elif options.lattice == 'hex':
slip_direction = np.zeros((len(slipSystems['hex']),3),'d')
slip_normal = np.zeros_like(slip_direction)
# convert 4 Miller index notation of hex to orthogonal 3 Miller index notation
for i in range(len(slip_direction)):
slip_direction[i] = np.array([slipSystems['hex'][i,0]*1.5,
(slipSystems['hex'][i,0] + 2.*slipSystems['hex'][i,1])*0.5*np.sqrt(3),
slipSystems['hex'][i,3]*options.CoverA,
])
slip_normal[i] = np.array([slipSystems['hex'][i,4],
(slipSystems['hex'][i,4] + 2.*slipSystems['hex'][i,5])/np.sqrt(3),
slipSystems['hex'][i,7]/options.CoverA,
])
slip_direction /= np.linalg.norm(slip_direction,axis=1,keepdims=True)
slip_normal /= np.linalg.norm(slip_normal, axis=1,keepdims=True)
labels = ['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
'({normal[0]:.1g}_{normal[1]:.1g}_{normal[2]:.1g})'\
.format(normal = theNormal, direction = theDirection,
) for theNormal,theDirection in zip(slip_normal,slip_direction)]
for name in filenames:
try:
table = damask.ASCIItable(name = name)
except IOError:
continue
damask.util.report(scriptName,name)
damask.util.report(scriptName,name)
# ------------------------------------------ read header ------------------------------------------
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
table.head_read()
o = damask.Rotation.from_quaternion(table.get(options.quaternion))
# ------------------------------------------ sanity checks ----------------------------------------
if not table.label_dimension(options.quaternion) == 4:
damask.util.croak('input {} does not have dimension 4.'.format(options.quaternion))
table.close(dismiss = True) # close ASCIItable and remove empty file
continue
force = np.broadcast_to(force, o.shape+(3,))
normal = np.broadcast_to(normal,o.shape+(3,))
slip_direction = np.broadcast_to(slip_direction,o.shape+slip_direction.shape)
slip_normal = np.broadcast_to(slip_normal, o.shape+slip_normal.shape)
S = np.abs(np.einsum('ijk,ik->ij',slip_direction,(o@force))*
np.einsum('ijk,ik->ij',slip_normal, (o@normal)))
column = table.label_index(options.quaternion)
for i,label in enumerate(labels):
table.add(label,S[:,i],scriptID+' '+' '.join(sys.argv[1:]))
# ------------------------------------------ assemble header ---------------------------------------
table.info_append(scriptID + '\t' + ' '.join(sys.argv[1:]))
table.labels_append(['S[{direction[0]:.1g}_{direction[1]:.1g}_{direction[2]:.1g}]'
'({normal[0]:.1g}_{normal[1]:.1g}_{normal[2]:.1g})'\
.format(normal = theNormal,
direction = theDirection,
) for theNormal,theDirection in zip(slip_normal,slip_direction)])
table.head_write()
# ------------------------------------------ process data ------------------------------------------
outputAlive = True
while outputAlive and table.data_read(): # read next data line of ASCII table
o = damask.Rotation(np.array(list(map(float,table.data[column:column+4]))))
table.data_append( np.abs( np.sum(slip_direction * (o * force) ,axis=1) \
* np.sum(slip_normal * (o * normal),axis=1)))
outputAlive = table.data_write() # output processed line
# ------------------------------------------ output finalization -----------------------------------
table.close() # close ASCII tables
table.to_ASCII(sys.stdout if name is None else name)
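Note: the einsum pair evaluates the classical Schmid factor per slip system, m = |(d . R f)(n . R g)|, with slip direction d, slip-plane normal n, loading direction f, stress-plane normal g (defaulting to f), and crystal orientation R; the rewrite computes it for all table rows at once instead of the former per-line loop.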

View File

@@ -1,58 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add column(s) containing eigenvalues and eigenvectors of requested symmetric tensor column(s).
""", version = scriptID)
parser.add_option('-t','--tensor',
dest = 'tensor',
action = 'extend', metavar = '<string LIST>',
help = 'heading of columns containing tensor field values')
parser.add_option('--no-check',
dest = 'rh',
action = 'store_false',
help = 'skip check for right-handed eigenvector basis')
parser.set_defaults(rh = True,
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for tensor in options.tensor:
t = table.get(tensor).reshape(-1,3,3)
(u,v) = np.linalg.eigh(damask.mechanics.symmetric(t))
if options.rh: v[np.linalg.det(v) < 0.0,:,2] *= -1.0
for i,o in enumerate(['Min','Mid','Max']):
table.add('eigval{}({})'.format(o,tensor),u[:,i], scriptID+' '+' '.join(sys.argv[1:]))
for i,o in enumerate(['Min','Mid','Max']):
table.add('eigvec{}({})'.format(o,tensor),v[:,:,i],scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
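Note: np.linalg.eigh returns eigenvalues in ascending order, which is what justifies the Min/Mid/Max labels; the determinant test flips the third eigenvector wherever the eigenvector basis comes out left-handed, so a right-handed basis is guaranteed unless --no-check is given.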

View File

@@ -1,98 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
def parameters(stretch,strain):
"""Albrecht Bertram: Elasticity and Plasticity of Large Deformations An Introduction (3rd Edition, 2012), p. 102."""
return {
'V#ln': ('V',0.0),
'U#ln': ('U',0.0),
'V#Biot': ('V',-.5),
'U#Biot': ('U',+.5),
'V#Green': ('V',-1.),
'U#Green': ('U',+1.),
}[stretch+'#'+strain]
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Add column(s) containing given strains based on given stretches of requested deformation gradient column(s).
""", version = scriptID)
parser.add_option('-u','--right',
dest = 'right',
action = 'store_true',
help = 'material strains based on right Cauchy--Green deformation, i.e., C and U')
parser.add_option('-v','--left',
dest = 'left',
action = 'store_true',
help = 'spatial strains based on left Cauchy--Green deformation, i.e., B and V')
parser.add_option('-0','--logarithmic',
dest = 'logarithmic',
action = 'store_true',
help = 'calculate logarithmic strain tensor')
parser.add_option('-1','--biot',
dest = 'biot',
action = 'store_true',
help = 'calculate biot strain tensor')
parser.add_option('-2','--green',
dest = 'green',
action = 'store_true',
help = 'calculate green strain tensor')
parser.add_option('-f','--defgrad',
dest = 'defgrad',
action = 'extend',
metavar = '<string LIST>',
help = 'heading(s) of columns containing deformation tensor values [%default]')
parser.set_defaults(
defgrad = ['f'],
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
if len(options.defgrad) > 1:
options.defgrad = options.defgrad[1:]
stretches = []
strains = []
if options.right: stretches.append('U')
if options.left: stretches.append('V')
if options.logarithmic: strains.append('ln')
if options.biot: strains.append('Biot')
if options.green: strains.append('Green')
if options.defgrad is None:
parser.error('no data column specified.')
for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for defgrad in options.defgrad:
F = table.get(defgrad).reshape(-1,3,3)
for theStretch in stretches:
for theStrain in strains:
(t,m) = parameters(theStretch,theStrain)
label = '{}({}){}'.format(theStrain,theStretch,defgrad if defgrad != 'f' else '')
table.add(label,
damask.mechanics.strain_tensor(F,t,m).reshape(-1,9),
scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)
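Note: the (stretch,m) pairs returned by parameters() appear to be the exponents of the Seth-Hill strain family, E_m = (U^(2m) - I)/(2m) for m != 0 and E_0 = ln U (Bertram, p. 102, as cited in the docstring): logarithmic strain is m = 0, Biot is m = +1/2 (material, U) or -1/2 (spatial, V), and Green is m = +1 or -1 accordingly.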

View File

@@ -1,63 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [ASCIItable(s)]', description = """
Rotate vector and/or tensor column data by given angle around given axis.
""", version = scriptID)
parser.add_option('-d', '--data',
dest = 'data',
action = 'extend', metavar = '<string LIST>',
help = 'vector/tensor value(s) label(s)')
parser.add_option('-r', '--rotation',
dest = 'rotation',
type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
help = 'axis and angle to rotate data [%default]')
parser.add_option('--degrees',
dest = 'degrees',
action = 'store_true',
help = 'angles are given in degrees')
parser.set_defaults(rotation = (1.,1.,1.,0), # no rotation about (1,1,1)
degrees = False,
)
(options,filenames) = parser.parse_args()
if filenames == []: filenames = [None]
if options.data is None:
parser.error('no data column specified.')
r = damask.Rotation.fromAxisAngle(options.rotation,options.degrees,normalise=True)
for name in filenames:
damask.util.report(scriptName,name)
table = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
for data in options.data:
d = table.get(data)
if table.shapes[data] == (9,): d=d.reshape(-1,3,3)
for i,l in enumerate(d):
d[i] = r*l
if table.shapes[data] == (9,): d=d.reshape(-1,9)
table.set(data,d,scriptID+' '+' '.join(sys.argv[1:]))
table.to_ASCII(sys.stdout if name is None else name)

View File

@@ -1,89 +0,0 @@
#!/usr/bin/env python3
import os
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(usage='%prog options [ASCIItable(s)]', description = """
Show components of given ASCIItable(s).
""", version = scriptID)
parser.add_option('-a','--head',
dest = 'head',
action = 'store_true',
help = 'output complete header (info + labels)')
parser.add_option('-i','--info',
dest = 'info',
action = 'store_true',
help = 'output info lines')
parser.add_option('-l','--labels',
dest = 'labels',
action = 'store_true',
help = 'output labels')
parser.add_option('-d','--data',
dest = 'data',
action = 'store_true',
help = 'output data')
parser.add_option('-t','--table',
dest = 'table',
action = 'store_true',
help = 'output heading line for proper ASCIItable format')
parser.add_option('--nolabels',
dest = 'labeled',
action = 'store_false',
help = 'table has no labels')
parser.set_defaults(table = False,
head = False,
info = False,
labels = False,
data = False,
labeled = True,
)
(options,filenames) = parser.parse_args()
# --- loop over input files -------------------------------------------------------------------------
if filenames == []: filenames = [None]
for name in filenames:
try:
table = damask.ASCIItable(name = name, labeled = options.labeled, readonly = True)
except IOError:
continue
details = ', '.join(
(['header'] if options.table else []) +
(['info'] if options.head or options.info else []) +
(['labels'] if options.head or options.labels else []) +
(['data'] if options.data else []) +
[]
)
damask.util.report(scriptName,(name if name is not None else '') + ('' if details == '' else ' -- '+details))
# ------------------------------------------ output head ---------------------------------------
table.head_read()
if not (options.head or options.info): table.info_clear()
if not (options.head or (options.labels and options.labeled)): table.labels_clear()
table.head_write(header = options.table)
# ------------------------------------------ output data ---------------------------------------
outputAlive = options.data
while outputAlive and table.data_read(): # read next data line of ASCII table
outputAlive = table.data_write() # output line
table.close()

View File

@@ -84,9 +84,9 @@ if [options.angleaxis,options.quaternion].count(None) == 0:
parser.error('more than one rotation specified.')
if options.angleaxis is not None:
rotation = damask.Rotation.fromAxisAngle(np.array(options.angleaxis),options.degrees,normalise=True)
rotation = damask.Rotation.from_axis_angle(np.array(options.angleaxis),options.degrees,normalise=True)
elif options.quaternion is not None:
rotation = damask.Rotation.fromQuaternion(options.quaternion)
rotation = damask.Rotation.from_quaternion(options.quaternion)
else:
rotation = damask.Rotation()

View File

@@ -1,74 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
# --------------------------------------------------------------------
# MAIN
# --------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Increases or decreases the (three-dimensional) canvas.
Grid can be given as absolute or relative values, e.g. 16 16 16 or 2x 0.5x 32.
""", version = scriptID)
parser.add_option('-g','--grid',
dest = 'grid',
type = 'string', nargs = 3, metavar = ' '.join(['string']*3),
help = 'a,b,c grid of hexahedral box')
parser.add_option('-o','--offset',
dest = 'offset',
type = 'int', nargs = 3, metavar = ' '.join(['int']*3),
help = 'a,b,c offset from old to new origin of grid [%default]')
parser.add_option('-f','--fill',
dest = 'fill',
type = 'float', metavar = 'int',
help = 'background microstructure index, defaults to max microstructure index + 1')
parser.set_defaults(offset = (0,0,0))
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
origin = geom.get_origin()
size = geom.get_size()
old = new = geom.get_grid()
offset = np.asarray(options.offset)
if options.grid is not None:
new = np.maximum(1,
np.array([int(o*float(n.lower().replace('x',''))) if n.lower().endswith('x') \
else int(n) for o,n in zip(old,options.grid)],dtype=int))
canvas = np.full(new,options.fill if options.fill is not None
else np.nanmax(geom.microstructure)+1,geom.microstructure.dtype)
l = np.clip( offset, 0,np.minimum(old +offset,new)) # noqa
r = np.clip( offset+old,0,np.minimum(old*2+offset,new))
L = np.clip(-offset, 0,np.minimum(new -offset,old))
R = np.clip(-offset+new,0,np.minimum(new*2-offset,old))
canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = geom.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
damask.util.croak(geom.update(canvas,origin=origin+offset*size/old,rescale=True))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)
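Note: the four clipped index vectors pair a destination window in the new canvas (l:r) with the matching source window in the old microstructure (L:R), so any offset, even one pushing the old grid partly or fully outside the new canvas, degenerates to an empty copy instead of an out-of-bounds slice. A 1-D sketch of the same arithmetic with illustrative values:
import numpy as np

old,new,offset = np.array([4]),np.array([6]),np.array([-1])
l = np.clip( offset,    0,np.minimum(old +offset,new))         # destination start
r = np.clip( offset+old,0,np.minimum(old*2+offset,new))        # destination end
L = np.clip(-offset,    0,np.minimum(new -offset,old))         # source start
R = np.clip(-offset+new,0,np.minimum(new*2-offset,old))        # source end
print(l,r,L,R)                                                 # [0] [3] [1] [4]: old cells 1..3 land at 0..2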

View File

@@ -1,37 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
Writes vtk file for visualization.
""", version = scriptID)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.util.croak(geom)
if name:
geom.to_vtk(os.path.splitext(name)[0])
else:
sys.stdout.write(geom.to_vtk())

View File

@ -97,7 +97,7 @@ for name in filenames:
dataset = os.path.join(group_pointwise,options.quaternion)
try:
quats = np.reshape(inFile[dataset][...],(np.product(grid),4))
rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in quats]
rot = [damask.Rotation.from_quaternion(q,True,P=+1) for q in quats]
except KeyError:
errors.append('Pointwise orientation (quaternion) data ({}) not readable'.format(dataset))
@@ -123,7 +123,7 @@ for name in filenames:
dataset = os.path.join(group_average,options.quaternion)
try:
rot = [damask.Rotation.fromQuaternion(q,True,P=+1) for q in inFile[dataset][...][1:]] # skip first entry (unindexed)
rot = [damask.Rotation.from_quaternion(q,True,P=+1) for q in inFile[dataset][...][1:]] # skip first entry (unindexed)
except KeyError:
errors.append('Average orientation data ({}) not readable'.format(dataset))
@@ -140,7 +140,7 @@ for name in filenames:
config_header = ['<texture>']
for i in range(np.nanmax(microstructure)):
config_header += ['[{}{}]'.format(label,i+1),
'(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*rot[i].asEulers(degrees = True)),
'(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*rot[i].as_Eulers(degrees = True)),
]
config_header += ['<microstructure>']
for i in range(np.nanmax(microstructure)):

View File

@@ -89,7 +89,7 @@ for name in filenames:
for i,data in enumerate(unique):
ori = damask.Rotation(data[0:4])
config_header += ['[Grain{}]'.format(i+1),
'(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*ori.asEulers(degrees = True)),
'(gauss)\tphi1 {:.2f}\tPhi {:.2f}\tphi2 {:.2f}'.format(*ori.as_Eulers(degrees = True)),
]
if options.axes is not None: config_header += ['axes\t{} {} {}'.format(*options.axes)]

View File

@@ -1,35 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [geomfile(s)]', description = """
Renumber sorted microstructure indices to 1,...,N.
""", version=scriptID)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
damask.util.croak(geom.renumber())
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)

View File

@@ -1,98 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
from scipy import ndimage
import numpy as np
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Rotates original microstructure and embeds it into buffer material.
""", version=scriptID)
parser.add_option('-r', '--rotation',
dest='rotation',
type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
help = 'rotation given as axis and angle')
parser.add_option('-e', '--eulers',
dest = 'eulers',
type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
help = 'rotation given as Euler angles')
parser.add_option('-d', '--degrees',
dest = 'degrees',
action = 'store_true',
help = 'Euler angles/axis angle are given in degrees')
parser.add_option('-m', '--matrix',
dest = 'matrix',
type = 'float', nargs = 9, metavar = ' '.join(['float']*9),
help = 'rotation given as matrix')
parser.add_option('-q', '--quaternion',
dest = 'quaternion',
type = 'float', nargs = 4, metavar = ' '.join(['float']*4),
help = 'rotation given as quaternion')
parser.add_option('-f', '--fill',
dest = 'fill',
type = 'float', metavar = 'int',
help = 'background microstructure index, defaults to max microstructure index + 1')
parser.set_defaults(degrees = False)
(options, filenames) = parser.parse_args()
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) < 3:
parser.error('more than one rotation specified.')
if [options.rotation,options.eulers,options.matrix,options.quaternion].count(None) > 3:
parser.error('no rotation specified.')
if options.quaternion is not None:
rot = damask.Rotation.fromQuaternion(np.array(options.quaternion)) # we might need P=+1 here, too...
if options.rotation is not None:
rot = damask.Rotation.fromAxisAngle(np.array(options.rotation),degrees=options.degrees,normalise=True,P=+1)
if options.matrix is not None:
rot = damask.Rotation.fromMatrix(np.array(options.Matrix))
if options.eulers is not None:
rot = damask.Rotation.fromEulers(np.array(options.eulers),degrees=options.degrees)
eulers = rot.asEulers(degrees=True)
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
size = geom.get_size()
grid = geom.get_grid()
origin = geom.get_origin()
microstructure = geom.get_microstructure()
fill = np.nanmax(microstructure)+1 if options.fill is None else options.fill
dtype = float if np.isnan(fill) or int(fill) != fill or microstructure.dtype==np.float else int
# These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
# this seems to be ok, see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf
microstructure = ndimage.rotate(microstructure,eulers[2],(0,1),order=0,
prefilter=False,output=dtype,cval=fill) # rotation around z
microstructure = ndimage.rotate(microstructure,eulers[1],(1,2),order=0,
prefilter=False,output=dtype,cval=fill) # rotation around x
microstructure = ndimage.rotate(microstructure,eulers[0],(0,1),order=0,
prefilter=False,output=dtype,cval=fill) # rotation around z
damask.util.croak(geom.update(microstructure,origin=origin-(np.asarray(microstructure.shape)-grid)/2*size/grid,rescale=True))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)
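As a side note on the three chained ndimage.rotate calls above: they realize a z-x-z Euler rotation as successive in-plane image rotations applied in the reference frame. A minimal sketch of one such step, with a toy array and a made-up angle as assumptions:

import numpy as np
from scipy import ndimage

a = np.zeros((4,4,4), dtype=int)   # toy microstructure, one marked voxel
a[1,1,1] = 5
fill = a.max() + 1                 # background index for padded corners

# order=0 keeps indices intact (no interpolation), cval fills the enlarged box
rotated = ndimage.rotate(a, 30, axes=(0,1), order=0, prefilter=False, cval=fill)
print(rotated.shape)               # grid grows to contain the rotated volume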

View File

@ -1,61 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from optparse import OptionParser
from io import StringIO
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog options [geomfile(s)]', description = """
Translate microstructure indices (shift or substitute) and/or geometry origin.
""", version=scriptID)
parser.add_option('-o', '--origin',
dest = 'origin',
type = 'float', nargs = 3, metavar = ' '.join(['float']*3),
help = 'offset from old to new origin of grid')
parser.add_option('-m', '--microstructure',
dest = 'microstructure',
type = 'int', metavar = 'int',
help = 'offset from old to new microstructure indices (after substitution)')
parser.add_option('-s', '--substitute',
dest = 'substitute',
action = 'extend', metavar = '<string LIST>',
help = 'substitutions of microstructure indices from,to,from,to,...')
parser.set_defaults(origin = (0.0,0.0,0.0),
microstructure = 0,
substitute = []
)
(options, filenames) = parser.parse_args()
sub = list(map(int,options.substitute))
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
geom = damask.Geom.from_file(StringIO(''.join(sys.stdin.read())) if name is None else name)
substituted = geom.get_microstructure()
for old,new in zip(sub[0::2],sub[1::2]): substituted[geom.microstructure==old] = new # substitute microstructure indices
substituted += options.microstructure # constant shift
damask.util.croak(geom.update(substituted,origin=geom.get_origin()+options.origin))
geom.add_comments(scriptID + ' ' + ' '.join(sys.argv[1:]))
geom.to_file(sys.stdout if name is None else name,pack=False)

View File

@ -1,39 +0,0 @@
#!/usr/bin/env python3
import os
import sys
from io import StringIO
from optparse import OptionParser
import damask
scriptName = os.path.splitext(os.path.basename(__file__))[0]
scriptID = ' '.join([scriptName,damask.version])
#--------------------------------------------------------------------------------------------------
# MAIN
#--------------------------------------------------------------------------------------------------
parser = OptionParser(option_class=damask.extendableOption, usage='%prog [seedfile(s)]', description = """
Writes vtk file for visualization.
""", version = scriptID)
(options, filenames) = parser.parse_args()
if filenames == []: filenames = [None]
for name in filenames:
damask.util.report(scriptName,name)
seeds = damask.Table.from_ASCII(StringIO(''.join(sys.stdin.read())) if name is None else name)
v = damask.VTK.from_polyData(seeds.get('pos'))
for label in seeds.shapes.keys():
if label == 'pos': continue
v.add(seeds.get(label),label)
if name:
v.write(os.path.splitext(name)[0])
else:
sys.stdout.write(v.__repr__())

View File

@ -1,4 +1,4 @@
"""Main aggregator."""
"""Tools for pre and post processing of DAMASK simulations."""
import os as _os
import re as _re

View File

@ -242,7 +242,7 @@ class Geom:
def get_grid(self):
"""Return the grid discretization."""
return np.array(self.microstructure.shape)
return np.asarray(self.microstructure.shape)
def get_homogenization(self):
@ -533,8 +533,8 @@ class Geom:
Parameters
----------
grid : iterable of int
new grid dimension
grid : numpy.ndarray of shape (3)
number of grid points in x,y,z direction.
"""
#self.add_comments('geom.py:scale v{}'.format(version)
@ -581,3 +581,90 @@ class Geom:
#self.add_comments('geom.py:renumber v{}'.format(version)
return self.update(renumbered)
def rotate(self,R,fill=None):
"""
Rotate microstructure (pad if required).
Parameters
----------
R : damask.Rotation
rotation to apply to the microstructure.
fill : int or float, optional
microstructure index to fill the corners. Defaults to microstructure.max() + 1.
"""
if fill is None: fill = np.nanmax(self.microstructure) + 1
dtype = float if np.isnan(fill) or int(fill) != fill or self.microstructure.dtype==np.float else int
Eulers = R.as_Eulers(degrees=True)
microstructure_in = self.get_microstructure()
# These rotations are always applied in the reference coordinate system, i.e. (z,x,z) not (z,x',z'')
# see https://www.cs.utexas.edu/~theshark/courses/cs354/lectures/cs354-14.pdf
for angle,axes in zip(Eulers[::-1], [(0,1),(1,2),(0,1)]):
microstructure_out = ndimage.rotate(microstructure_in,angle,axes,order=0,
prefilter=False,output=dtype,cval=fill)
if np.prod(microstructure_in.shape) == np.prod(microstructure_out.shape):
# avoid scipy interpolation errors for rotations close to multiples of 90°
microstructure_in = np.rot90(microstructure_in,k=np.rint(angle/90.).astype(int),axes=axes)
else:
microstructure_in = microstructure_out
origin = self.origin-(np.asarray(microstructure_in.shape)-self.grid)*.5 * self.size/self.grid
#self.add_comments('geom.py:rotate v{}'.format(version)
return self.update(microstructure_in,origin=origin,rescale=True)
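The shape comparison in rotate falls back to numpy.rot90 whenever scipy leaves the grid size unchanged, i.e. for angles that are numerically multiples of 90 degrees, where resampling gains nothing and may shift voxels. A small sketch of that branch (test array chosen arbitrarily):

import numpy as np
from scipy import ndimage

a = np.arange(27).reshape(3,3,3)
out = ndimage.rotate(a, 90, axes=(0,1), order=0, prefilter=False)
if out.shape == a.shape:                                          # quarter turn: no padding needed
    out = np.rot90(a, k=np.rint(90/90.).astype(int), axes=(0,1))  # exact, interpolation-free
print(out.shape)                                                  # (3, 3, 3)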
def canvas(self,grid=None,offset=None,fill=None):
"""
Crop or enlarge/pad microstructure.
Parameters
----------
grid : numpy.ndarray of shape (3)
number of grid points in x,y,z direction.
offset : numpy.ndarray of shape (3)
offset (measured in grid points) from old to new microstructure[0,0,0].
fill : int or float, optional
microstructure index to fill the corners. Defaults to microstructure.max() + 1.
"""
if fill is None: fill = np.nanmax(self.microstructure) + 1
if offset is None: offset = 0
dtype = float if np.isnan(fill) or int(fill) != fill or self.microstructure.dtype==np.float else int
canvas = np.full(self.grid if grid is None else grid,
fill if fill is not None else np.nanmax(self.microstructure)+1,dtype)
l = np.clip( offset, 0,np.minimum(self.grid +offset,grid)) # noqa
r = np.clip( offset+self.grid,0,np.minimum(self.grid*2+offset,grid))
L = np.clip(-offset, 0,np.minimum(grid -offset,self.grid))
R = np.clip(-offset+grid, 0,np.minimum(grid*2 -offset,self.grid))
canvas[l[0]:r[0],l[1]:r[1],l[2]:r[2]] = self.microstructure[L[0]:R[0],L[1]:R[1],L[2]:R[2]]
#self.add_comments('geom.py:canvas v{}'.format(version)
return self.update(canvas,origin=self.origin+offset*self.size/self.grid,rescale=True)
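The four clip expressions in canvas are the 3-D version of pasting one interval into another: l/r bound the destination window in the new grid, L/R the source window in the old one. A 1-D sketch of the same arithmetic, with sizes and offset invented for illustration:

import numpy as np

old, new, offset = np.array([5]), np.array([8]), np.array([2])  # grid sizes and shift
l = np.clip( offset,     0, np.minimum(old   + offset, new))
r = np.clip( offset+old, 0, np.minimum(old*2 + offset, new))
L = np.clip(-offset,     0, np.minimum(new   - offset, old))
R = np.clip(-offset+new, 0, np.minimum(new*2 - offset, old))

canvas = np.full(new, -1)                                       # -1 stands in for 'fill'
canvas[l[0]:r[0]] = np.arange(old[0])[L[0]:R[0]]
print(canvas)                                                   # [-1 -1  0  1  2  3  4 -1]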
def substitute(self,from_microstructure,to_microstructure):
"""
Substitute microstructure indices.
Parameters
----------
from_microstructure : iterable of ints
microstructure indices to be substituted.
to_microstructure : iterable of ints
new microstructure indices.
"""
substituted = self.get_microstructure()
for from_ms,to_ms in zip(from_microstructure,to_microstructure):
substituted[self.microstructure==from_ms] = to_ms
#self.add_comments('geom.py:substitute v{}'.format(version)
return self.update(substituted)

View File

@ -229,19 +229,20 @@ class Symmetry:
Return inverse pole figure color if requested.
Bases are computed from
basis = {'cubic' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
[1.,0.,1.]/np.sqrt(2.), # direction of green
[1.,1.,1.]/np.sqrt(3.)]).T), # direction of blue
'hexagonal' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
[1.,0.,0.], # direction of green
[np.sqrt(3.),1.,0.]/np.sqrt(4.)]).T), # direction of blue
'tetragonal' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
[1.,0.,0.], # direction of green
[1.,1.,0.]/np.sqrt(2.)]).T), # direction of blue
'orthorhombic' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
[1.,0.,0.], # direction of green
[0.,1.,0.]]).T), # direction of blue
}
>>> basis = {'cubic' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
... [1.,0.,1.]/np.sqrt(2.), # direction of green
... [1.,1.,1.]/np.sqrt(3.)]).T), # direction of blue
... 'hexagonal' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
... [1.,0.,0.], # direction of green
... [np.sqrt(3.),1.,0.]/np.sqrt(4.)]).T), # direction of blue
... 'tetragonal' : np.linalg.inv(np.array([[0.,0.,1.], # direction of red
... [1.,0.,0.], # direction of green
... [1.,1.,0.]/np.sqrt(2.)]).T), # direction of blue
... 'orthorhombic': np.linalg.inv(np.array([[0.,0.,1.], # direction of red
... [1.,0.,0.], # direction of green
... [0.,1.,0.]]).T), # direction of blue
... }
"""
if self.lattice == 'cubic':
basis = {'improper':np.array([ [-1. , 0. , 1. ],
@ -634,6 +635,6 @@ class Lattice:
otherDir = miller[otherDir_id]/ np.linalg.norm(miller[otherDir_id])
otherMatrix = np.array([otherDir,np.cross(otherPlane,otherDir),otherPlane])
r['rotations'].append(Rotation.fromMatrix(np.dot(otherMatrix.T,myMatrix)))
r['rotations'].append(Rotation.from_matrix(np.dot(otherMatrix.T,myMatrix)))
return r

View File

@ -8,6 +8,7 @@ class Orientation:
Crystallographic orientation.
A crystallographic orientation contains a rotation and a lattice.
"""
__slots__ = ['rotation','lattice']
@ -36,7 +37,7 @@ class Orientation:
if isinstance(rotation, Rotation):
self.rotation = rotation
else:
self.rotation = Rotation.fromQuaternion(rotation) # assume quaternion
self.rotation = Rotation.from_quaternion(rotation) # assume quaternion
if self.rotation.quaternion.shape != (4,):
raise NotImplementedError('Support for multiple rotations missing')
@ -49,8 +50,10 @@ class Orientation:
Disorientation between myself and given other orientation.
Rotation axis falls into SST if SST == True.
(Currently requires same symmetry for both orientations.
Look into A. Heinz and P. Neumann 1991 for cases with differing sym.)
Currently requires same symmetry for both orientations.
Look into A. Heinz and P. Neumann 1991 for cases with differing sym.
"""
if self.lattice.symmetry != other.lattice.symmetry:
raise NotImplementedError('disorientation between different symmetry classes not supported yet.')
@ -65,8 +68,8 @@ class Orientation:
r = b*aInv
for k in range(2):
r.inverse()
breaker = self.lattice.symmetry.inFZ(r.asRodrigues(vector=True)) \
and (not SST or other.lattice.symmetry.inDisorientationSST(r.asRodrigues(vector=True)))
breaker = self.lattice.symmetry.inFZ(r.as_Rodrigues(vector=True)) \
and (not SST or other.lattice.symmetry.inDisorientationSST(r.as_Rodrigues(vector=True)))
if breaker: break
if breaker: break
if breaker: break
@ -75,7 +78,7 @@ class Orientation:
# ... own sym, other sym,
# self-->other: True, self<--other: False
def inFZ(self):
return self.lattice.symmetry.inFZ(self.rotation.asRodrigues(vector=True))
return self.lattice.symmetry.inFZ(self.rotation.as_Rodrigues(vector=True))
def equivalentOrientations(self,members=[]):
@ -97,7 +100,7 @@ class Orientation:
def reduced(self):
"""Transform orientation to fall into fundamental zone according to symmetry."""
for me in self.equivalentOrientations():
if self.lattice.symmetry.inFZ(me.rotation.asRodrigues(vector=True)): break
if self.lattice.symmetry.inFZ(me.rotation.as_Rodrigues(vector=True)): break
return self.__class__(me.rotation,self.lattice)

View File

@ -1,7 +1,11 @@
import multiprocessing
import re
import inspect
import glob
import os
import datetime
import xml.etree.ElementTree as ET
import xml.dom.minidom
from functools import partial
import h5py
@ -32,7 +36,7 @@ class Result:
Parameters
----------
fname : str
name of the DADF5 file to be openend.
name of the DADF5 file to be opened.
"""
with h5py.File(fname,'r') as f:
@ -65,6 +69,10 @@ class Result:
self.materialpoints = [m.decode() for m in np.unique(f['mapping/cellResults/materialpoint']['Name'])]
self.constituents = [c.decode() for c in np.unique(f['mapping/cellResults/constituent'] ['Name'])]
# faster, but does not work with (deprecated) DADF5_postResults
#self.materialpoints = [m for m in f['inc0/materialpoint']]
#self.constituents = [c for c in f['inc0/constituent']]
self.con_physics = []
for c in self.constituents:
self.con_physics += f['/'.join([self.increments[0],'constituent',c])].keys()
@ -80,7 +88,9 @@ class Result:
'con_physics': self.con_physics, 'mat_physics': self.mat_physics
}
self.fname = fname
self.fname = os.path.abspath(fname)
self._allow_overwrite = False
def __repr__(self):
@ -117,7 +127,7 @@ class Result:
"""
# allow True/False and string arguments
if datasets is True:
if datasets is True:
datasets = ['*']
elif datasets is False:
datasets = []
@ -136,6 +146,7 @@ class Result:
choice = []
for c in iterator:
idx = np.searchsorted(self.times,c)
if idx >= len(self.times): continue
if np.isclose(c,self.times[idx]):
choice.append(self.increments[idx])
elif np.isclose(c,self.times[idx+1]):
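np.searchsorted returns the insertion index of the requested time, so the candidate increments sit at that index or its neighbor; the added bounds check guards against times beyond the final increment. A minimal sketch of this tolerant float lookup (the times array is a placeholder):

import numpy as np

times = np.array([0.0, 10.0, 20.0, 30.0])
c = 20.0 - 1e-12                               # requested time, off by float noise
idx = np.searchsorted(times, c)                # first index with times[idx] >= c
if idx < len(times) and np.isclose(c, times[idx]):
    print('matched increment', idx)
elif idx > 0 and np.isclose(c, times[idx-1]):  # other bracketing neighbor
    print('matched increment', idx-1)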
@ -156,6 +167,16 @@ class Result:
self.selection[what] = diff_sorted
def enable_overwrite(self):
print(util.bcolors().WARNING,util.bcolors().BOLD,
'Warning: Enabled overwrite of existing datasets!',
util.bcolors().ENDC)
self._allow_overwrite = True
def disable_overwrite(self):
self._allow_overwrite = False
def incs_in_range(self,start,end):
selected = []
for i,inc in enumerate([int(i[3:]) for i in self.increments]):
@ -316,9 +337,10 @@ class Result:
Return groups that contain all requested datasets.
Only groups within
- inc?????/constituent/*_*/*
- inc?????/materialpoint/*_*/*
- inc?????/geometry/*
- inc*/constituent/*/*
- inc*/materialpoint/*/*
- inc*/geometry/*
are considered as they contain user-relevant data.
Single strings will be treated as list with one entry.
@ -482,7 +504,7 @@ class Result:
'meta': {
'Unit': x['meta']['Unit'],
'Description': 'Absolute value of {} ({})'.format(x['label'],x['meta']['Description']),
'Creator': 'result.py:add_abs v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_absolute(self,x):
@ -510,7 +532,7 @@ class Result:
'meta': {
'Unit': kwargs['unit'],
'Description': '{} (formula: {})'.format(kwargs['description'],kwargs['formula']),
'Creator': 'result.py:add_calculation v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_calculation(self,label,formula,unit='n/a',description=None,vectorized=True):
@ -546,10 +568,9 @@ class Result:
'label': 'sigma',
'meta': {
'Unit': P['meta']['Unit'],
'Description': 'Cauchy stress calculated from {} ({}) '.format(P['label'],
P['meta']['Description'])+\
'and {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_Cauchy v{}'.format(version)
'Description': 'Cauchy stress calculated from {} ({}) and {} ({})'\
.format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
'Creator': inspect.stack()[0][3][1:]
}
}
def add_Cauchy(self,P='P',F='F'):
@ -575,7 +596,7 @@ class Result:
'meta': {
'Unit': T['meta']['Unit'],
'Description': 'Determinant of tensor {} ({})'.format(T['label'],T['meta']['Description']),
'Creator': 'result.py:add_determinant v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_determinant(self,T):
@ -593,16 +614,13 @@ class Result:
@staticmethod
def _add_deviator(T):
if not T['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.deviatoric_part(T['data']),
'label': 's_{}'.format(T['label']),
'meta': {
'Unit': T['meta']['Unit'],
'Description': 'Deviator of tensor {} ({})'.format(T['label'],T['meta']['Description']),
'Creator': 'result.py:add_deviator v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_deviator(self,T):
@ -619,17 +637,24 @@ class Result:
@staticmethod
def _add_eigenvalue(T_sym):
def _add_eigenvalue(T_sym,eigenvalue):
if eigenvalue == 'max':
label,p = 'Maximum',2
elif eigenvalue == 'mid':
label,p = 'Intermediate',1
elif eigenvalue == 'min':
label,p = 'Minimum',0
return {
'data': mechanics.eigenvalues(T_sym['data']),
'label': 'lambda({})'.format(T_sym['label']),
'data': mechanics.eigenvalues(T_sym['data'])[:,p],
'label': 'lambda_{}({})'.format(eigenvalue,T_sym['label']),
'meta' : {
'Unit': T_sym['meta']['Unit'],
'Description': 'Eigenvalues of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_eigenvalues v{}'.format(version)
'Description': '{} eigenvalue of {} ({})'.format(label,T_sym['label'],T_sym['meta']['Description']),
'Creator': inspect.stack()[0][3][1:]
}
}
def add_eigenvalues(self,T_sym):
def add_eigenvalue(self,T_sym,eigenvalue='max'):
"""
Add eigenvalues of symmetric tensor.
@ -637,33 +662,46 @@ class Result:
----------
T_sym : str
Label of symmetric tensor dataset.
eigenvalue : str, optional
Eigenvalue. Select from max, mid, min. Defaults to max.
"""
self._add_generic_pointwise(self._add_eigenvalue,{'T_sym':T_sym})
self._add_generic_pointwise(self._add_eigenvalue,{'T_sym':T_sym},{'eigenvalue':eigenvalue})
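Mapping max/mid/min to p = 2/1/0 relies on the eigenvalues arriving in ascending order, as numpy.linalg.eigvalsh guarantees (mechanics.eigenvalues presumably wraps it). A standalone sketch of the selection on a made-up batch of symmetric tensors:

import numpy as np

T = np.random.rand(4,3,3)
T_sym = (T + T.transpose(0,2,1)) / 2.      # symmetrize a random batch
w = np.linalg.eigvalsh(T_sym)              # shape (4,3), ascending per tensor
p = {'min': 0, 'mid': 1, 'max': 2}['max']
print(w[:,p])                              # largest eigenvalue of each tensor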
@staticmethod
def _add_eigenvector(T_sym):
def _add_eigenvector(T_sym,eigenvalue):
if eigenvalue == 'max':
label,p = 'maximum',2
elif eigenvalue == 'mid':
label,p = 'intermediate',1
elif eigenvalue == 'min':
label,p = 'minimum',0
return {
'data': mechanics.eigenvectors(T_sym['data']),
'label': 'v({})'.format(T_sym['label']),
'data': mechanics.eigenvectors(T_sym['data'])[:,p],
'label': 'v_{}({})'.format(eigenvalue,T_sym['label']),
'meta' : {
'Unit': '1',
'Description': 'Eigenvectors of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_eigenvectors v{}'.format(version)
'Description': 'Eigenvector corresponding to {} eigenvalue of {} ({})'\
.format(label,T_sym['label'],T_sym['meta']['Description']),
'Creator': inspect.stack()[0][3][1:]
}
}
def add_eigenvectors(self,T_sym):
def add_eigenvector(self,T_sym,eigenvalue='max'):
"""
Add eigenvectors of symmetric tensor.
Add eigenvector of symmetric tensor.
Parameters
----------
T_sym : str
Label of symmetric tensor dataset.
eigenvalue : str, optional
Eigenvalue to which the eigenvector corresponds. Select from
max, mid, min. Defaults to max.
"""
self._add_generic_pointwise(self._add_eigenvector,{'T_sym':T_sym})
self._add_generic_pointwise(self._add_eigenvector,{'T_sym':T_sym},{'eigenvalue':eigenvalue})
@staticmethod
@ -686,7 +724,7 @@ class Result:
'Unit': 'RGB (8bit)',
'Lattice': lattice,
'Description': 'Inverse Pole Figure (IPF) colors along sample direction [{} {} {}]'.format(*m),
'Creator': 'result.py:add_IPFcolor v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_IPFcolor(self,q,l):
@ -712,7 +750,7 @@ class Result:
'meta': {
'Unit': T_sym['meta']['Unit'],
'Description': 'Maximum shear component of {} ({})'.format(T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_maximum_shear v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_maximum_shear(self,T_sym):
@ -739,7 +777,7 @@ class Result:
'meta': {
'Unit': T_sym['meta']['Unit'],
'Description': 'Mises equivalent {} of {} ({})'.format(t,T_sym['label'],T_sym['meta']['Description']),
'Creator': 'result.py:add_Mises v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_Mises(self,T_sym):
@ -775,7 +813,7 @@ class Result:
'meta': {
'Unit': x['meta']['Unit'],
'Description': '{}-norm of {} {} ({})'.format(o,t,x['label'],x['meta']['Description']),
'Creator': 'result.py:add_norm v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_norm(self,x,ord=None):
@ -800,10 +838,9 @@ class Result:
'label': 'S',
'meta': {
'Unit': P['meta']['Unit'],
'Description': '2. Kirchhoff stress calculated from {} ({}) '.format(P['label'],
P['meta']['Description'])+\
'and {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_PK2 v{}'.format(version)
'Description': '2. Piola-Kirchhoff stress calculated from {} ({}) and {} ({})'\
.format(P['label'],P['meta']['Description'],F['label'],F['meta']['Description']),
'Creator': inspect.stack()[0][3][1:]
}
}
def add_PK2(self,P='P',F='F'):
@ -826,22 +863,20 @@ class Result:
pole = np.array(p)
unit_pole = pole/np.linalg.norm(pole)
m = util.scale_to_coprime(pole)
coords = np.empty((len(q['data']),2))
for i,qu in enumerate(q['data']):
o = Rotation(np.array([qu['w'],qu['x'],qu['y'],qu['z']]))
rotatedPole = o*unit_pole # rotate pole according to crystal orientation
(x,y) = rotatedPole[0:2]/(1.+abs(unit_pole[2])) # stereographic projection
coords[i] = [np.sqrt(x*x+y*y),np.arctan2(y,x)] if polar else [x,y]
rot = Rotation(q['data'].view(np.double).reshape(-1,4))
rotatedPole = rot @ np.broadcast_to(unit_pole,rot.shape+(3,)) # rotate pole according to crystal orientation
xy = rotatedPole[:,0:2]/(1.+abs(unit_pole[2])) # stereographic projection
coords = xy if not polar else \
np.block([np.sqrt(xy[:,0:1]*xy[:,0:1]+xy[:,1:2]*xy[:,1:2]),np.arctan2(xy[:,1:2],xy[:,0:1])])
return {
'data': coords,
'label': 'p^{}_[{} {} {})'.format(u'rφ' if polar else 'xy',*m),
'meta' : {
'Unit': '1',
'Unit': '1',
'Description': '{} coordinates of stereographic projection of pole (direction/plane) in crystal frame'\
.format('Polar' if polar else 'Cartesian'),
'Creator' : 'result.py:add_pole v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_pole(self,q,p,polar=False):
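The rewritten _add_pole above trades the per-quaternion Python loop for one batched rotation plus a vectorized stereographic projection; note it scales by the z-component of the unrotated pole. A self-contained numpy sketch of the textbook projection, which divides by each rotated direction's own z (input directions are random placeholders):

import numpy as np

pole = np.random.rand(5,3) - 0.5
pole /= np.linalg.norm(pole, axis=1, keepdims=True)               # unit directions

xy = pole[:,0:2] / (1. + abs(pole[:,2:3]))                        # stereographic projection
polar = np.block([np.sqrt(np.sum(xy**2, axis=1, keepdims=True)),  # radius
                  np.arctan2(xy[:,1:2], xy[:,0:1])])              # azimuth
print(xy.shape, polar.shape)                                      # (5, 2) (5, 2)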
@ -863,15 +898,13 @@ class Result:
@staticmethod
def _add_rotational_part(F):
if not F['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.rotational_part(F['data']),
'label': 'R({})'.format(F['label']),
'meta': {
'Unit': F['meta']['Unit'],
'Description': 'Rotational part of {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_rotational_part v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_rotational_part(self,F):
@ -889,16 +922,13 @@ class Result:
@staticmethod
def _add_spherical(T):
if not T['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.spherical_part(T['data']),
'label': 'p_{}'.format(T['label']),
'meta': {
'Unit': T['meta']['Unit'],
'Description': 'Spherical component of tensor {} ({})'.format(T['label'],T['meta']['Description']),
'Creator': 'result.py:add_spherical v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_spherical(self,T):
@ -916,16 +946,13 @@ class Result:
@staticmethod
def _add_strain_tensor(F,t,m):
if not F['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.strain_tensor(F['data'],t,m),
'label': 'epsilon_{}^{}({})'.format(t,m,F['label']),
'meta': {
'Unit': F['meta']['Unit'],
'Description': 'Strain tensor of {} ({})'.format(F['label'],F['meta']['Description']),
'Creator': 'result.py:add_strain_tensor v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_strain_tensor(self,F='F',t='V',m=0.0):
@ -950,9 +977,6 @@ class Result:
@staticmethod
def _add_stretch_tensor(F,t):
if not F['data'].shape[1:] == (3,3):
raise ValueError
return {
'data': mechanics.left_stretch(F['data']) if t == 'V' else mechanics.right_stretch(F['data']),
'label': '{}({})'.format(t,F['label']),
@ -960,7 +984,7 @@ class Result:
'Unit': F['meta']['Unit'],
'Description': '{} stretch tensor of {} ({})'.format('Left' if t == 'V' else 'Right',
F['label'],F['meta']['Description']),
'Creator': 'result.py:add_stretch_tensor v{}'.format(version)
'Creator': inspect.stack()[0][3][1:]
}
}
def add_stretch_tensor(self,F='F',t='V'):
@ -1023,11 +1047,23 @@ class Result:
continue
lock.acquire()
with h5py.File(self.fname, 'a') as f:
try: # ToDo: Replace if exists?
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
try:
if self._allow_overwrite and result[0]+'/'+result[1]['label'] in f:
dataset = f[result[0]+'/'+result[1]['label']]
dataset[...] = result[1]['data']
dataset.attrs['Overwritten'] = 'Yes'.encode()
else:
dataset = f[result[0]].create_dataset(result[1]['label'],data=result[1]['data'])
now = datetime.datetime.now().astimezone()
dataset.attrs['Created'] = now.strftime('%Y-%m-%d %H:%M:%S%z').encode()
for l,v in result[1]['meta'].items():
dataset.attrs[l]=v.encode()
except OSError as err:
creator = 'damask.Result.{} v{}'.format(dataset.attrs['Creator'].decode(),version)
dataset.attrs['Creator'] = creator.encode()
except (OSError,RuntimeError) as err:
print('Could not add dataset: {}.'.format(err))
lock.release()
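The overwrite branch relies on h5py's in-place assignment, which requires the new data to match the existing dataset's shape; otherwise the dataset must be recreated. A minimal sketch of this pattern outside the worker pool (file name and group path are placeholders):

import h5py
import numpy as np

data = np.arange(6.)
with h5py.File('scratch.hdf5','a') as f:                   # placeholder file
    path = 'inc0/demo'
    if path in f and f[path].shape == data.shape:
        f[path][...] = data                                # overwrite values in place
        f[path].attrs['Overwritten'] = 'Yes'.encode()
    else:
        if path in f: del f[path]                          # shape changed: recreate
        f.create_dataset(path, data=data)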
@ -1035,6 +1071,102 @@ class Result:
pool.join()
def write_XDMF(self):
"""
Write XDMF file to directly visualize data in DADF5 file.
This works only for scalar, 3-vector and 3x3-tensor data.
Selection is not taken into account.
"""
if len(self.constituents) != 1 or not self.structured:
raise NotImplementedError('XDMF only available for grid results with 1 constituent.')
xdmf=ET.Element('Xdmf')
xdmf.attrib={'Version': '2.0',
'xmlns:xi': 'http://www.w3.org/2001/XInclude'}
domain=ET.SubElement(xdmf, 'Domain')
collection = ET.SubElement(domain, 'Grid')
collection.attrib={'GridType': 'Collection',
'CollectionType': 'Temporal'}
time = ET.SubElement(collection, 'Time')
time.attrib={'TimeType': 'List'}
time_data = ET.SubElement(time, 'DataItem')
time_data.attrib={'Format': 'XML',
'NumberType': 'Float',
'Dimensions': '{}'.format(len(self.times))}
time_data.text = ' '.join(map(str,self.times))
attributes = []
data_items = []
for inc in self.increments:
grid=ET.SubElement(collection,'Grid')
grid.attrib = {'GridType': 'Uniform',
'Name': inc}
topology=ET.SubElement(grid, 'Topology')
topology.attrib={'TopologyType': '3DCoRectMesh',
'Dimensions': '{} {} {}'.format(*self.grid+1)}
geometry=ET.SubElement(grid, 'Geometry')
geometry.attrib={'GeometryType':'Origin_DxDyDz'}
origin=ET.SubElement(geometry, 'DataItem')
origin.attrib={'Format': 'XML',
'NumberType': 'Float',
'Dimensions': '3'}
origin.text="{} {} {}".format(*self.origin)
delta=ET.SubElement(geometry, 'DataItem')
delta.attrib={'Format': 'XML',
'NumberType': 'Float',
'Dimensions': '3'}
delta.text="{} {} {}".format(*(self.size/self.grid))
with h5py.File(self.fname,'r') as f:
attributes.append(ET.SubElement(grid, 'Attribute'))
attributes[-1].attrib={'Name': 'u',
'Center': 'Node',
'AttributeType': 'Vector'}
data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
data_items[-1].attrib={'Format': 'HDF',
'Precision': '8',
'Dimensions': '{} {} {} 3'.format(*(self.grid+1))}
data_items[-1].text='{}:/{}/geometry/u_n'.format(os.path.split(self.fname)[1],inc)
for o,p in zip(['constituents','materialpoints'],['con_physics','mat_physics']):
for oo in getattr(self,o):
for pp in getattr(self,p):
g = '/'.join([inc,o[:-1],oo,pp])
for l in f[g]:
name = '/'.join([g,l])
shape = f[name].shape[1:]
dtype = f[name].dtype
prec = f[name].dtype.itemsize
if (shape not in [(1,), (3,), (3,3)]) or dtype != np.float64: continue
attributes.append(ET.SubElement(grid, 'Attribute'))
attributes[-1].attrib={'Name': '{}'.format(name.split('/',2)[2]),
'Center': 'Cell',
'AttributeType': 'Tensor'}
data_items.append(ET.SubElement(attributes[-1], 'DataItem'))
data_items[-1].attrib={'Format': 'HDF',
'NumberType': 'Float',
'Precision': '{}'.format(prec),
'Dimensions': '{} {} {} {}'.format(*self.grid,np.prod(shape))}
data_items[-1].text='{}:{}'.format(os.path.split(self.fname)[1],name)
with open(os.path.splitext(self.fname)[0]+'.xdmf','w') as f:
f.write(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())
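write_XDMF thus builds one Uniform grid per increment inside a Temporal collection, and ParaView reads the heavy arrays directly from the HDF5 file. A pared-down sketch of that XML scaffold (element names follow the XDMF 2.0 schema; the increment name and dimensions are placeholders):

import xml.etree.ElementTree as ET
import xml.dom.minidom

xdmf = ET.Element('Xdmf', attrib={'Version': '2.0'})
domain = ET.SubElement(xdmf, 'Domain')
collection = ET.SubElement(domain, 'Grid',
                           attrib={'GridType': 'Collection', 'CollectionType': 'Temporal'})
grid = ET.SubElement(collection, 'Grid', attrib={'GridType': 'Uniform', 'Name': 'inc0'})
ET.SubElement(grid, 'Topology',
              attrib={'TopologyType': '3DCoRectMesh', 'Dimensions': '9 8 7'})
print(xml.dom.minidom.parseString(ET.tostring(xdmf).decode()).toprettyxml())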
def to_vtk(self,labels=[],mode='cell'):
"""
Export to vtk cell/point data.
@ -1111,28 +1243,3 @@ class Result:
inc[3:].zfill(N_digits))
v.write(file_out)
###################################################################################################
# BEGIN DEPRECATED
def _time_to_inc(self,start,end):
selected = []
for i,time in enumerate(self.times):
if start <= time <= end:
selected.append(self.increments[i])
return selected
def set_by_time(self,start,end):
"""
Set active increments based on start and end time.
Parameters
----------
start : float
start time (included)
end : float
end time (included)
"""
self._manage_selection('set','increments',self._time_to_inc(start,end))

File diff suppressed because it is too large

View File

@ -120,9 +120,9 @@ class VTK:
Parameters
----------
fname : str
Filename for reading. Valid extensions are *.vtr, *.vtu, *.vtp, and *.vtk.
Filename for reading. Valid extensions are .vtr, .vtu, .vtp, and .vtk.
dataset_type : str, optional
Name of the vtk.vtkDataSet subclass when opening an *.vtk file. Valid types are vtkRectilinearGrid,
Name of the vtk.vtkDataSet subclass when opening an .vtk file. Valid types are vtkRectilinearGrid,
vtkUnstructuredGrid, and vtkPolyData.
"""

View File

@ -238,12 +238,13 @@ def cell_coord0_gridSizeOrigin(coord0,ordered=True):
start = origin + delta*.5
end = origin - delta*.5 + size
if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],grid[0])) and \
_np.allclose(coords[1],_np.linspace(start[1],end[1],grid[1])) and \
_np.allclose(coords[2],_np.linspace(start[2],end[2],grid[2]))):
atol = _np.max(size)*5e-2
if not (_np.allclose(coords[0],_np.linspace(start[0],end[0],grid[0]),atol=atol) and \
_np.allclose(coords[1],_np.linspace(start[1],end[1],grid[1]),atol=atol) and \
_np.allclose(coords[2],_np.linspace(start[2],end[2],grid[2]),atol=atol)):
raise ValueError('Regular grid spacing violated.')
if ordered and not _np.allclose(coord0.reshape(tuple(grid)+(3,),order='F'),cell_coord0(grid,size,origin)):
if ordered and not _np.allclose(coord0.reshape(tuple(grid)+(3,),order='F'),cell_coord0(grid,size,origin),atol=atol):
raise ValueError('Input data is not ordered (x fast, z slow).')
return (grid,size,origin)
@ -360,7 +361,7 @@ def node_2_cell(node_data):
+ _np.roll(node_data,1,(0,)) + _np.roll(node_data,1,(1,)) + _np.roll(node_data,1,(2,))
+ _np.roll(node_data,1,(0,1)) + _np.roll(node_data,1,(1,2)) + _np.roll(node_data,1,(2,0)))*0.125
return c[:-1,:-1,:-1]
return c[1:,1:,1:]
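node_2_cell averages each node with its lower neighbors via np.roll; the slice change above (c[1:,1:,1:] instead of c[:-1,:-1,:-1]) selects the cells consistently with that backward-looking average on a periodic field. A 1-D analog (node values invented):

import numpy as np

node = np.array([0., 2., 4., 6., 0.])   # periodic node field, last entry repeats the first
c = (node + np.roll(node, 1)) * 0.5     # average each node with its predecessor
cell = c[1:]                            # drop the wrapped-around first entry
print(cell)                             # [1. 3. 5. 3.] -> cell-centered values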
def node_coord0_gridSizeOrigin(coord0,ordered=True):
@ -385,12 +386,13 @@ def node_coord0_gridSizeOrigin(coord0,ordered=True):
if (grid+1).prod() != len(coord0):
raise ValueError('Data count {} does not match grid {}.'.format(len(coord0),grid))
if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1)) and \
_np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],grid[1]+1)) and \
_np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],grid[2]+1))):
atol = _np.max(size)*5e-2
if not (_np.allclose(coords[0],_np.linspace(mincorner[0],maxcorner[0],grid[0]+1),atol=atol) and \
_np.allclose(coords[1],_np.linspace(mincorner[1],maxcorner[1],grid[1]+1),atol=atol) and \
_np.allclose(coords[2],_np.linspace(mincorner[2],maxcorner[2],grid[2]+1),atol=atol)):
raise ValueError('Regular grid spacing violated.')
if ordered and not _np.allclose(coord0.reshape(tuple(grid+1)+(3,),order='F'),node_coord0(grid,size,origin)):
if ordered and not _np.allclose(coord0.reshape(tuple(grid+1)+(3,),order='F'),node_coord0(grid,size,origin),atol=atol):
raise ValueError('Input data is not ordered (x fast, z slow).')
return (grid,size,origin)

View File

@ -112,8 +112,8 @@ def execute(cmd,
"""
initialPath = os.getcwd()
os.chdir(wd)
myEnv = os.environ if env is None else env
os.chdir(wd)
process = subprocess.Popen(shlex.split(cmd),
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
@ -121,9 +121,9 @@ def execute(cmd,
env = myEnv)
out,error = [i for i in (process.communicate() if streamIn is None
else process.communicate(streamIn.read().encode('utf-8')))]
os.chdir(initialPath)
out = out.decode('utf-8').replace('\x08','')
error = error.decode('utf-8').replace('\x08','')
os.chdir(initialPath)
if process.returncode != 0:
raise RuntimeError('{} failed with returncode {}'.format(cmd,process.returncode))
return out,error
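The reordering above restores the caller's working directory only after the output is decoded. Changing the process-wide directory around Popen is fragile in threaded code anyway; a sketch of an alternative that scopes the directory to the child via Popen's cwd argument (function shape kept, names are illustrative):

import shlex
import subprocess

def execute(cmd, wd='./', env=None):
    """Run cmd in wd without touching the caller's working directory."""
    process = subprocess.Popen(shlex.split(cmd),
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               cwd=wd,                      # scoped to the child only
                               env=env)
    out, error = (s.decode('utf-8').replace('\x08','') for s in process.communicate())
    if process.returncode != 0:
        raise RuntimeError('{} failed with returncode {}'.format(cmd, process.returncode))
    return out, error

print(execute('echo hello')[0])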

View File

@ -0,0 +1,85 @@
4 header
grid a 8 b 10 c 8
size x 8e-06 y 1e-05 z 8e-06
origin x 0.0 y -2.5e-06 z -2e-06
homogenization 1
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 30 14 18 42 42 42
42 42 29 13 17 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 31 15 19 42 42 42
42 6 10 2 2 42 42 42
42 1 2 2 2 2 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 32 16 20 42 42 42
42 7 11 2 2 42 42 42
42 7 11 2 2 42 42 42
42 2 2 2 2 2 42 42
42 42 2 2 31 35 42 42
42 42 22 26 10 14 1 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 4 2 2 2 2 42 42
42 42 2 2 32 36 42 42
42 42 24 28 12 16 1 42
42 42 42 7 7 1 1 42
42 42 42 7 7 1 1 42
42 42 42 1 1 1 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 25 29 13 17 1 42
42 42 42 8 8 1 1 42
42 42 42 1 1 1 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 1 1 1 42 42
42 42 42 1 1 1 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42

View File

@ -0,0 +1,104 @@
4 header
grid a 11 b 11 c 9
size x 1.1e-05 y 1.1000000000000001e-05 z 9e-06
origin x -1.5e-06 y -3.0000000000000005e-06 z -2.4999999999999998e-06
homogenization 1
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 1 2 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 1 42 42 42 42 42 42
42 42 42 42 1 5 42 42 42 42 42
42 42 42 1 7 4 42 42 42 42 42
42 42 42 42 42 27 42 42 42 42 42
42 42 42 42 42 42 2 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 1 1 42 42 42 42 42 42
42 42 42 1 1 9 29 42 42 42 42
42 42 1 1 11 8 28 2 42 42 42
42 42 42 1 10 31 2 42 42 42 42
42 42 42 42 30 2 2 2 42 42 42
42 42 42 42 42 42 2 1 42 42 42
42 42 42 42 42 42 42 1 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 1 42 42 42 42 42 42 42
42 42 42 1 1 42 42 42 42 42 42
42 42 1 16 36 12 32 42 42 42 42
42 42 42 15 35 2 2 2 42 42 42
42 42 42 42 2 2 2 11 3 42 42
42 42 42 42 42 42 10 6 42 42 42
42 42 42 42 42 42 42 6 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 1 42 42 42 42 42 42 42
42 42 42 1 17 42 42 42 42 42 42
42 42 42 16 36 37 2 42 42 42 42
42 42 42 42 39 2 2 12 42 42 42
42 42 42 38 2 2 2 11 8 42 42
42 42 42 42 2 2 14 30 42 42 42
42 42 42 42 42 42 13 30 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 40 42 42 42 42 42 42
42 42 42 42 42 2 42 42 42 42 42
42 42 42 42 42 2 2 15 42 42 42
42 42 42 42 42 2 18 42 42 42 42
42 42 42 42 42 42 17 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 2 20 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42
42 42 42 42 42 42 42 42 42 42 42

View File

@ -1,68 +1,4 @@
68 header
geom_fromVoronoiTessellation 2.0.3-1073-g6f3cb071
<texture>
[Grain1]
(gauss) phi1 358.98 Phi 65.62 phi2 24.48
[Grain2]
(gauss) phi1 121.05 Phi 176.11 phi2 295.73
[Grain3]
(gauss) phi1 43.79 Phi 113.76 phi2 345.90
[Grain4]
(gauss) phi1 265.15 Phi 62.52 phi2 299.71
[Grain5]
(gauss) phi1 221.23 Phi 26.54 phi2 207.05
[Grain6]
(gauss) phi1 249.81 Phi 61.47 phi2 152.14
[Grain7]
(gauss) phi1 332.45 Phi 99.16 phi2 345.34
[Grain8]
(gauss) phi1 312.27 Phi 118.27 phi2 181.59
[Grain9]
(gauss) phi1 303.10 Phi 48.21 phi2 358.03
[Grain10]
(gauss) phi1 338.26 Phi 48.11 phi2 176.78
[Grain11]
(gauss) phi1 115.17 Phi 56.54 phi2 223.84
[Grain12]
(gauss) phi1 281.04 Phi 97.48 phi2 27.94
<microstructure>
[Grain1]
crystallite 1
(constituent) phase 1 texture 1 fraction 1.0
[Grain2]
crystallite 1
(constituent) phase 1 texture 2 fraction 1.0
[Grain3]
crystallite 1
(constituent) phase 1 texture 3 fraction 1.0
[Grain4]
crystallite 1
(constituent) phase 1 texture 4 fraction 1.0
[Grain5]
crystallite 1
(constituent) phase 1 texture 5 fraction 1.0
[Grain6]
crystallite 1
(constituent) phase 1 texture 6 fraction 1.0
[Grain7]
crystallite 1
(constituent) phase 1 texture 7 fraction 1.0
[Grain8]
crystallite 1
(constituent) phase 1 texture 8 fraction 1.0
[Grain9]
crystallite 1
(constituent) phase 1 texture 9 fraction 1.0
[Grain10]
crystallite 1
(constituent) phase 1 texture 10 fraction 1.0
[Grain11]
crystallite 1
(constituent) phase 1 texture 11 fraction 1.0
[Grain12]
crystallite 1
(constituent) phase 1 texture 12 fraction 1.0
<!skip>
4 header
grid a 6 b 7 c 8
size x 0.75 y 0.875 z 1.0
origin x 0.0 y 0.0 z 0.0

View File

@ -0,0 +1,61 @@
4 header
grid a 6 b 7 c 8
size x 0.75 y 0.875 z 1.0
origin x 0.0 y 0.0 z 0.0
homogenization 1
3 3 3 4 3 3
3 1 1 1 3 3
3 5 1 1 1 3
1 5 5 1 1 1
1 5 5 1 1 1
6 3 3 4 1 6
6 3 3 4 4 6
6 3 3 1 3 3
3 1 1 1 3 3
3 1 1 1 1 1
1 1 1 1 1 1
6 6 3 1 1 1
6 3 3 3 6 6
6 3 3 3 6 6
6 3 3 1 1 6
3 1 1 1 1 3
6 1 1 1 2 2
1 6 2 2 2 2
6 6 2 2 2 6
6 3 3 3 6 6
6 3 3 3 6 6
5 6 6 6 1 6
6 6 6 6 2 2
6 6 6 2 2 2
2 6 2 2 2 2
6 5 2 2 2 2
6 5 5 2 2 6
5 5 5 3 6 6
5 5 6 6 6 5
6 6 6 6 6 6
6 6 6 6 2 2
4 4 6 2 2 2
4 4 2 2 2 2
5 5 5 2 2 2
5 5 5 5 2 5
5 5 5 4 4 5
6 6 6 6 4 4
4 4 5 5 2 4
4 4 5 2 2 4
4 4 2 2 2 2
5 5 5 2 2 2
5 5 5 4 4 5
5 5 4 4 4 3
4 5 5 5 4 3
4 4 5 5 5 4
4 4 5 5 2 4
4 4 2 2 2 2
5 5 2 2 2 2
5 5 4 4 4 4
3 4 4 4 4 3
3 5 5 4 3 3
4 5 5 5 3 3
4 5 5 5 1 1
4 4 5 2 1 1
6 4 4 4 4 1
3 4 4 4 4 3

View File

@ -124,6 +124,3 @@ a_slip 2.25
h0_slipslip 75e6
interaction_slipslip 1 1 1.4 1.4 1.4 1.4
atol_resistance 1
<crystallite>
[dummy]

View File

@ -5,12 +5,13 @@ import pytest
import numpy as np
from damask import Geom
from damask import Rotation
def geom_equal(a,b):
return np.all(a.get_microstructure() == b.get_microstructure()) and \
np.all(a.get_size() == b.get_size()) and \
np.all(a.get_grid() == b.get_grid())
np.all(a.get_grid() == b.get_grid()) and \
np.allclose(a.get_size(), b.get_size())
@pytest.fixture
def default():
@ -36,6 +37,7 @@ class TestGeom:
default.get_size(),
default.get_origin()
)
print(modified)
assert geom_equal(modified,default)
@ -57,6 +59,22 @@ class TestGeom:
new = Geom.from_file(tmpdir.join('default.geom'))
assert geom_equal(new,default)
def test_invalid_combination(self,default):
with pytest.raises(ValueError):
default.update(default.microstructure[1:,1:,1:],size=np.ones(3), rescale=True)
def test_invalid_size(self,default):
with pytest.raises(ValueError):
default.update(default.microstructure[1:,1:,1:],size=np.ones(2))
def test_invalid_microstructure(self,default):
with pytest.raises(ValueError):
default.update(default.microstructure[1])
def test_invalid_homogenization(self,default):
with pytest.raises(TypeError):
default.set_homogenization(homogenization=0)
@pytest.mark.parametrize('directions,reflect',[
(['x'], False),
(['x','y','z'],True),
@ -98,6 +116,52 @@ class TestGeom:
if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference))
def test_renumber(self,default):
modified = copy.deepcopy(default)
microstructure = modified.get_microstructure()
for m in np.unique(microstructure):
microstructure[microstructure==m] = microstructure.max() + np.random.randint(1,30)
modified.update(microstructure)
assert not geom_equal(modified,default)
modified.renumber()
assert geom_equal(modified,default)
def test_substitute(self,default):
modified = copy.deepcopy(default)
microstructure = modified.get_microstructure()
offset = np.random.randint(1,500)
microstructure+=offset
modified.update(microstructure)
assert not geom_equal(modified,default)
modified.substitute(np.arange(default.microstructure.max())+1+offset,
np.arange(default.microstructure.max())+1)
assert geom_equal(modified,default)
@pytest.mark.parametrize('axis_angle',[np.array([1,0,0,86.7]), np.array([0,1,0,90.4]), np.array([0,0,1,90]),
np.array([1,0,0,175]),np.array([0,-1,0,178]),np.array([0,0,1,180])])
def test_rotate360(self,default,axis_angle):
modified = copy.deepcopy(default)
for i in range(np.rint(360/axis_angle[3]).astype(int)):
modified.rotate(Rotation.from_axis_angle(axis_angle,degrees=True))
assert geom_equal(modified,default)
@pytest.mark.parametrize('Eulers',[[32.0,68.0,21.0],
[0.0,32.0,240.0]])
def test_rotate(self,default,update,reference_dir,Eulers):
modified = copy.deepcopy(default)
modified.rotate(Rotation.from_Eulers(Eulers,degrees=True))
tag = 'Eulers={}-{}-{}'.format(*Eulers)
reference = os.path.join(reference_dir,'rotate_{}.geom'.format(tag))
if update: modified.to_file(reference)
assert geom_equal(modified,Geom.from_file(reference))
def test_canvas(self,default):
grid_add = np.random.randint(0,30,(3))
modified = copy.deepcopy(default)
modified.canvas(modified.grid + grid_add)
e = default.grid
assert np.all(modified.microstructure[:e[0],:e[1],:e[2]] == default.microstructure)
@pytest.mark.parametrize('periodic',[True,False])
def test_tessellation_approaches(self,periodic):
grid = np.random.randint(10,20,3)

View File

@ -0,0 +1,41 @@
import random
import pytest
import numpy as np
from damask import Symmetry
class TestSymmetry:
@pytest.mark.parametrize('invalid_symmetry',['fcc','bcc','hello'])
def test_invalid_symmetry(self,invalid_symmetry):
with pytest.raises(KeyError):
s = Symmetry(invalid_symmetry) # noqa
def test_equal(self):
symmetry = random.choice(Symmetry.lattices)
print(symmetry)
assert Symmetry(symmetry) == Symmetry(symmetry)
def test_not_equal(self):
symmetries = random.sample(Symmetry.lattices,k=2)
assert Symmetry(symmetries[0]) != Symmetry(symmetries[1])
@pytest.mark.parametrize('lattice',Symmetry.lattices)
def test_inFZ(self,lattice):
assert Symmetry(lattice).inFZ(np.zeros(3))
@pytest.mark.parametrize('lattice',Symmetry.lattices)
def test_inDisorientationSST(self,lattice):
assert Symmetry(lattice).inDisorientationSST(np.zeros(3))
@pytest.mark.parametrize('lattice',Symmetry.lattices)
@pytest.mark.parametrize('proper',[True,False])
def test_inSST(self,lattice,proper):
assert Symmetry(lattice).inSST(np.zeros(3),proper)
@pytest.mark.parametrize('function',['inFZ','inDisorientationSST'])
def test_invalid_argument(self,function):
s = Symmetry() # noqa
with pytest.raises(ValueError):
eval('s.{}(np.ones(4))'.format(function))

View File

@ -8,13 +8,8 @@ import damask
from damask import Rotation
from damask import Orientation
from damask import Lattice
n = 1000
@pytest.fixture
def default():
"""A set of n random rotations."""
return [Rotation.fromRandom() for r in range(n)]
n = 1000
@pytest.fixture
def reference_dir(reference_dir_base):
@ -28,15 +23,15 @@ class TestOrientation:
{'label':'green','RGB':[0,1,0],'direction':[0,1,1]},
{'label':'blue', 'RGB':[0,0,1],'direction':[1,1,1]}])
@pytest.mark.parametrize('lattice',['fcc','bcc'])
def test_IPF_cubic(self,default,color,lattice):
def test_IPF_cubic(self,color,lattice):
cube = damask.Orientation(damask.Rotation(),lattice)
for direction in set(permutations(np.array(color['direction']))):
assert np.allclose(cube.IPFcolor(direction),np.array(color['RGB']))
assert np.allclose(cube.IPFcolor(np.array(direction)),np.array(color['RGB']))
@pytest.mark.parametrize('lattice',Lattice.lattices)
def test_IPF(self,lattice):
direction = np.random.random(3)*2.0-1
for rot in [Rotation.fromRandom() for r in range(n//100)]:
for rot in [Rotation.from_random() for r in range(n//100)]:
R = damask.Orientation(rot,lattice)
color = R.IPFcolor(direction)
for equivalent in R.equivalentOrientations():
@ -45,7 +40,7 @@ class TestOrientation:
@pytest.mark.parametrize('model',['Bain','KS','GT','GT_prime','NW','Pitsch'])
@pytest.mark.parametrize('lattice',['fcc','bcc'])
def test_relationship_forward_backward(self,model,lattice):
ori = Orientation(Rotation.fromRandom(),lattice)
ori = Orientation(Rotation.from_random(),lattice)
for i,r in enumerate(ori.relatedOrientations(model)):
ori2 = r.relatedOrientations(model)[i]
misorientation = ori.rotation.misorientation(ori2.rotation)
@ -56,8 +51,8 @@ class TestOrientation:
def test_relationship_reference(self,update,reference_dir,model,lattice):
reference = os.path.join(reference_dir,'{}_{}.txt'.format(lattice,model))
ori = Orientation(Rotation(),lattice)
eu = np.array([o.rotation.asEulers(degrees=True) for o in ori.relatedOrientations(model)])
if update:
eu = np.array([o.rotation.as_Eulers(degrees=True) for o in ori.relatedOrientations(model)])
if update:
coords = np.array([(1,i+1) for i,x in enumerate(eu)])
table = damask.Table(eu,{'Eulers':(3,)})
table.add('pos',coords)

View File

@ -1,9 +1,13 @@
import time
import shutil
import os
from datetime import datetime
import pytest
import numpy as np
import h5py
import damask
from damask import Result
from damask import mechanics
@ -13,9 +17,16 @@ def default(tmp_path,reference_dir):
fname = '12grains6x7x8_tensionY.hdf5'
shutil.copy(os.path.join(reference_dir,fname),tmp_path)
f = Result(os.path.join(tmp_path,fname))
f.set_by_time(20.0,20.0)
f.pick('times',20.0)
return f
@pytest.fixture
def single_phase(tmp_path,reference_dir):
"""Single phase Result file in temp location for modification."""
fname = '6grains6x7x8_single_phase_tensionY.hdf5'
shutil.copy(os.path.join(reference_dir,fname),tmp_path)
return Result(os.path.join(tmp_path,fname))
@pytest.fixture
def reference_dir(reference_dir_base):
"""Directory containing reference results."""
@ -28,12 +39,56 @@ class TestResult:
print(default)
def test_time_increments(self,default):
shape = default.read_dataset(default.get_dataset_location('F'),0).shape
default.set_by_time(0.0,20.0)
for i in default.iterate('increments'):
assert shape == default.read_dataset(default.get_dataset_location('F'),0).shape
def test_pick_all(self,default):
default.pick('increments',True)
a = default.get_dataset_location('F')
default.pick('increments','*')
b = default.get_dataset_location('F')
default.pick('increments',default.incs_in_range(0,np.iinfo(int).max))
c = default.get_dataset_location('F')
default.pick('times',True)
d = default.get_dataset_location('F')
default.pick('times','*')
e = default.get_dataset_location('F')
default.pick('times',default.times_in_range(0.0,np.inf))
f = default.get_dataset_location('F')
assert a == b == c == d == e == f
@pytest.mark.parametrize('what',['increments','times','constituents']) # ToDo: discuss materialpoints
def test_pick_none(self,default,what):
default.pick(what,False)
a = default.get_dataset_location('F')
default.pick(what,[])
b = default.get_dataset_location('F')
assert a == b == []
@pytest.mark.parametrize('what',['increments','times','constituents']) # ToDo: discuss materialpoints
def test_pick_more(self,default,what):
default.pick(what,False)
default.pick_more(what,'*')
a = default.get_dataset_location('F')
default.pick(what,True)
b = default.get_dataset_location('F')
assert a == b
@pytest.mark.parametrize('what',['increments','times','constituents']) # ToDo: discuss materialpoints
def test_pick_less(self,default,what):
default.pick(what,True)
default.pick_less(what,'*')
a = default.get_dataset_location('F')
default.pick(what,False)
b = default.get_dataset_location('F')
assert a == b == []
def test_pick_invalid(self,default):
with pytest.raises(AttributeError):
default.pick('invalid',True)
def test_add_absolute(self,default):
default.add_absolute('Fe')
@ -77,24 +132,40 @@ class TestResult:
in_file = default.read_dataset(loc['s_P'],0)
assert np.allclose(in_memory,in_file)
def test_add_eigenvalues(self,default):
@pytest.mark.parametrize('eigenvalue,function',[('max',np.amax),('min',np.amin)])
def test_add_eigenvalue(self,default,eigenvalue,function):
default.add_Cauchy('P','F')
default.add_eigenvalues('sigma')
loc = {'sigma' :default.get_dataset_location('sigma'),
'lambda(sigma)':default.get_dataset_location('lambda(sigma)')}
in_memory = mechanics.eigenvalues(default.read_dataset(loc['sigma'],0))
in_file = default.read_dataset(loc['lambda(sigma)'],0)
default.add_eigenvalue('sigma',eigenvalue)
loc = {'sigma' :default.get_dataset_location('sigma'),
'lambda':default.get_dataset_location('lambda_{}(sigma)'.format(eigenvalue))}
in_memory = function(mechanics.eigenvalues(default.read_dataset(loc['sigma'],0)),axis=1,keepdims=True)
in_file = default.read_dataset(loc['lambda'],0)
assert np.allclose(in_memory,in_file)
def test_add_eigenvectors(self,default):
@pytest.mark.parametrize('eigenvalue,idx',[('max',2),('mid',1),('min',0)])
def test_add_eigenvector(self,default,eigenvalue,idx):
default.add_Cauchy('P','F')
default.add_eigenvectors('sigma')
default.add_eigenvector('sigma',eigenvalue)
loc = {'sigma' :default.get_dataset_location('sigma'),
'v(sigma)':default.get_dataset_location('v(sigma)')}
in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))
'v(sigma)':default.get_dataset_location('v_{}(sigma)'.format(eigenvalue))}
in_memory = mechanics.eigenvectors(default.read_dataset(loc['sigma'],0))[:,idx]
in_file = default.read_dataset(loc['v(sigma)'],0)
assert np.allclose(in_memory,in_file)
@pytest.mark.parametrize('d',[[1,0,0],[0,1,0],[0,0,1]])
def test_add_IPFcolor(self,default,d):
default.add_IPFcolor('orientation',d)
loc = {'orientation': default.get_dataset_location('orientation'),
'color': default.get_dataset_location('IPFcolor_[{} {} {}]'.format(*d))}
qu = default.read_dataset(loc['orientation']).view(np.double).reshape(-1,4)
crystal_structure = default.get_crystal_structure()
in_memory = np.empty((qu.shape[0],3),np.uint8)
for i,q in enumerate(qu):
o = damask.Orientation(q,crystal_structure).reduced()
in_memory[i] = np.uint8(o.IPFcolor(np.array(d))*255)
in_file = default.read_dataset(loc['color'])
assert np.allclose(in_memory,in_file)
def test_add_maximum_shear(self,default):
default.add_Cauchy('P','F')
default.add_maximum_shear('sigma')
@ -143,6 +214,20 @@ class TestResult:
in_file = default.read_dataset(loc['S'],0)
assert np.allclose(in_memory,in_file)
@pytest.mark.parametrize('polar',[True,False])
def test_add_pole(self,default,polar):
pole = np.array([1.,0.,0.])
default.add_pole('orientation',pole,polar)
loc = {'orientation': default.get_dataset_location('orientation'),
'pole': default.get_dataset_location('p^{}_[1 0 0)'.format(u'rφ' if polar else 'xy'))}
rot = damask.Rotation(default.read_dataset(loc['orientation']).view(np.double))
rotated_pole = rot * np.broadcast_to(pole,rot.shape+(3,))
xy = rotated_pole[:,0:2]/(1.+abs(pole[2]))
in_memory = xy if not polar else \
np.block([np.sqrt(xy[:,0:1]*xy[:,0:1]+xy[:,1:2]*xy[:,1:2]),np.arctan2(xy[:,1:2],xy[:,0:1])])
in_file = default.read_dataset(loc['pole'])
assert np.allclose(in_memory,in_file)
def test_add_rotational_part(self,default):
default.add_rotational_part('F')
loc = {'F': default.get_dataset_location('F'),
@ -185,3 +270,42 @@ class TestResult:
in_memory = mechanics.left_stretch(default.read_dataset(loc['F'],0))
in_file = default.read_dataset(loc['V(F)'],0)
assert np.allclose(in_memory,in_file)
def test_add_invalid(self,default):
with pytest.raises(TypeError):
default.add_calculation('#invalid#*2')
@pytest.mark.parametrize('overwrite',['off','on'])
def test_add_overwrite(self,default,overwrite):
default.pick('times',default.times_in_range(0,np.inf)[-1])
default.add_Cauchy()
loc = default.get_dataset_location('sigma')
print(loc)
with h5py.File(default.fname,'r') as f:
created_first = f[loc[0]].attrs['Created'].decode()
created_first = datetime.strptime(created_first,'%Y-%m-%d %H:%M:%S%z')
if overwrite == 'on':
default.enable_overwrite()
else:
default.disable_overwrite()
time.sleep(2.)
default.add_calculation('sigma','#sigma#*0.0+311.','not the Cauchy stress')
with h5py.File(default.fname,'r') as f:
created_second = f[loc[0]].attrs['Created'].decode()
created_second = datetime.strptime(created_second,'%Y-%m-%d %H:%M:%S%z')
if overwrite == 'on':
assert created_first < created_second and np.allclose(default.read_dataset(loc),311.)
else:
assert created_first == created_second and not np.allclose(default.read_dataset(loc),311.)
@pytest.mark.parametrize('output',['F',[],['F','P']])
def test_vtk(self,tmp_path,default,output):
os.chdir(tmp_path)
default.to_vtk(output)
def test_XDMF(self,tmp_path,single_phase):
os.chdir(tmp_path)
single_phase.write_XDMF()

File diff suppressed because it is too large

View File

@ -18,7 +18,7 @@ def reference_dir(reference_dir_base):
return os.path.join(reference_dir_base,'Table')
class TestTable:
def test_get_scalar(self,default):
d = default.get('s')
assert np.allclose(d,1.0) and d.shape[1:] == (1,)
@ -29,12 +29,12 @@ class TestTable:
def test_get_tensor(self,default):
d = default.get('F')
assert np.allclose(d,1.0) and d.shape[1:] == (3,3)
assert np.allclose(d,1.0) and d.shape[1:] == (3,3)
def test_get_component(self,default):
d = default.get('5_F')
assert np.allclose(d,1.0) and d.shape[1:] == (1,)
def test_write_read_str(self,default,tmpdir):
default.to_ASCII(str(tmpdir.join('default.txt')))
new = Table.from_ASCII(str(tmpdir.join('default.txt')))
@ -69,15 +69,20 @@ class TestTable:
def test_read_strange(self,reference_dir,fname):
with open(os.path.join(reference_dir,fname)) as f:
Table.from_ASCII(f)
def test_set(self,default):
default.set('F',np.zeros((5,3,3)),'set to zero')
d=default.get('F')
assert np.allclose(d,0.0) and d.shape[1:] == (3,3)
def test_set_component(self,default):
default.set('1_F',np.zeros((5)),'set to zero')
d=default.get('F')
assert np.allclose(d[...,0,0],0.0) and d.shape[1:] == (3,3)
def test_labels(self,default):
assert default.labels == ['F','v','s']
def test_add(self,default):
d = np.random.random((5,9))
default.add('nine',d,'random data')

View File

@ -42,12 +42,25 @@ class TestGridFilters:
assert np.allclose(grid_filters.node_displacement_fluct(size,F),
grid_filters.cell_2_node(grid_filters.cell_displacement_fluct(size,F)))
def test_interpolation_nonperiodic(self):
def test_interpolation_to_node(self):
size = np.random.random(3)
grid = np.random.randint(8,32,(3))
F = np.random.random(tuple(grid)+(3,3))
assert np.allclose(grid_filters.node_coord(size,F) [1:-1,1:-1,1:-1],grid_filters.cell_2_node(
grid_filters.cell_coord(size,F))[1:-1,1:-1,1:-1])
assert np.allclose(grid_filters.node_coord(size,F) [1:-1,1:-1,1:-1],
grid_filters.cell_2_node(grid_filters.cell_coord(size,F))[1:-1,1:-1,1:-1])
def test_interpolation_to_cell(self):
grid = np.random.randint(1,30,(3))
node_coord_x = np.linspace(0,np.pi*2,num=grid[0]+1)
node_field_x = np.cos(node_coord_x)
node_field = np.broadcast_to(node_field_x.reshape(-1,1,1),grid+1)
cell_coord_x = node_coord_x[:-1]+node_coord_x[1]*.5
cell_field_x = np.interp(cell_coord_x,node_coord_x,node_field_x,period=np.pi*2.)
cell_field = np.broadcast_to(cell_field_x.reshape(-1,1,1),grid)
assert np.allclose(cell_field,grid_filters.node_2_cell(node_field))
@pytest.mark.parametrize('mode',['cell','node'])
def test_coord0_origin(self,mode):

View File

@ -63,25 +63,25 @@ class TestMechanics:
assert np.allclose(np.matmul(R,U),
np.matmul(V,R))
def test_strain_tensor_no_rotation(self):
@pytest.mark.parametrize('m',[0.0,np.random.random()*10.,np.random.random()*-10.])
def test_strain_tensor_no_rotation(self,m):
"""Ensure that left and right stretch give same results for no rotation."""
F = np.broadcast_to(np.eye(3),[self.n,3,3])*np.random.rand(self.n,3,3)
m = np.random.random()*20.0-10.0
assert np.allclose(mechanics.strain_tensor(F,'U',m),
mechanics.strain_tensor(F,'V',m))
def test_strain_tensor_rotation_equivalence(self):
@pytest.mark.parametrize('m',[0.0,np.random.random()*2.5,np.random.random()*-2.5])
def test_strain_tensor_rotation_equivalence(self,m):
"""Ensure that left and right strain differ only by a rotation."""
F = np.broadcast_to(np.eye(3),[self.n,3,3]) + (np.random.rand(self.n,3,3)*0.5 - 0.25)
m = np.random.random()*5.0-2.5
assert np.allclose(np.linalg.det(mechanics.strain_tensor(F,'U',m)),
np.linalg.det(mechanics.strain_tensor(F,'V',m)))
def test_strain_tensor_rotation(self):
@pytest.mark.parametrize('m',[0.0,np.random.random(),np.random.random()*-1.])
@pytest.mark.parametrize('t',['V','U'])
def test_strain_tensor_rotation(self,m,t):
"""Ensure that pure rotation results in no strain."""
F = mechanics.rotational_part(np.random.rand(self.n,3,3))
t = ['V','U'][np.random.randint(0,2)]
m = np.random.random()*2.0 - 1.0
assert np.allclose(mechanics.strain_tensor(F,t,m),
0.0)
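
These tests build on the Seth-Hill strain family E_m = (U^(2m) - I)/(2m) (logarithmic strain for m = 0); a minimal numpy sketch (hypothetical helper, not DAMASK's mechanics module) of why a pure rotation, for which U = I, gives zero strain for any m:

import numpy as np

def seth_hill_strain(F,m):
    C = F.T @ F                                                               # right Cauchy-Green tensor, U^2
    w,V = np.linalg.eigh(C)
    U_2m = (V*w**m) @ V.T                                                     # U^(2m) via spectral decomposition
    return (U_2m-np.eye(3))/(2*m)

theta = 0.3
R = np.array([[np.cos(theta),-np.sin(theta),0],
              [np.sin(theta), np.cos(theta),0],
              [0,0,1]])                                                       # pure rotation, hence U = I
assert np.allclose(seth_hill_strain(R,1.0),0.0)                               # m = 1 is Green-Lagrange strain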

View File

@ -17,10 +17,10 @@ if (PROJECT_NAME STREQUAL "damask-grid")
file(GLOB grid-sources grid/*.f90)
if(NOT CMAKE_BUILD_TYPE STREQUAL "SYNTAXONLY")
add_executable(DAMASK_spectral ${damask-sources} ${grid-sources})
install (TARGETS DAMASK_spectral RUNTIME DESTINATION bin)
add_executable(DAMASK_grid ${damask-sources} ${grid-sources})
install (TARGETS DAMASK_grid RUNTIME DESTINATION bin)
else()
add_library(DAMASK_spectral OBJECT ${damask-sources} ${grid-sources})
add_library(DAMASK_grid OBJECT ${damask-sources} ${grid-sources})
exec_program (mktemp OUTPUT_VARIABLE nothing)
exec_program (mktemp ARGS -d OUTPUT_VARIABLE black_hole)
install (PROGRAMS ${nothing} DESTINATION ${black_hole})
@ -30,7 +30,7 @@ elseif (PROJECT_NAME STREQUAL "damask-mesh")
file(GLOB mesh-sources mesh/*.f90)
add_executable(DAMASK_FEM ${damask-sources} ${mesh-sources})
install (TARGETS DAMASK_FEM RUNTIME DESTINATION bin)
add_executable(DAMASK_mesh ${damask-sources} ${mesh-sources})
install (TARGETS DAMASK_mesh RUNTIME DESTINATION bin)
endif()

View File

@ -11,6 +11,7 @@ module CPFEM
use math
use rotations
use YAML_types
use YAML_parse
use discretization_marc
use material
use config
@ -70,7 +71,7 @@ contains
!> @brief call (thread safe) all module initializations
!--------------------------------------------------------------------------------------------------
subroutine CPFEM_initAll(el,ip)
integer(pInt), intent(in) :: el, & !< FE el number
ip !< FE integration point number
@ -84,11 +85,12 @@ subroutine CPFEM_initAll(el,ip)
call math_init
call rotations_init
call YAML_types_init
call YAML_init
call HDF5_utilities_init
call results_init
call results_init(.false.)
call discretization_marc_init(ip, el)
call lattice_init
call material_init
call material_init(.false.)
call constitutive_init
call crystallite_init
call homogenization_init
@ -134,7 +136,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
logical, intent(in) :: parallelExecution !< flag indicating parallel computation of requested IPs
real(pReal), dimension(6), intent(out) :: cauchyStress !< stress as 6 vector
real(pReal), dimension(6,6), intent(out) :: jacobian !< jacobian as 66 tensor (Consistent tangent dcs/dE)
real(pReal) J_inverse, & ! inverse of Jacobian
rnd
real(pReal), dimension (3,3) :: Kirchhoff, & ! Kirchhoff stress (tau = P F^T)
@ -142,16 +144,16 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
real(pReal), dimension (3,3,3,3) :: H_sym, &
H, &
jacobian3333 ! jacobian in Matrix notation
integer(pInt) elCP, & ! crystal plasticity element number
i, j, k, l, m, n, ph, homog, mySource
logical updateJaco ! flag indicating if Jacobian has to be updated
real(pReal), parameter :: ODD_STRESS = 1e15_pReal, & !< return value for stress in case of ping pong dummy cycle
ODD_JACOBIAN = 1e50_pReal !< return value for jacobian in case of ping pong dummy cycle
elCP = mesh_FEM2DAMASK_elem(elFE)
if (iand(debug_level(debug_CPFEM), debug_levelBasic) /= 0_pInt &
.and. elCP == debug_e .and. ip == debug_i) then
write(6,'(/,a)') '#############################################'
@ -166,16 +168,16 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
write(6,'(a,/)') '# --- terminallyIll --- #'
write(6,'(a,/)') '#############################################'; flush (6)
endif
if (iand(mode, CPFEM_BACKUPJACOBIAN) /= 0_pInt) &
CPFEM_dcsde_knownGood = CPFEM_dcsde
if (iand(mode, CPFEM_RESTOREJACOBIAN) /= 0_pInt) &
CPFEM_dcsde = CPFEM_dcsde_knownGood
!*** age results
if (iand(mode, CPFEM_AGERESULTS) /= 0_pInt) call CPFEM_forward
!*** collection of FEM input with return of randomized odd stress and jacobian
!* If no parallel execution is required, there is no need to collect FEM input
if (.not. parallelExecution) then
@ -186,7 +188,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
end select chosenThermal1
materialpoint_F0(1:3,1:3,ip,elCP) = ffn
materialpoint_F(1:3,1:3,ip,elCP) = ffn1
elseif (iand(mode, CPFEM_COLLECT) /= 0_pInt) then
call random_number(rnd)
if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
@ -201,11 +203,11 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
materialpoint_F(1:3,1:3,ip,elCP) = ffn1
CPFEM_calc_done = .false.
endif
!*** calculation of stress and jacobian
if (iand(mode, CPFEM_CALCRESULTS) /= 0_pInt) then
!*** deformation gradient outdated or any actual deformation gradient differs by more than relevantStrain from the stored one
validCalculation: if (terminallyIll &
.or. outdatedFFN1 &
@ -223,7 +225,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
CPFEM_cs(1:6,ip,elCP) = ODD_STRESS * rnd
CPFEM_dcsde(1:6,1:6,ip,elCP) = ODD_JACOBIAN * math_identity2nd(6)
!*** deformation gradient is not outdated
else validCalculation
updateJaco = mod(cycleCounter,iJacoStiffness) == 0
@ -234,7 +236,7 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
write(6,'(a,i8,1x,i2)') '<< CPFEM >> calculation for elFE ip ',elFE,ip
call materialpoint_stressAndItsTangent(updateJaco, dt)
!* parallel computation and calculation not yet done
elseif (.not. CPFEM_calc_done) then
if (iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
@ -243,22 +245,22 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
call materialpoint_stressAndItsTangent(updateJaco, dt)
CPFEM_calc_done = .true.
endif
!* map stress and stiffness (or return odd values if terminally ill)
terminalIllness: if (terminallyIll) then
call random_number(rnd)
if (rnd < 0.5_pReal) rnd = rnd - 1.0_pReal
CPFEM_cs(1:6,ip,elCP) = ODD_STRESS * rnd
CPFEM_dcsde(1:6,1:6,ip,elCP) = ODD_JACOBIAN * math_identity2nd(6)
else terminalIllness
! translate from P to CS
Kirchhoff = matmul(materialpoint_P(1:3,1:3,ip,elCP), transpose(materialpoint_F(1:3,1:3,ip,elCP)))
J_inverse = 1.0_pReal / math_det33(materialpoint_F(1:3,1:3,ip,elCP))
CPFEM_cs(1:6,ip,elCP) = math_sym33to6(J_inverse * Kirchhoff,weighted=.false.)
! translate from dP/dF to dCS/dE
H = 0.0_pReal
do i=1,3; do j=1,3; do k=1,3; do l=1,3; do m=1,3; do n=1,3
@ -269,15 +271,15 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
+ 0.5_pReal * ( Kirchhoff(j,l)*math_delta(i,k) + Kirchhoff(i,k)*math_delta(j,l) &
+ Kirchhoff(j,k)*math_delta(i,l) + Kirchhoff(i,l)*math_delta(j,k))
enddo; enddo; enddo; enddo; enddo; enddo
forall(i=1:3, j=1:3,k=1:3,l=1:3) &
H_sym(i,j,k,l) = 0.25_pReal * (H(i,j,k,l) + H(j,i,k,l) + H(i,j,l,k) + H(j,i,l,k))
CPFEM_dcsde(1:6,1:6,ip,elCP) = math_sym3333to66(J_inverse * H_sym,weighted=.false.)
endif terminalIllness
endif validCalculation
!* report stress and stiffness
if ((iand(debug_level(debug_CPFEM), debug_levelExtensive) /= 0_pInt) &
.and. ((debug_e == elCP .and. debug_i == ip) &
@ -288,17 +290,17 @@ subroutine CPFEM_general(mode, parallelExecution, ffn, ffn1, temperature_inp, dt
'<< CPFEM >> Jacobian/GPa at elFE ip ', elFE, ip, transpose(CPFEM_dcsdE(1:6,1:6,ip,elCP))*1.0e-9_pReal
flush(6)
endif
endif
!*** warn if stiffness close to zero
if (all(abs(CPFEM_dcsdE(1:6,1:6,ip,elCP)) < 1e-10_pReal)) call IO_warning(601,elCP,ip)
!*** copy to output if using commercial FEM solver
cauchyStress = CPFEM_cs (1:6, ip,elCP)
jacobian = CPFEM_dcsdE(1:6,1:6,ip,elCP)
!*** remember extreme values of stress ...
cauchyStress33 = math_6toSym33(CPFEM_cs(1:6,ip,elCP),weighted=.false.)
if (maxval(cauchyStress33) > debug_stressMax) then
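
For reference, the stress push-forward in the validCalculation branch above is sigma = J^-1 P F^T, with tau = P F^T the Kirchhoff stress; a minimal numpy sketch for a single material point (the Fortran additionally symmetrizes and packs stress and tangent into 6- and 6x6-component form):

import numpy as np

def cauchy(P,F):
    tau = P @ F.T                                                             # Kirchhoff stress
    return tau/np.linalg.det(F)                                               # sigma = tau/J

F = np.eye(3)+1e-3*np.random.rand(3,3)                                        # deformation gradient near identity
P = np.random.rand(3,3)                                                       # first Piola-Kirchhoff stress
sigma = cauchy(P,F)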

View File

@ -12,6 +12,7 @@ module CPFEM2
use math
use rotations
use YAML_types
use YAML_parse
use material
use lattice
use IO
@ -22,7 +23,7 @@ module CPFEM2
use homogenization
use constitutive
use crystallite
#if defined(FEM)
#if defined(Mesh)
use FEM_quadrature
use discretization_mesh
#elif defined(Grid)
@ -43,7 +44,7 @@ subroutine CPFEM_initAll
call DAMASK_interface_init ! Spectral and FEM interface to commandline
call prec_init
call IO_init
#ifdef FEM
#ifdef Mesh
call FEM_quadrature_init
#endif
call numerics_init
@ -52,15 +53,16 @@ subroutine CPFEM_initAll
call math_init
call rotations_init
call YAML_types_init
call YAML_init
call lattice_init
call HDF5_utilities_init
call results_init
#if defined(FEM)
call discretization_mesh_init
call results_init(restart=interface_restartInc>0)
#if defined(Mesh)
call discretization_mesh_init(restart=interface_restartInc>0)
#elif defined(Grid)
call discretization_grid_init
call discretization_grid_init(restart=interface_restartInc>0)
#endif
call material_init
call material_init(restart=interface_restartInc>0)
call constitutive_init
call crystallite_init
call homogenization_init

View File

@ -9,8 +9,6 @@
!> by DAMASK. Interpreting the command line arguments to get load case, geometry file,
!> and working directory.
!--------------------------------------------------------------------------------------------------
#define GCC_MIN 6
#define INTEL_MIN 1700
#define PETSC_MAJOR 3
#define PETSC_MINOR_MIN 10
#define PETSC_MINOR_MAX 13
@ -50,29 +48,6 @@ contains
!--------------------------------------------------------------------------------------------------
subroutine DAMASK_interface_init
#include <petsc/finclude/petscsys.h>
#if defined(__GFORTRAN__) && __GNUC__<GCC_MIN
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
=============== THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION ======================
================== THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION ===================
===================== THIS VERSION OF DAMASK REQUIRES A NEWER gfortran VERSION ================
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
#endif
#if defined(__INTEL_COMPILER) && __INTEL_COMPILER<INTEL_MIN
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
================= THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION =======================
==================== THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION ====================
======================= THIS VERSION OF DAMASK REQUIRES A NEWER ifort VERSION =================
===================================================================================================
----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION ----- WRONG COMPILER VERSION -----
===================================================================================================
#endif
#if PETSC_VERSION_MAJOR!=3 || PETSC_VERSION_MINOR<PETSC_MINOR_MIN || PETSC_VERSION_MINOR>PETSC_MINOR_MAX
===================================================================================================
@ -106,7 +81,7 @@ subroutine DAMASK_interface_init
typeSize
integer, dimension(8) :: &
dateAndTime
integer :: mpi_err
integer :: err
PetscErrorCode :: petsc_err
external :: &
quit
@ -118,8 +93,8 @@ subroutine DAMASK_interface_init
#ifdef _OPENMP
! If OpenMP is enabled, check whether the MPI library supports it and initialize accordingly.
! Otherwise, the first call to PETSc will do the initialization.
call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,mpi_err)
if (mpi_err /= 0) call quit(1)
call MPI_Init_Thread(MPI_THREAD_FUNNELED,threadLevel,err)
if (err /= 0) call quit(1)
if (threadLevel<MPI_THREAD_FUNNELED) then
write(6,'(/,a)') ' ERROR: MPI library does not support OpenMP'
call quit(1)
@ -128,10 +103,10 @@ subroutine DAMASK_interface_init
call PETScInitializeNoArguments(petsc_err) ! according to PETSc manual, that should be the first line in the code
CHKERRQ(petsc_err) ! this is a macro definition, it is case sensitive
call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,mpi_err)
if (mpi_err /= 0) call quit(1)
call MPI_Comm_size(PETSC_COMM_WORLD,worldsize,mpi_err)
if (mpi_err /= 0) call quit(1)
call MPI_Comm_rank(PETSC_COMM_WORLD,worldrank,err)
if (err /= 0) call quit(1)
call MPI_Comm_size(PETSC_COMM_WORLD,worldsize,err)
if (err /= 0) call quit(1)
mainProcess: if (worldrank == 0) then
if (output_unit /= 6) then
@ -181,22 +156,23 @@ subroutine DAMASK_interface_init
write(6,'(/,a,2(i2.2,a),i4.4)') ' Date: ',dateAndTime(3),'/',dateAndTime(2),'/', dateAndTime(1)
write(6,'(a,2(i2.2,a),i2.2)') ' Time: ',dateAndTime(5),':', dateAndTime(6),':', dateAndTime(7)
call MPI_Type_size(MPI_INTEGER,typeSize,mpi_err)
if (mpi_err /= 0) call quit(1)
call MPI_Type_size(MPI_INTEGER,typeSize,err)
if (err /= 0) call quit(1)
if (typeSize*8 /= bit_size(0)) then
write(6,'(a)') ' Mismatch between MPI and DAMASK integer'
call quit(1)
endif
call MPI_Type_size(MPI_DOUBLE,typeSize,mpi_err)
if (mpi_err /= 0) call quit(1)
call MPI_Type_size(MPI_DOUBLE,typeSize,err)
if (err /= 0) call quit(1)
if (typeSize*8 /= storage_size(0.0_pReal)) then
write(6,'(a)') ' Mismatch between MPI and DAMASK real'
call quit(1)
endif
do i = 1, command_argument_count()
call get_command_argument(i,arg)
call get_command_argument(i,arg,status=err)
if (err /= 0) call quit(1)
select case(trim(arg)) ! extract key
case ('-h','--help')
write(6,'(a)') ' #######################################################################'
@ -206,7 +182,7 @@ subroutine DAMASK_interface_init
write(6,'(a,/)')' Valid command line switches:'
write(6,'(a)') ' --geom (-g, --geometry)'
write(6,'(a)') ' --load (-l, --loadcase)'
write(6,'(a)') ' --workingdir (-w, --wd, --workingdirectory, -d, --directory)'
write(6,'(a)') ' --workingdir (-w, --wd, --workingdirectory)'
write(6,'(a)') ' --restart (-r, --rs)'
write(6,'(a)') ' --help (-h)'
write(6,'(/,a)')' -----------------------------------------------------------------------'
@ -223,12 +199,12 @@ subroutine DAMASK_interface_init
write(6,'(a)') ' directory.'
write(6,'(a)') ' For further configuration place "numerics.config"'
write(6,'(a)')' and "debug.config" in that directory.'
write(6,'(/,a)')' --restart XX'
write(6,'(a)') ' Reads in increment XX and continues with calculating'
write(6,'(a)') ' increment XX+1 based on this.'
write(6,'(/,a)')' --restart N'
write(6,'(a)') ' Reads in increment N and continues with calculating'
write(6,'(a)') ' increment N+1 based on this.'
write(6,'(a)') ' Appends to existing results file'
write(6,'(a)') ' "NameOfGeom_NameOfLoadFile".'
write(6,'(a)') ' Works only if the restart information for increment XX'
write(6,'(a)') ' "NameOfGeom_NameOfLoadFile.hdf5".'
write(6,'(a)') ' Works only if the restart information for increment N'
write(6,'(a)') ' is available in the working directory.'
write(6,'(/,a)')' -----------------------------------------------------------------------'
write(6,'(a)') ' Help:'
@ -236,19 +212,20 @@ subroutine DAMASK_interface_init
write(6,'(a,/)')' Prints this message and exits'
call quit(0) ! normal Termination
case ('-l', '--load', '--loadcase')
call get_command_argument(i+1,loadCaseArg)
call get_command_argument(i+1,loadCaseArg,status=err)
case ('-g', '--geom', '--geometry')
call get_command_argument(i+1,geometryArg)
case ('-w', '-d', '--wd', '--directory', '--workingdir', '--workingdirectory')
call get_command_argument(i+1,workingDirArg)
call get_command_argument(i+1,geometryArg,status=err)
case ('-w', '--wd', '--workingdir', '--workingdirectory')
call get_command_argument(i+1,workingDirArg,status=err)
case ('-r', '--rs', '--restart')
call get_command_argument(i+1,arg)
call get_command_argument(i+1,arg,status=err)
read(arg,*,iostat=stat) interface_restartInc
if (interface_restartInc < 0 .or. stat /=0) then
write(6,'(/,a)') ' ERROR: Could not parse restart increment: '//trim(arg)
call quit(1)
endif
end select
if (err /= 0) call quit(1)
enddo
if (len_trim(loadcaseArg) == 0 .or. len_trim(geometryArg) == 0) then
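
A typical invocation matching the help text above (file names hypothetical) would be: DAMASK_grid --load tension.load --geom polycrystal.geom --workingdir ./run; continuing from a stored increment: DAMASK_grid --load tension.load --geom polycrystal.geom --restart 20, which appends to polycrystal_tension.hdf5.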

View File

@ -81,7 +81,7 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief open library and do sanity checks
!> @brief initialize HDF5 library and do sanity checks
!--------------------------------------------------------------------------------------------------
subroutine HDF5_utilities_init

View File

@ -49,7 +49,7 @@ subroutine IO_init
write(6,'(/,a)') ' <<<+- IO init -+>>>'; flush(6)
call unitTest
call selfTest
end subroutine IO_init
@ -536,6 +536,19 @@ subroutine IO_error(error_ID,el,ip,g,instance,ext_msg)
case (602)
msg = 'invalid selection for debug'
!------------------------------------------------------------------------------------------------
! errors related to YAML input files
case (701)
msg = 'Incorrect indent/Null value not allowed'
case (702)
msg = 'Invalid use of flow YAML'
case (703)
msg = 'Space expected after a list indicator - '
case (704)
msg = 'Space expected after a colon for <key>: <value> pair'
case (705)
msg = 'Unsupported feature'
!-------------------------------------------------------------------------------------------------
! errors related to the grid solver
case (809)
@ -696,7 +709,7 @@ end subroutine IO_warning
!--------------------------------------------------------------------------------------------------
!> @brief check correctness of some IO functions
!--------------------------------------------------------------------------------------------------
subroutine unitTest
subroutine selfTest
integer, dimension(:), allocatable :: chunkPos
character(len=:), allocatable :: str
@ -745,6 +758,6 @@ subroutine unitTest
str = IO_rmComment(' ab #')
if (str /= ' ab'.or. len(str) /= 3) call IO_error(0,ext_msg='IO_rmComment/7')
end subroutine unitTest
end subroutine selfTest
end module IO

src/YAML_parse.f90 (new file, 644 lines)
View File

@ -0,0 +1,644 @@
!----------------------------------------------------------------------------------------------------
!> @author Martin Diehl, Max-Planck-Institut für Eisenforschung GmbH
!> @author Sharan Roongta, Max-Planck-Institut für Eisenforschung GmbH
!> @brief Parser for YAML files
!> @details The module converts a YAML input file to an equivalent YAML flow-style form, which is then parsed.
!----------------------------------------------------------------------------------------------------
module YAML_parse
use prec
use IO
use YAML_types
implicit none
private
public :: YAML_init
public :: parse_flow,to_flow
contains
!--------------------------------------------------------------------------------------------------
!> @brief do sanity checks
!--------------------------------------------------------------------------------------------------
subroutine YAML_init
call selfTest
end subroutine YAML_init
!--------------------------------------------------------------------------------------------------
!> @brief reads the flow style string and stores it in the form of dictionaries, lists and scalars.
!> @details A node type pointer can either point to a dictionary, list or scalar type entities.
!--------------------------------------------------------------------------------------------------
recursive function parse_flow(flow_string) result(node)
character(len=*), intent(inout) :: flow_string
class (tNode), pointer :: node
class (tNode), pointer :: myVal
character(len=pStringLen) :: key
integer :: e, & !> end position of dictionary or list
s, & !> start position of dictionary or list
d !> position of key: value separator (':')
flow_string = trim(adjustl(flow_string(:)))
if (flow_string(1:1) == '{') then ! start of a dictionary
e = 1
allocate(tDict::node)
do while (e < len_trim(flow_string))
s = e
d = s + scan(flow_string(s+1:),':')
e = d + find_end(flow_string(d+1:),'}')
key = trim(adjustl(flow_string(s+1:d-1)))
myVal => parse_flow(flow_string(d+1:e-1)) ! parse items (recursively)
select type (node)
class is (tDict)
call node%set(key,myVal)
end select
end do
elseif (flow_string(1:1) == '[') then ! start of a list
e = 1
allocate(tList::node)
do while (e < len_trim(flow_string))
s = e
e = s + find_end(flow_string(s+1:),']')
myVal => parse_flow(flow_string(s+1:e-1)) ! parse items (recursively)
select type (node)
class is (tList)
call node%append(myVal)
end select
end do
else ! scalar value
allocate(tScalar::node)
select type (node)
class is (tScalar)
node = trim(adjustl(flow_string))
end select
endif
end function parse_flow
!--------------------------------------------------------------------------------------------------
!> @brief finds location of chunk end: ',' or '}' or ']'
!> @details leaves nested lists ('[...]') and dicts ('{...}') intact
!--------------------------------------------------------------------------------------------------
integer function find_end(str,e_char)
character(len=*), intent(in) :: str
character, intent(in) :: e_char !< end of list/dict ( '}' or ']')
integer :: N_sq, & !< number of open square brackets
N_cu, & !< number of open curly brackets
i
N_sq = 0
N_cu = 0
do i = 1, len_trim(str)
if (N_sq==0 .and. N_cu==0 .and. scan(str(i:i),e_char//',') == 1) exit
N_sq = N_sq + merge(1,0,str(i:i) == '[')
N_cu = N_cu + merge(1,0,str(i:i) == '{')
N_sq = N_sq - merge(1,0,str(i:i) == ']')
N_cu = N_cu - merge(1,0,str(i:i) == '}')
enddo
find_end = i
end function find_end
!--------------------------------------------------------------------------------------------------
! @brief returns the indentation level of a given block/line
! @details for nested lists, an offset is added to determine the indent of the item block
! (skipping leading dashes)
!--------------------------------------------------------------------------------------------------
integer function indentDepth(line,offset)
character(len=*), intent(in) :: line
integer, optional,intent(in) :: offset
indentDepth = verify(line,IO_WHITESPACE) -1
if(present(offset)) indentDepth = indentDepth + offset
end function indentDepth
!--------------------------------------------------------------------------------------------------
! @brief check whether a string is in flow style, i.e. starts with '{' or '['
!--------------------------------------------------------------------------------------------------
logical function isFlow(line)
character(len=*), intent(in) :: line
isFlow = index(adjustl(line),'[') == 1 .or. index(adjustl(line),'{') == 1
end function isFlow
!--------------------------------------------------------------------------------------------------
! @brief check whether a string is a scalar item, i.e. starts without any special symbols
!--------------------------------------------------------------------------------------------------
logical function isScalar(line)
character(len=*), intent(in) :: line
isScalar = (.not.isKeyValue(line) .and. .not.isKey(line) .and. .not.isListItem(line) &
.and. .not.isFlow(line))
end function isScalar
!--------------------------------------------------------------------------------------------------
! @brief check whether a string is a list item, i.e. starts with '-'
!--------------------------------------------------------------------------------------------------
logical function isListItem(line)
character(len=*), intent(in) :: line
isListItem = index(adjustl(line),'-') == 1
end function isListItem
!--------------------------------------------------------------------------------------------------
! @brief check whether a string contains a key-value pair of the form '<key>: <value>'
!--------------------------------------------------------------------------------------------------
logical function isKeyValue(line)
character(len=*), intent(in) :: line
isKeyValue = .false.
if( .not. isKey(line) .and. index(IO_rmComment(line),':') > 0 .and. .not. isFlow(line)) then
if(index(IO_rmComment(line),': ') > 0) then
isKeyValue = .true.
else
call IO_error(704,ext_msg=line)
endif
endif
end function isKeyValue
!--------------------------------------------------------------------------------------------------
! @brief check whether a string contains a key without a value, i.e. it ends with ':'
! ToDo: check whether this is safe for trailing spaces followed by a new line character
!--------------------------------------------------------------------------------------------------
logical function isKey(line)
character(len=*), intent(in) :: line
if(len(IO_rmComment(line)) == 0) then
isKey = .false.
else
isKey = IO_rmComment(line(len(IO_rmComment(line)):len(IO_rmComment(line)))) == ':' &
.and. .not. isFlow(line)
endif
end function isKey
!--------------------------------------------------------------------------------------------------
! @brief reads a line of a YAML block that is already in flow style
! @details dicts should be enclosed within '{}' to be consistent with the DAMASK YAML parser
!--------------------------------------------------------------------------------------------------
recursive subroutine line_isFlow(flow,s_flow,line)
character(len=*), intent(inout) :: flow !< YAML in flow style only
integer, intent(inout) :: s_flow !< start position in flow
character(len=*), intent(in) :: line
integer :: &
s, &
list_chunk, &
dict_chunk
if(index(adjustl(line),'[') == 1) then
s = index(line,'[')
flow(s_flow:s_flow) = '['
s_flow = s_flow +1
do while(s < len_trim(line))
list_chunk = s + find_end(line(s+1:),']')
if(iskeyValue(line(s+1:list_chunk-1))) then
flow(s_flow:s_flow) = '{'
s_flow = s_flow +1
call keyValue_toFlow(flow,s_flow,line(s+1:list_chunk-1))
flow(s_flow:s_flow) = '}'
s_flow = s_flow +1
elseif(isFlow(line(s+1:list_chunk-1))) then
call line_isFlow(flow,s_flow,line(s+1:list_chunk-1))
else
call line_toFlow(flow,s_flow,line(s+1:list_chunk-1))
endif
flow(s_flow:s_flow+1) = ', '
s_flow = s_flow +2
s = s + find_end(line(s+1:),']')
enddo
s_flow = s_flow - 1
if (flow(s_flow-1:s_flow-1) == ',') s_flow = s_flow - 1
flow(s_flow:s_flow) = ']'
s_flow = s_flow+1
elseif(index(adjustl(line),'{') == 1) then
s = index(line,'{')
flow(s_flow:s_flow) = '{'
s_flow = s_flow +1
do while(s < len_trim(line))
dict_chunk = s + find_end(line(s+1:),'}')
if( .not. iskeyValue(line(s+1:dict_chunk-1))) call IO_error(705,ext_msg=line)
call keyValue_toFlow(flow,s_flow,line(s+1:dict_chunk-1))
flow(s_flow:s_flow+1) = ', '
s_flow = s_flow +2
s = s + find_end(line(s+1:),'}')
enddo
s_flow = s_flow -1
if(flow(s_flow-1:s_flow-1) == ',') s_flow = s_flow -1
flow(s_flow:s_flow) = '}'
s_flow = s_flow +1
else
call line_toFlow(flow,s_flow,line)
endif
end subroutine line_isFlow
!-------------------------------------------------------------------------------------------------
! @brief reads a '<key>: <value>' line of a YAML block and places it in the YAML flow-style structure
! @details makes sure that the <value> is consistent with the input required by the DAMASK YAML parser
!-------------------------------------------------------------------------------------------------
recursive subroutine keyValue_toFlow(flow,s_flow,line)
character(len=*), intent(inout) :: flow !< YAML in flow style only
integer, intent(inout) :: s_flow !< start position in flow
character(len=*), intent(in) :: line
character(len=:), allocatable :: line_asStandard ! standard form of <key>: <value>
integer :: &
d_flow, &
col_pos, &
offset_value
col_pos = index(line,':')
if(isFlow(line(col_pos+1:))) then
d_flow = len_trim(adjustl(line(:col_pos)))
flow(s_flow:s_flow+d_flow+1) = trim(adjustl(line(:col_pos)))//' '
s_flow = s_flow + d_flow+1
call line_isFlow(flow,s_flow,line(col_pos+1:))
else
offset_value = indentDepth(line(col_pos+2:))
line_asStandard = line(:col_pos+1)//line(col_pos+2+offset_value:)
call line_toFlow(flow,s_flow,line_asStandard)
endif
end subroutine keyValue_toFlow
!-------------------------------------------------------------------------------------------------
! @brief reads a line of YAML block and places it in the YAML flow style structure
!-------------------------------------------------------------------------------------------------
subroutine line_toFlow(flow,s_flow,line)
character(len=*), intent(inout) :: flow !< YAML in flow style only
integer, intent(inout) :: s_flow !< start position in flow
character(len=*), intent(in) :: line
integer :: &
d_flow
d_flow = len_trim(adjustl(line))
flow(s_flow:s_flow+d_flow) = trim(adjustl(line))
s_flow = s_flow + d_flow
end subroutine line_toFlow
!-------------------------------------------------------------------------------------------------
! @brief convert a YAML list from block style to flow style
! @details entered when the list indicator '- ' is encountered;
! reads each scalar list item and separates items with ','.
! For a non-scalar list item, the offset of that item block is stored.
! An increase in indentation level (or a non-scalar list item) invokes the 'decide' function;
! a decrease in indentation level marks the end of the current indentation block.
!-------------------------------------------------------------------------------------------------
recursive subroutine lst(blck,flow,s_blck,s_flow,offset)
character(len=*), intent(in) :: blck !< YAML in mixed style
character(len=*), intent(inout) :: flow !< YAML in flow style only
integer, intent(inout) :: s_blck, & !< start position in blck
s_flow, & !< start position in flow
offset !< stores leading '- ' in nested lists
character(len=pStringLen) :: line
integer :: e_blck,indent
indent = indentDepth(blck(s_blck:),offset)
do while (s_blck <= len_trim(blck))
e_blck = s_blck + index(blck(s_blck:),IO_EOL) - 2
line = IO_rmComment(blck(s_blck:e_blck))
if (len_trim(line) == 0) then
s_blck = e_blck + 2 ! forward to next line
cycle
elseif(indentDepth(line,offset) > indent) then
call decide(blck,flow,s_blck,s_flow,offset)
offset = 0
flow(s_flow:s_flow+1) = ', '
s_flow = s_flow + 2
elseif(indentDepth(line,offset) < indent .or. .not. isListItem(line)) then
offset = 0
exit ! job done (lower level)
else
if(trim(adjustl(line)) == '-') then ! list item in next line
s_blck = e_blck + 2
e_blck = e_blck + index(blck(e_blck+2:),IO_EOL)
line = IO_rmComment(blck(s_blck:e_blck))
if(indentDepth(line) < indent .or. indentDepth(line) == indent) &
call IO_error(701,ext_msg=line)
if(isScalar(line)) then
call line_toFlow(flow,s_flow,line)
s_blck = e_blck +2
offset = 0
elseif(isFlow(line)) then
call line_isFlow(flow,s_flow,line)
s_blck = e_blck +2
offset = 0
endif
else ! list item in the same line
if(line(indentDepth(line)+2:indentDepth(line)+2) /= ' ') &
call IO_error(703,ext_msg=line)
line = line(indentDepth(line)+3:)
if(isScalar(line)) then
call line_toFlow(flow,s_flow,line)
s_blck = e_blck +2
offset = 0
elseif(isFlow(line)) then
call line_isFlow(flow,s_flow,line)
s_blck = e_blck +2
offset = 0
else ! non scalar list item
offset = offset + indentDepth(blck(s_blck:))+1 ! offset in spaces to be ignored
s_blck = s_blck + index(blck(s_blck:e_blck),'-') ! s_blck after '-' symbol
endif
end if
end if
if(isScalar(line) .or. isFlow(line)) then
flow(s_flow:s_flow+1) = ', '
s_flow = s_flow +2
endif
end do
s_flow = s_flow - 1
if (flow(s_flow-1:s_flow-1) == ',') s_flow = s_flow - 1
end subroutine lst
!--------------------------------------------------------------------------------------------------
! @brief convert a YAML dict from block style to flow style
! @details entered when the dictionary indicator ':' is encountered;
! parses each line in the block and compares its indentation with that of the preceding line.
! Upon an increase in indentation level, the 'decide' function decides whether the line is a list or a dict;
! a decrease in indentation marks the end of the current indentation block.
!--------------------------------------------------------------------------------------------------
recursive subroutine dct(blck,flow,s_blck,s_flow,offset)
character(len=*), intent(in) :: blck !< YAML in mixed style
character(len=*), intent(inout) :: flow !< YAML in flow style only
integer, intent(inout) :: s_blck, & !< start position in blck
s_flow, & !< start position in flow
offset
character(len=pStringLen) :: line
integer :: e_blck,indent
logical :: previous_isKey
previous_isKey = .false.
indent = indentDepth(blck(s_blck:),offset)
do while (s_blck <= len_trim(blck))
e_blck = s_blck + index(blck(s_blck:),IO_EOL) - 2
line = IO_rmComment(blck(s_blck:e_blck))
if (len_trim(line) == 0) then
s_blck = e_blck + 2 ! forward to next line
cycle
elseif(indentDepth(line,offset) < indent) then
if(isScalar(line) .or. isFlow(line) .and. previous_isKey) &
call IO_error(701,ext_msg=line)
offset = 0
exit ! job done (lower level)
elseif(indentDepth(line,offset) > indent .or. isListItem(line)) then
offset = 0
call decide(blck,flow,s_blck,s_flow,offset)
else
if(isScalar(line)) call IO_error(701,ext_msg=line)
if(isFlow(line)) call IO_error(702,ext_msg=line)
line = line(indentDepth(line)+1:)
if(previous_isKey) then
flow(s_flow-1:s_flow) = ', '
s_flow = s_flow + 1
endif
if(isKeyValue(line)) then
call keyValue_toFlow(flow,s_flow,line)
else
call line_toFlow(flow,s_flow,line)
endif
s_blck = e_blck +2
end if
if(isScalar(line) .or. isKeyValue(line)) then
flow(s_flow:s_flow) = ','
s_flow = s_flow + 1
previous_isKey = .false.
else
previous_isKey = .true.
endif
flow(s_flow:s_flow) = ' '
s_flow = s_flow + 1
offset = 0
end do
s_flow = s_flow - 1
if (flow(s_flow-1:s_flow-1) == ',') s_flow = s_flow - 1
end subroutine dct
!--------------------------------------------------------------------------------------------------
! @brief decide whether next block is list or dict
!--------------------------------------------------------------------------------------------------
recursive subroutine decide(blck,flow,s_blck,s_flow,offset)
character(len=*), intent(in) :: blck !< YAML in mixed style
character(len=*), intent(inout) :: flow !< YAML in flow style only
integer, intent(inout) :: s_blck, & !< start position in blck
s_flow, & !< start position in flow
offset
integer :: e_blck
character(len=pStringLen) :: line
if(s_blck <= len(blck)) then
e_blck = s_blck + index(blck(s_blck:),IO_EOL) - 2
line = IO_rmComment(blck(s_blck:e_blck))
! exit here if '---' is found
if (isListItem(line)) then
flow(s_flow:s_flow) = '['
s_flow = s_flow + 1
call lst(blck,flow,s_blck,s_flow,offset)
flow(s_flow:s_flow) = ']'
s_flow = s_flow + 1
elseif(isKey(line) .or. isKeyValue(line)) then
flow(s_flow:s_flow) = '{'
s_flow = s_flow + 1
call dct(blck,flow,s_blck,s_flow,offset)
flow(s_flow:s_flow) = '}'
s_flow = s_flow + 1
elseif(isFlow(line)) then
call line_isFlow(flow,s_flow,line)
s_blck = e_blck +2
else
line = line(indentDepth(line)+1:)
call line_toFlow(flow,s_flow,line)
s_blck = e_blck +2
endif
endif
end subroutine decide
!--------------------------------------------------------------------------------------------------
! @brief convert all block style YAML parts to flow style
!--------------------------------------------------------------------------------------------------
function to_flow(blck)
character(len=:), allocatable :: to_flow
character(len=*), intent(in) :: blck !< YAML mixed style
integer :: s_blck, & !< start position in blck
s_flow, & !< start position in flow
offset, & !< counts leading '- ' in nested lists
end_line
if(isFlow(blck)) then
to_flow = trim(adjustl(blck))
else
allocate(character(len=len(blck)*2)::to_flow)
! move forward here (skip empty lines) and remove '---' if found
s_flow = 1
s_blck = 1
offset = 0
call decide(blck,to_flow,s_blck,s_flow,offset)
to_flow = trim(to_flow(:s_flow-1))
endif
end_line = index(to_flow,new_line(''))
if(end_line > 0) to_flow = to_flow(:end_line-1)
end function to_flow
!--------------------------------------------------------------------------------------------------
subroutine selfTest()
if (indentDepth(' a') /= 1) call IO_error(0,ext_msg='indentDepth')
if (indentDepth('a') /= 0) call IO_error(0,ext_msg='indentDepth')
if (indentDepth('x ') /= 0) call IO_error(0,ext_msg='indentDepth')
if ( isFlow(' a')) call IO_error(0,ext_msg='isFlow')
if (.not. isFlow('{')) call IO_error(0,ext_msg='isFlow')
if (.not. isFlow(' [')) call IO_error(0,ext_msg='isFlow')
if ( isListItem(' a')) call IO_error(0,ext_msg='isListItem')
if (.not. isListItem('- a ')) call IO_error(0,ext_msg='isListItem')
if (.not. isListItem(' -b')) call IO_error(0,ext_msg='isListItem')
if ( isKeyValue(' a')) call IO_error(0,ext_msg='isKeyValue')
if ( isKeyValue(' a: ')) call IO_error(0,ext_msg='isKeyValue')
if (.not. isKeyValue(' a: b')) call IO_error(0,ext_msg='isKeyValue')
if ( isKey(' a')) call IO_error(0,ext_msg='isKey')
if ( isKey('{a:b}')) call IO_error(0,ext_msg='isKey')
if ( isKey(' a:b')) call IO_error(0,ext_msg='isKey')
if (.not. isKey(' a: ')) call IO_error(0,ext_msg='isKey')
if (.not. isKey(' a:')) call IO_error(0,ext_msg='isKey')
if (.not. isKey(' a: #')) call IO_error(0,ext_msg='isKey')
if( isScalar('a: ')) call IO_error(0,ext_msg='isScalar')
if( isScalar('a: b')) call IO_error(0,ext_msg='isScalar')
if( isScalar('{a:b}')) call IO_error(0,ext_msg='isScalar')
if( isScalar('- a:')) call IO_error(0,ext_msg='isScalar')
if(.not. isScalar(' a')) call IO_error(0,ext_msg='isScalar')
basic_list: block
character(len=*), parameter :: block_list = &
" - Casablanca"//IO_EOL//&
" - North by Northwest"//IO_EOL
character(len=*), parameter :: block_list_newline = &
" -"//IO_EOL//&
" Casablanca"//IO_EOL//&
" -"//IO_EOL//&
" North by Northwest"//IO_EOL
character(len=*), parameter :: flow_list = &
"[Casablanca, North by Northwest]"
if (.not. to_flow(block_list) == flow_list) call IO_error(0,ext_msg='to_flow')
if (.not. to_flow(block_list_newline) == flow_list) call IO_error(0,ext_msg='to_flow')
end block basic_list
basic_dict: block
character(len=*), parameter :: block_dict = &
" aa: Casablanca"//IO_EOL//&
" bb: North by Northwest"//IO_EOL
character(len=*), parameter :: block_dict_newline = &
" aa:"//IO_EOL//&
" Casablanca"//IO_EOL//&
" bb:"//IO_EOL//&
" North by Northwest"//IO_EOL
character(len=*), parameter :: flow_dict = &
"{aa: Casablanca, bb: North by Northwest}"
if (.not. to_flow(block_dict) == flow_dict) call IO_error(0,ext_msg='to_flow')
if (.not. to_flow(block_dict_newline) == flow_dict) call IO_error(0,ext_msg='to_flow')
end block basic_dict
basic_flow: block
character(len=*), parameter :: flow_braces = &
" source: [{param: 1}, {param: 2}, {param: 3}, {param: 4}]"//IO_EOL
character(len=*), parameter :: flow_mixed_braces = &
" source: [param: 1, {param: 2}, param: 3, {param: 4}]"//IO_EOL
character(len=*), parameter :: flow = &
"{source: [{param: 1}, {param: 2}, {param: 3}, {param: 4}]}"
if (.not. to_flow(flow_braces) == flow) call IO_error(0,ext_msg='to_flow')
if (.not. to_flow(flow_mixed_braces) == flow) call IO_error(0,ext_msg='to_flow')
end block basic_flow
basic_mixed: block
character(len=*), parameter :: block_flow = &
" aa:"//IO_EOL//&
" - "//IO_EOL//&
" param_1: [a: b, c, {d: {e: [f: g, h]}}]"//IO_EOL//&
" - c: d"//IO_EOL//&
" bb:"//IO_EOL//&
" - {param_1: [{a: b}, c, {d: {e: [{f: g}, h]}}]}"//IO_EOL
character(len=*), parameter :: mixed_flow = &
"{aa: [{param_1: [{a: b}, c, {d: {e: [{f: g}, h]}}]}, {c: d}], bb: [{param_1: [{a: b}, c, {d: {e: [{f: g}, h]}}]}]}"
if(.not. to_flow(block_flow) == mixed_flow) call IO_error(0,ext_msg='to_flow')
end block basic_mixed
end subroutine selfTest
end module YAML_parse
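
A greatly reduced Python sketch of the decide/lst/dct recursion above; it handles only scalar list items ('- x') and '<key>: <value>' pairs and omits the nesting-aware bracket counting of find_end. The output is plain flow-style YAML, which any YAML parser, e.g. PyYAML's yaml.safe_load, would accept:

def to_flow_sketch(block):
    lines = [ln for ln in block.splitlines() if ln.strip()]
    indent = lambda ln: len(ln)-len(ln.lstrip())

    def parse(i,level):                                                       # analogue of 'decide'
        is_list = lines[i].lstrip().startswith('- ')
        items = []
        while i < len(lines) and indent(lines[i]) == level:
            body = lines[i].lstrip()
            if is_list:                                                       # analogue of 'lst': scalar item
                items.append(body[2:].strip())
            else:                                                             # analogue of 'dct'
                key,_,val = body.partition(':')
                if val.strip():                                               # '<key>: <value>' in one line
                    items.append(key.strip()+': '+val.strip())
                else:                                                         # key opening a nested block
                    sub,i = parse(i+1,indent(lines[i+1]))
                    items.append(key.strip()+': '+sub)
                    continue                                                  # i already points past the sub-block
            i += 1
        return ('['+', '.join(items)+']') if is_list else ('{'+', '.join(items)+'}'),i

    return parse(0,indent(lines[0]))[0]

block = ' aa:\n  - Casablanca\n  - North by Northwest\n bb: 1940\n'
assert to_flow_sketch(block) == '{aa: [Casablanca, North by Northwest], bb: 1940}'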

View File

@ -16,14 +16,7 @@ module YAML_types
private
public :: &
tNode, &
tScalar, &
tDict, &
tList, &
YAML_types_init
type, abstract :: tNode
type, abstract, public :: tNode
integer :: length = 0
contains
procedure(asFormattedString), deferred :: asFormattedString
@ -102,7 +95,7 @@ module YAML_types
end type tNode
type, extends(tNode) :: tScalar
type, extends(tNode), public :: tScalar
character(len=:), allocatable, private :: value
@ -118,7 +111,7 @@ module YAML_types
asString => tScalar_asString
end type tScalar
type, extends(tNode) :: tList
type, extends(tNode), public :: tList
class(tItem), pointer :: first => null()
@ -136,7 +129,7 @@ module YAML_types
final :: tList_finalize
end type tList
type, extends(tList) :: tDict
type, extends(tList), public :: tDict
contains
procedure :: asFormattedString => tDict_asFormattedString
procedure :: set => tDict_set
@ -171,6 +164,10 @@ module YAML_types
module procedure tScalar_assign__
end interface assignment (=)
public :: &
YAML_types_init, &
assignment(=)
contains
!--------------------------------------------------------------------------------------------------
@ -180,7 +177,7 @@ subroutine YAML_types_init
write(6,'(/,a)') ' <<<+- YAML_types init -+>>>'
call unitTest
call selfTest
end subroutine YAML_types_init
@ -188,7 +185,7 @@ end subroutine YAML_types_init
!--------------------------------------------------------------------------------------------------
!> @brief check correctness of some type bound procedures
!--------------------------------------------------------------------------------------------------
subroutine unitTest
subroutine selfTest
class(tNode), pointer :: s1,s2
allocate(tScalar::s1)
@ -260,7 +257,7 @@ subroutine unitTest
if(n%get_asString(1) /= 'True') call IO_error(0,ext_msg='byIndex_asString')
end block
end subroutine unitTest
end subroutine selfTest
!---------------------------------------------------------------------------------------------------

View File

@ -8,6 +8,7 @@
#include "debug.f90"
#include "list.f90"
#include "YAML_types.f90"
#include "YAML_parse.f90"
#include "future.f90"
#include "config.f90"
#include "LAPACK_interface.f90"

View File

@ -160,7 +160,7 @@ module subroutine plastic_phenopowerlaw_init
config%getFloats('interaction_twintwin'), &
config%getString('lattice_structure'))
prm%gamma_twin_char = lattice_characteristicShear_twin(N_tw,config%getString('lattice_structure'),&
config%getFloat('c/a'))
config%getFloat('c/a',defaultVal=0.0_pReal))
xi_twin_0 = config%getFloats('tau0_twin',requiredSize=size(N_tw))

View File

@ -6,7 +6,7 @@
!> @details doing cutbacking, forwarding in case of restart, reporting statistics, writing
!> results
!--------------------------------------------------------------------------------------------------
program DAMASK_spectral
program DAMASK_grid
#include <petsc/finclude/petscsys.h>
use PETScsys
use prec
@ -495,4 +495,4 @@ program DAMASK_spectral
call quit(0) ! no complaints ;)
end program DAMASK_spectral
end program DAMASK_grid

View File

@ -42,7 +42,9 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief reads the geometry file to obtain information on discretization
!--------------------------------------------------------------------------------------------------
subroutine discretization_grid_init
subroutine discretization_grid_init(restart)
logical, intent(in) :: restart
include 'fftw3-mpi.f03'
real(pReal), dimension(3) :: &
@ -100,13 +102,14 @@ subroutine discretization_grid_init
!--------------------------------------------------------------------------------------------------
! store geometry information for post processing
call results_openJobFile
call results_closeGroup(results_addGroup('geometry'))
call results_addAttribute('grid', grid, 'geometry')
call results_addAttribute('size', geomSize,'geometry')
call results_addAttribute('origin',origin, 'geometry')
call results_closeJobFile
if(.not. restart) then
call results_openJobFile
call results_closeGroup(results_addGroup('geometry'))
call results_addAttribute('grid', grid, 'geometry')
call results_addAttribute('size', geomSize,'geometry')
call results_addAttribute('origin',origin, 'geometry')
call results_closeJobFile
endif
!--------------------------------------------------------------------------------------------------
! geometry information required by the nonlocal CP model
call geometry_plastic_nonlocal_setIPvolume(reshape([(product(mySize/real(myGrid,pReal)),j=1,product(myGrid))], &

View File

@ -529,7 +529,7 @@ subroutine lattice_init
lattice_DamageMobility(p) = config_phase(p)%getFloat('damage_mobility',defaultVal=0.0_pReal)
! SHOULD NOT BE PART OF LATTICE END
call unitTest
call selfTest
enddo
@ -1436,6 +1436,7 @@ function lattice_SchmidMatrix_slip(Nslip,structure,cOverA) result(SchmidMatrix)
NslipMax = BCT_NSLIPSYSTEM
slipSystems = BCT_SYSTEMSLIP
case default
allocate(NslipMax(0))
call IO_error(137,ext_msg='lattice_SchmidMatrix_slip: '//trim(structure))
end select
@ -1485,6 +1486,7 @@ function lattice_SchmidMatrix_twin(Ntwin,structure,cOverA) result(SchmidMatrix)
NtwinMax = HEX_NTWINSYSTEM
twinSystems = HEX_SYSTEMTWIN
case default
allocate(NtwinMax(0))
call IO_error(137,ext_msg='lattice_SchmidMatrix_twin: '//trim(structure))
end select
@ -1564,6 +1566,7 @@ function lattice_SchmidMatrix_cleavage(Ncleavage,structure,cOverA) result(Schmid
NcleavageMax = BCC_NCLEAVAGESYSTEM
cleavageSystems = BCC_SYSTEMCLEAVAGE
case default
allocate(NcleavageMax(0))
call IO_error(137,ext_msg='lattice_SchmidMatrix_cleavage: '//trim(structure))
end select
@ -1919,6 +1922,7 @@ function coordinateSystem_slip(Nslip,structure,cOverA) result(coordinateSystem)
NslipMax = BCT_NSLIPSYSTEM
slipSystems = BCT_SYSTEMSLIP
case default
allocate(NslipMax(0))
call IO_error(137,ext_msg='coordinateSystem_slip: '//trim(structure))
end select
@ -2291,7 +2295,7 @@ end function equivalent_mu
!--------------------------------------------------------------------------------------------------
!> @brief check correctness of some lattice functions
!--------------------------------------------------------------------------------------------------
subroutine unitTest
subroutine selfTest
real(pReal), dimension(:,:,:), allocatable :: CoSy
real(pReal), dimension(:,:), allocatable :: system
@ -2304,7 +2308,6 @@ subroutine unitTest
system = reshape([1.0_pReal+r(1),0.0_pReal,0.0_pReal, 0.0_pReal,1.0_pReal+r(2),0.0_pReal],[6,1])
CoSy = buildCoordinateSystem([1],[1],system,'fcc',0.0_pReal)
if(any(dNeq(CoSy(1:3,1:3,1),math_I3))) &
call IO_error(0)
@ -2321,6 +2324,6 @@ subroutine unitTest
if(dNeq(lambda*0.5_pReal/(lambda+equivalent_mu(C,'reuss')),equivalent_nu(C,'reuss'),1.0e-12_pReal)) &
call IO_error(0,ext_msg='equivalent_nu/reuss')
end subroutine unitTest
end subroutine selfTest
end module lattice
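
The Reuss check above relies on the standard isotropic relations nu = lambda/(2(lambda+mu)) and E = mu(3 lambda+2 mu)/(lambda+mu); a quick numeric cross-check with illustrative Lame parameters:

lam,mu = 60.0e9,27.0e9                                                        # illustrative Lame parameters in Pa
nu = lam/(2.0*(lam+mu))                                                       # Poisson ratio, as asserted above
E  = mu*(3.0*lam+2.0*mu)/(lam+mu)                                             # Young's modulus
assert abs(E-2.0*mu*(1.0+nu)) < 1e-3                                          # consistency: E = 2 mu (1+nu)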

View File

@ -207,7 +207,9 @@ contains
!--------------------------------------------------------------------------------------------------
!> @brief parses material configuration file
!--------------------------------------------------------------------------------------------------
subroutine material_init
subroutine material_init(restart)
logical, intent(in) :: restart
integer :: i,e,m,c,h, myDebug, myPhase, myHomog, myMicro
integer, dimension(:), allocatable :: &
@ -339,11 +341,12 @@ subroutine material_init
call config_deallocate('material.config/microstructure')
call config_deallocate('material.config/texture')
call results_openJobFile
call results_mapping_constituent(material_phaseAt,material_phaseMemberAt,config_name_phase)
call results_mapping_materialpoint(material_homogenizationAt,material_homogenizationMemberAt,config_name_homogenization)
call results_closeJobFile
if (.not. restart) then
call results_openJobFile
call results_mapping_constituent(material_phaseAt,material_phaseMemberAt,config_name_phase)
call results_mapping_materialpoint(material_homogenizationAt,material_homogenizationMemberAt,config_name_homogenization)
call results_closeJobFile
endif
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! BEGIN DEPRECATED

View File

@ -79,7 +79,7 @@ module math
!---------------------------------------------------------------------------------------------------
private :: &
unitTest
selfTest
contains
@ -113,7 +113,7 @@ subroutine math_init
call random_seed(put = randInit)
call unitTest
call selfTest
end subroutine math_init
@ -1192,7 +1192,7 @@ end function math_clip
!--------------------------------------------------------------------------------------------------
!> @brief check correctness of some math functions
!--------------------------------------------------------------------------------------------------
subroutine unitTest
subroutine selfTest
integer, dimension(2,4) :: &
sort_in_ = reshape([+1,+5, +5,+6, -1,-1, +3,-2],[2,4])
@ -1330,6 +1330,6 @@ subroutine unitTest
if(dNeq0(math_LeviCivita(ijk(1),ijk(2),ijk(3))))&
call IO_error(0,ext_msg='math_LeviCivita')
end subroutine unitTest
end subroutine selfTest
end module math

View File

@ -6,7 +6,7 @@
!> @details doing cutbacking, reporting statistics, writing
!> results
!--------------------------------------------------------------------------------------------------
program DAMASK_FEM
program DAMASK_mesh
#include <petsc/finclude/petscsys.h>
use PetscDM
use prec
@ -367,4 +367,4 @@ program DAMASK_FEM
call quit(0) ! no complaints ;)
end program DAMASK_FEM
end program DAMASK_mesh

View File

@ -63,7 +63,9 @@ contains
!> @brief initializes the mesh by calling all necessary private routines of the mesh module
!! Order and routines strongly depend on type of solver
!--------------------------------------------------------------------------------------------------
subroutine discretization_mesh_init
subroutine discretization_mesh_init(restart)
logical, intent(in) :: restart
integer, dimension(1), parameter:: FE_geomtype = [1] !< geometry type of particular element type
integer, dimension(1) :: FE_Nips !< number of IPs in a specific type of element

View File

@ -17,11 +17,9 @@ module mesh_mech_FEM
use prec
use FEM_utilities
use discretization_mesh
use IO
use DAMASK_interface
use numerics
use FEM_quadrature
use FEsolving
use homogenization
use math

View File

@ -63,8 +63,8 @@ module numerics
#endif
!--------------------------------------------------------------------------------------------------
! FEM parameters:
#ifdef FEM
! Mesh parameters:
#ifdef Mesh
integer, protected, public :: &
integrationOrder = 2, & !< order of quadrature rule required
structOrder = 2 !< order of displacement shape functions
@ -200,8 +200,8 @@ subroutine numerics_init
#endif
!--------------------------------------------------------------------------------------------------
! FEM parameters
#ifdef FEM
! Mesh parameters
#ifdef Mesh
case ('integrationorder')
integrationorder = IO_intValue(line,chunkPos,2)
case ('structorder')
@ -267,7 +267,7 @@ subroutine numerics_init
!--------------------------------------------------------------------------------------------------
! spectral parameters
#ifdef FEM
#ifdef Mesh
write(6,'(a24,1x,i8)') ' integrationOrder: ',integrationOrder
write(6,'(a24,1x,i8)') ' structOrder: ',structOrder
write(6,'(a24,1x,L8)') ' B-Bar stabilisation: ',BBarStabilisation

View File

@ -75,7 +75,7 @@ module prec
emptyStringArray = [character(len=pStringLen)::]
private :: &
unitTest
selfTest
contains
@ -94,7 +94,7 @@ subroutine prec_init
write(6,'(a,e10.3)') ' Minimum value: ',tiny(0.0_pReal)
write(6,'(a,i3)') ' Decimal precision: ',precision(0.0_pReal)
call unitTest
call selfTest
end subroutine prec_init
@ -233,7 +233,7 @@ end function cNeq
!--------------------------------------------------------------------------------------------------
!> @brief check correctness of some prec functions
!--------------------------------------------------------------------------------------------------
subroutine unitTest
subroutine selfTest
integer, allocatable, dimension(:) :: realloc_lhs_test
real(pReal), dimension(2) :: r
@ -249,6 +249,6 @@ subroutine unitTest
realloc_lhs_test = [1,2]
if (any(realloc_lhs_test/=[1,2])) call quit(9000)
end subroutine unitTest
end subroutine selfTest
end module prec

View File

@ -112,7 +112,7 @@ contains
subroutine quaternions_init
write(6,'(/,a)') ' <<<+- quaternions init -+>>>'; flush(6)
call unitTest
call selfTest
end subroutine quaternions_init
@ -457,7 +457,7 @@ end function inverse
!--------------------------------------------------------------------------------------------------
!> @brief check correctness of some quaternions functions
!--------------------------------------------------------------------------------------------------
subroutine unitTest
subroutine selfTest
real(pReal), dimension(4) :: qu
type(quaternion) :: q, q_2
@ -524,7 +524,7 @@ subroutine unitTest
endif
#endif
end subroutine unitTest
end subroutine selfTest
end module quaternions

View File

@ -15,31 +15,31 @@ module results
implicit none
private
integer(HID_T) :: resultsFile
interface results_writeDataset
module procedure results_writeTensorDataset_real
module procedure results_writeVectorDataset_real
module procedure results_writeScalarDataset_real
module procedure results_writeTensorDataset_int
module procedure results_writeVectorDataset_int
module procedure results_writeScalarDataset_rotation
end interface results_writeDataset
interface results_addAttribute
module procedure results_addAttribute_real
module procedure results_addAttribute_int
module procedure results_addAttribute_str
module procedure results_addAttribute_int_array
module procedure results_addAttribute_real_array
end interface results_addAttribute
public :: &
@ -59,24 +59,28 @@ module results
results_mapping_materialpoint
contains
subroutine results_init
subroutine results_init(restart)
logical, intent(in) :: restart
character(len=pStringLen) :: commandLine
write(6,'(/,a)') ' <<<+- results init -+>>>'
write(6,'(/,a)') ' <<<+- results init -+>>>'; flush(6)
write(6,'(/,a)') ' Diehl et al., Integrating Materials and Manufacturing Innovation 6(1):83-91, 2017'
write(6,'(a)') ' https://doi.org/10.1007/s40192-017-0084-5'
resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
call results_addAttribute('DADF5_version_major',0)
call results_addAttribute('DADF5_version_minor',6)
call results_addAttribute('DAMASK_version',DAMASKVERSION)
call get_command(commandLine)
call results_addAttribute('call',trim(commandLine))
call results_closeGroup(results_addGroup('mapping'))
call results_closeGroup(results_addGroup('mapping/cellResults'))
call results_closeJobFile
if(.not. restart) then
resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','w',.true.)
call results_addAttribute('DADF5_version_major',0)
call results_addAttribute('DADF5_version_minor',6)
call results_addAttribute('DAMASK_version',DAMASKVERSION)
call get_command(commandLine)
call results_addAttribute('call',trim(commandLine))
call results_closeGroup(results_addGroup('mapping'))
call results_closeGroup(results_addGroup('mapping/cellResults'))
call results_closeJobFile
endif
end subroutine results_init
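
A minimal h5py sketch of the layout that results_init creates on a fresh (non-restart) run; attribute and group names are those written above, the job name is hypothetical:

import h5py

with h5py.File('job.hdf5','w') as f:                                          # 'job' stands in for getSolverJobName()
    f.attrs['DADF5_version_major'] = 0
    f.attrs['DADF5_version_minor'] = 6
    f.create_group('mapping/cellResults')                                     # intermediate group 'mapping' is created too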
@ -87,7 +91,7 @@ end subroutine results_init
subroutine results_openJobFile
resultsFile = HDF5_openFile(trim(getSolverJobName())//'.hdf5','a',.true.)
end subroutine results_openJobFile
@ -105,7 +109,7 @@ end subroutine results_closeJobFile
!> @brief creates the group of increment and adds time as attribute to the file
!--------------------------------------------------------------------------------------------------
subroutine results_addIncrement(inc,time)
integer, intent(in) :: inc
real(pReal), intent(in) :: time
character(len=pStringLen) :: incChar
@ -137,7 +141,7 @@ end subroutine results_finalizeIncrement
integer(HID_T) function results_openGroup(groupName)
character(len=*), intent(in) :: groupName
results_openGroup = HDF5_openGroup(resultsFile,groupName)
end function results_openGroup
@ -149,7 +153,7 @@ end function results_openGroup
integer(HID_T) function results_addGroup(groupName)
character(len=*), intent(in) :: groupName
results_addGroup = HDF5_addGroup(resultsFile,groupName)
end function results_addGroup
@ -161,7 +165,7 @@ end function results_addGroup
subroutine results_closeGroup(group_id)
integer(HID_T), intent(in) :: group_id
call HDF5_closeGroup(group_id)
end subroutine results_closeGroup
@ -290,23 +294,25 @@ subroutine results_writeScalarDataset_real(group,dataset,label,description,SIuni
character(len=*), intent(in) :: label,group,description
character(len=*), intent(in), optional :: SIunit
real(pReal), intent(inout), dimension(:) :: dataset
integer(HID_T) :: groupHandle
groupHandle = results_openGroup(group)
#ifdef PETSc
call HDF5_write(groupHandle,dataset,label,.true.)
#else
call HDF5_write(groupHandle,dataset,label,.false.)
#endif
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Description',description,label)
if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeScalarDataset_real
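
All specific writers in this module are reached through the generic results_writeDataset interface declared above; Fortran resolves the call on the rank and type of the dataset argument. A hedged usage sketch (the group path, array name, and values are assumptions, not code from this commit):

real(pReal), dimension(:), allocatable :: sigma_vM   ! assumed result array, one value per cell
allocate(sigma_vM(42)); sigma_vM = 0.0_pReal         ! placeholder values
call results_writeDataset('current/constituent/1_Aluminum', sigma_vM, &
                          'sigma_vM','von Mises equivalent stress','Pa')
! a rank-1 real(pReal) dataset resolves to results_writeScalarDataset_real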
@ -319,23 +325,25 @@ subroutine results_writeVectorDataset_real(group,dataset,label,description,SIuni
character(len=*), intent(in) :: label,group,description
character(len=*), intent(in), optional :: SIunit
real(pReal), intent(inout), dimension(:,:) :: dataset
integer(HID_T) :: groupHandle
groupHandle = results_openGroup(group)
#ifdef PETSc
call HDF5_write(groupHandle,dataset,label,.true.)
#else
call HDF5_write(groupHandle,dataset,label,.false.)
#endif
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Description',description,label)
if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeVectorDataset_real
@ -350,13 +358,13 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
character(len=*), intent(in), optional :: SIunit
logical, intent(in), optional :: transposed
real(pReal), intent(in), dimension(:,:,:) :: dataset
integer :: i
integer :: i
logical :: transposed_
integer(HID_T) :: groupHandle
real(pReal), dimension(:,:,:), allocatable :: dataset_transposed
if(present(transposed)) then
transposed_ = transposed
else
@ -374,19 +382,21 @@ subroutine results_writeTensorDataset_real(group,dataset,label,description,SIuni
endif
groupHandle = results_openGroup(group)
#ifdef PETSc
call HDF5_write(groupHandle,dataset_transposed,label,.true.)
#else
call HDF5_write(groupHandle,dataset_transposed,label,.false.)
#endif
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Description',description,label)
if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeTensorDataset_real
@ -400,23 +410,25 @@ subroutine results_writeVectorDataset_int(group,dataset,label,description,SIunit
character(len=*), intent(in) :: label,group,description
character(len=*), intent(in), optional :: SIunit
integer, intent(inout), dimension(:,:) :: dataset
integer(HID_T) :: groupHandle
groupHandle = results_openGroup(group)
#ifdef PETSc
call HDF5_write(groupHandle,dataset,label,.true.)
#else
call HDF5_write(groupHandle,dataset,label,.false.)
#endif
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Description',description,label)
if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeVectorDataset_int
@ -430,23 +442,25 @@ subroutine results_writeTensorDataset_int(group,dataset,label,description,SIunit
character(len=*), intent(in) :: label,group,description
character(len=*), intent(in), optional :: SIunit
integer, intent(inout), dimension(:,:,:) :: dataset
integer(HID_T) :: groupHandle
groupHandle = results_openGroup(group)
#ifdef PETSc
call HDF5_write(groupHandle,dataset,label,.true.)
#else
call HDF5_write(groupHandle,dataset,label,.false.)
#endif
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Description',description,label)
if (HDF5_objectExists(groupHandle,label) .and. present(SIunit)) &
call HDF5_addAttribute(groupHandle,'Unit',SIunit,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeTensorDataset_int
@ -460,23 +474,25 @@ subroutine results_writeScalarDataset_rotation(group,dataset,label,description,l
character(len=*), intent(in) :: label,group,description
character(len=*), intent(in), optional :: lattice_structure
type(rotation), intent(inout), dimension(:) :: dataset
integer(HID_T) :: groupHandle
groupHandle = results_openGroup(group)
#ifdef PETSc
call HDF5_write(groupHandle,dataset,label,.true.)
#else
call HDF5_write(groupHandle,dataset,label,.false.)
#endif
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Description',description,label)
if (HDF5_objectExists(groupHandle,label) .and. present(lattice_structure)) &
call HDF5_addAttribute(groupHandle,'Lattice',lattice_structure,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Creator','DAMASK '//DAMASKVERSION,label)
if (HDF5_objectExists(groupHandle,label)) &
call HDF5_addAttribute(groupHandle,'Created',now(),label)
call HDF5_closeGroup(groupHandle)
end subroutine results_writeScalarDataset_rotation
@ -486,11 +502,11 @@ end subroutine results_writeScalarDataset_rotation
!> @brief adds the unique mapping from spatial position and constituent ID to results
!--------------------------------------------------------------------------------------------------
subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
integer, dimension(:,:), intent(in) :: phaseAt !< phase section at (constituent,element)
integer, dimension(:,:,:), intent(in) :: memberAtLocal !< phase member at (constituent,IP,element)
character(len=pStringLen), dimension(:), intent(in) :: label !< label of each phase section
integer, dimension(size(memberAtLocal,1),size(memberAtLocal,2),size(memberAtLocal,3)) :: &
phaseAtMaterialpoint, &
memberAtGlobal
@ -500,7 +516,7 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
myShape, & !< shape of the dataset (this process)
myOffset, &
totalShape !< shape of the dataset (all processes)
integer(HID_T) :: &
loc_id, & !< identifier of group in file
dtype_id, & !< identifier of compound data type
@ -512,30 +528,30 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
plist_id, &
dt_id
integer(SIZE_T) :: type_size_string, type_size_int
integer :: ierr, i
!---------------------------------------------------------------------------------------------------
! compound type: name of phase section + position/index within results array
call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, ierr)
call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), ierr)
call h5tget_size_f(dt_id, type_size_string, ierr)
call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, ierr)
call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,ierr)
call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, ierr)
!--------------------------------------------------------------------------------------------------
! create memory types for each component of the compound type
call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, ierr)
call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, ierr)
call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, ierr)
call h5tclose_f(dt_id, ierr)
!--------------------------------------------------------------------------------------------------
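
The compound datatype packs a fixed-length name string and an integer position contiguously into one record; name_id and position_id are one-member views of that record so each field can later be written by a separate h5dwrite_f call (enabled by the h5pset_preserve_f further down). A layout sketch of one record:

! byte offset:  0 .................. type_size_string ...... type_size_string + type_size_int
!               |-------- "Name" --------|----------- "Position" -----------|
! dtype_id describes the full record; name_id and position_id each remap a
! single member of it to offset 0 of their own compound type.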
@ -553,10 +569,10 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
#ifdef PETSc
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5pset_dxpl_mpio_f')
call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_constituent: MPI_allreduce/writeSize')
call MPI_allreduce(MPI_IN_PLACE,memberOffset,size(memberOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_constituent: MPI_allreduce/memberOffset')
#endif
@ -564,15 +580,15 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
myShape = int([size(phaseAt,1),writeSize(worldrank)], HSIZE_T)
myOffset = int([0,sum(writeSize(0:worldrank-1))], HSIZE_T)
totalShape = int([size(phaseAt,1),sum(writeSize)], HSIZE_T)
!--------------------------------------------------------------------------------------------------
! create dataspace in memory (local shape = hyperslab) and in file (global shape)
call h5screate_simple_f(2,myShape,memspace_id,ierr,myShape)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5screate_simple_f/memspace_id')
call h5screate_simple_f(2,totalShape,filespace_id,ierr,totalShape)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5screate_simple_f/filespace_id')
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5sselect_hyperslab_f')
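
After the two MPI_allreduce calls every rank knows the write counts of all ranks, so each hyperslab offset follows from a prefix sum. A worked example with assumed numbers: three ranks holding 4, 6, and 5 materialpoints of a 2-constituent model give writeSize = [4,6,5], hence

!   rank 0: myShape = [2,4], myOffset = [0,0],  totalShape = [2,15]
!   rank 1: myShape = [2,6], myOffset = [0,4],  totalShape = [2,15]
!   rank 2: myShape = [2,5], myOffset = [0,10], totalShape = [2,15]

i.e. every rank writes a disjoint column block of one global dataset.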
@ -581,7 +597,7 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
do i = 1, size(phaseAtMaterialpoint,2)
phaseAtMaterialpoint(:,i,:) = phaseAt
enddo
!---------------------------------------------------------------------------------------------------
! renumber member from my process to all processes
do i = 1, size(label)
@ -591,11 +607,11 @@ subroutine results_mapping_constituent(phaseAt,memberAtLocal,label)
!--------------------------------------------------------------------------------------------------
! write the components of the compound type individually
call h5pset_preserve_f(plist_id, .TRUE., ierr)
loc_id = results_openGroup('/mapping/cellResults')
call h5dcreate_f(loc_id, 'constituent', dtype_id, filespace_id, dset_id, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5dcreate_f')
call h5dwrite_f(dset_id, name_id, reshape(label(pack(phaseAtMaterialpoint,.true.)),myShape), &
myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_constituent: h5dwrite_f/name_id')
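
The name write above is the densest expression in this hunk; read from the inside out (shapes sketched for the constituent case):

! pack(phaseAtMaterialpoint,.true.)  -> rank-1 vector of phase indices, one per (constituent,IP,element)
! label( ... )                       -> vector subscript: the section name of every entry
! reshape( ... ,myShape)             -> back to [n_constituents, n_local_members] for the hyperslab write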
@ -621,11 +637,11 @@ end subroutine results_mapping_constituent
!> @brief adds the unique mapping from spatial position and homogenization ID to results
!--------------------------------------------------------------------------------------------------
subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
integer, dimension(:), intent(in) :: homogenizationAt !< homogenization section at (element)
integer, dimension(:,:), intent(in) :: memberAtLocal !< homogenization member at (IP,element)
character(len=pStringLen), dimension(:), intent(in) :: label !< label of each homogenization section
integer, dimension(size(memberAtLocal,1),size(memberAtLocal,2)) :: &
homogenizationAtMaterialpoint, &
memberAtGlobal
@ -635,7 +651,7 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
myShape, & !< shape of the dataset (this process)
myOffset, &
totalShape !< shape of the dataset (all processes)
integer(HID_T) :: &
loc_id, & !< identifier of group in file
dtype_id, & !< identifier of compound data type
@ -647,30 +663,30 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
plist_id, &
dt_id
integer(SIZE_T) :: type_size_string, type_size_int
integer :: ierr, i
!---------------------------------------------------------------------------------------------------
! compound type: name of homogenization section + position/index within results array
call h5tcopy_f(H5T_NATIVE_CHARACTER, dt_id, ierr)
call h5tset_size_f(dt_id, int(len(label(1)),SIZE_T), ierr)
call h5tget_size_f(dt_id, type_size_string, ierr)
call h5tget_size_f(H5T_NATIVE_INTEGER, type_size_int, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_string + type_size_int, dtype_id, ierr)
call h5tinsert_f(dtype_id, "Name", 0_SIZE_T, dt_id,ierr)
call h5tinsert_f(dtype_id, "Position", type_size_string, H5T_NATIVE_INTEGER, ierr)
!--------------------------------------------------------------------------------------------------
! create memory types for each component of the compound type
call h5tcreate_f(H5T_COMPOUND_F, type_size_string, name_id, ierr)
call h5tinsert_f(name_id, "Name", 0_SIZE_T, dt_id, ierr)
call h5tcreate_f(H5T_COMPOUND_F, type_size_int, position_id, ierr)
call h5tinsert_f(position_id, "Position", 0_SIZE_T, H5T_NATIVE_INTEGER, ierr)
call h5tclose_f(dt_id, ierr)
!--------------------------------------------------------------------------------------------------
@ -688,10 +704,10 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
#ifdef PETSc
call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5pset_dxpl_mpio_f')
call MPI_allreduce(MPI_IN_PLACE,writeSize,worldsize,MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr) ! get output at each process
if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_materialpoint: MPI_allreduce/writeSize')
call MPI_allreduce(MPI_IN_PLACE,memberOffset,size(memberOffset),MPI_INT,MPI_SUM,PETSC_COMM_WORLD,ierr)! get offset at each process
if (ierr /= 0) call IO_error(894,ext_msg='results_mapping_materialpoint: MPI_allreduce/memberOffset')
#endif
@ -699,15 +715,15 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
myShape = int([writeSize(worldrank)], HSIZE_T)
myOffset = int([sum(writeSize(0:worldrank-1))], HSIZE_T)
totalShape = int([sum(writeSize)], HSIZE_T)
!--------------------------------------------------------------------------------------------------
! create dataspace in memory (local shape = hyperslab) and in file (global shape)
call h5screate_simple_f(1,myShape,memspace_id,ierr,myShape)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5screate_simple_f/memspace_id')
call h5screate_simple_f(1,totalShape,filespace_id,ierr,totalShape)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5screate_simple_f/filespace_id')
call h5sselect_hyperslab_f(filespace_id, H5S_SELECT_SET_F, myOffset, myShape, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5sselect_hyperslab_f')
@ -716,7 +732,7 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
do i = 1, size(homogenizationAtMaterialpoint,1)
homogenizationAtMaterialpoint(i,:) = homogenizationAt
enddo
!---------------------------------------------------------------------------------------------------
! renumber member from my process to all processes
do i = 1, size(label)
@ -726,11 +742,11 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
!--------------------------------------------------------------------------------------------------
! write the components of the compound type individually
call h5pset_preserve_f(plist_id, .TRUE., ierr)
loc_id = results_openGroup('/mapping/cellResults')
call h5dcreate_f(loc_id, 'materialpoint', dtype_id, filespace_id, dset_id, ierr)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5dcreate_f')
call h5dwrite_f(dset_id, name_id, reshape(label(pack(homogenizationAtMaterialpoint,.true.)),myShape), &
myShape, ierr, file_space_id = filespace_id, mem_space_id = memspace_id, xfer_prp = plist_id)
if (ierr < 0) call IO_error(1,ext_msg='results_mapping_materialpoint: h5dwrite_f/name_id')
@ -752,6 +768,21 @@ subroutine results_mapping_materialpoint(homogenizationAt,memberAtLocal,label)
end subroutine results_mapping_materialpoint
!--------------------------------------------------------------------------------------------------
!> @brief current date and time (including time zone information)
!--------------------------------------------------------------------------------------------------
character(len=24) function now()
character(len=5) :: zone
integer, dimension(8) :: values
call date_and_time(values=values,zone=zone)
write(now,'(i4.4,5(a,i2.2),a)') &
values(1),'-',values(2),'-',values(3),' ',values(5),':',values(6),':',values(7),zone
end function now
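
! Illustration (assumed values, not from this commit): for May 31st 2020,
! 12:34:56 CEST, date_and_time returns zone = '+0200' and now() produces
!   '2020-05-31 12:34:56+0200'
! 19 characters of date/time plus the 5-character zone fill the declared len=24 exactly.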
!!--------------------------------------------------------------------------------------------------
!!> @brief adds the backward mapping from spatial position and constituent ID to results
!!--------------------------------------------------------------------------------------------------

View File

@ -105,7 +105,11 @@ subroutine rotations_init
call quaternions_init
write(6,'(/,a)') ' <<<+- rotations init -+>>>'; flush(6)
call unitTest
write(6,'(/,a)') ' Rowenhorst et al., Modelling and Simulation in Materials Science and Engineering 23:083501, 2015'
write(6,'(a)') ' https://doi.org/10.1088/0965-0393/23/8/083501'
call selfTest
end subroutine rotations_init
@ -1340,7 +1344,7 @@ end function GetPyramidOrder
!--------------------------------------------------------------------------------------------------
!> @brief check correctness of some rotations functions
!--------------------------------------------------------------------------------------------------
subroutine unitTest
subroutine selfTest
type(rotation) :: R
real(pReal), dimension(4) :: qu, ax, ro
@ -1443,7 +1447,7 @@ subroutine unitTest
enddo
end subroutine unitTest
end subroutine selfTest
end module rotations